text
stringlengths 0
1.05M
| meta
dict |
---|---|
#$Id$#
from books.model.VendorPayment import VendorPayment
from books.model.VendorPaymentList import VendorPaymentList
from books.model.Bill import Bill
class VendorPaymentsParser:
    """Parser for JSON responses returned by the Vendor Payments API."""

    def get_list(self, resp):
        """Build a vendor payments list object from a JSON response.

        Args:
            resp(dict): Dictionary containing json object for vendor
                payments list.

        Returns:
            instance: Vendor payments list object.
        """
        payments = VendorPaymentList()
        for entry in resp['vendorpayments']:
            payment = VendorPayment()
            payment.set_payment_id(entry['payment_id'])
            payment.set_vendor_id(entry['vendor_id'])
            payment.set_vendor_name(entry['vendor_name'])
            payment.set_payment_mode(entry['payment_mode'])
            payment.set_description(entry['description'])
            payment.set_date(entry['date'])
            payment.set_reference_number(entry['reference_number'])
            payment.set_exchange_rate(entry['exchange_rate'])
            payment.set_amount(entry['amount'])
            payment.set_paid_through_account_id(
                entry['paid_through_account_id'])
            payment.set_paid_through_account_name(
                entry['paid_through_account_name'])
            payment.set_balance(entry['balance'])
            payments.set_vendor_payments(payment)
        return payments

    def get_vendor_payment(self, resp):
        """Build a vendor payment object (with its bills) from a JSON response.

        Args:
            resp(dict): Dictionary containing json object for vendor payments.

        Returns:
            instance: Vendor payment object.
        """
        data = resp['vendorpayment']
        payment = VendorPayment()
        payment.set_payment_id(data['payment_id'])
        payment.set_vendor_id(data['vendor_id'])
        payment.set_vendor_name(data['vendor_name'])
        payment.set_payment_mode(data['payment_mode'])
        payment.set_description(data['description'])
        payment.set_date(data['date'])
        payment.set_reference_number(data['reference_number'])
        payment.set_exchange_rate(data['exchange_rate'])
        payment.set_amount(data['amount'])
        payment.set_currency_symbol(data['currency_symbol'])
        payment.set_paid_through_account_id(data['paid_through_account_id'])
        payment.set_paid_through_account_name(
            data['paid_through_account_name'])
        # Each bill the payment was applied to is parsed into a Bill object.
        for bill_entry in data['bills']:
            bill = Bill()
            bill.set_bill_number(bill_entry['bill_number'])
            bill.set_bill_payment_id(bill_entry['bill_payment_id'])
            bill.set_bill_id(bill_entry['bill_id'])
            bill.set_total(bill_entry['total'])
            bill.set_balance(bill_entry['balance'])
            bill.set_amount_applied(bill_entry['amount_applied'])
            bill.set_date(bill_entry['date'])
            bill.set_due_date(bill_entry['due_date'])
            payment.set_bills(bill)
        return payment

    def get_message(self, resp):
        """Extract the success message from a JSON response.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.
        """
        return resp['message']
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/parser/VendorPaymentsParser.py",
"copies": "1",
"size": "3926",
"license": "mit",
"hash": -3219629195741161000,
"line_mean": 40.3263157895,
"line_max": 78,
"alpha_frac": 0.6304126337,
"autogenerated": false,
"ratio": 4.110994764397906,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5241407398097906,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.BankAccountsParser import BankAccountsParser
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'bankaccounts/'
zoho_http_client = ZohoHttpClient()
parser = BankAccountsParser()
class BankAccountsApi:
    """Bank Accounts Api is used to

    1.List all bank and credit card accounts of the organization.
    2.Get detailed look of a specified account.
    3.Create a bank account or a credit card account for an organization.
    4.Modify an existing account.
    5.Delete an existing bank account from an organization.
    6.Make an account inactive.
    7.Make an account active.
    8.Get the details of last imported statement.
    9.Delete the statement that was imported lastly.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize Bank accounts api using user's authtoken and
        organization id.

        Args:
            authtoken(str): Authtoken.
            organization_id(str): Organization id.
        """
        # Common query parameters sent with every request.
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
        }
    def get_bank_accounts(self, parameter=None):
        """List all bank and credit accounts of an organization.

        Args:
            parameter(dict, optional): Filter with which the list has to be
                displayed. Defaults to None.

        Returns:
            instance: Bank accounts list object.
        """
        resp = zoho_http_client.get(base_url, self.details, parameter)
        return parser.get_list(resp)
    def get(self, account_id):
        """Get bank account details.

        Args:
            account_id(str): Account id.

        Returns:
            instance: Bank accounts object.
        """
        url = base_url + account_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_account_details(resp)
    def create(self, bank_account):
        """Create a bank account.

        Args:
            bank_account(instance): Bank accounts object.

        Returns:
            instance: Bank accounts object.
        """
        json_object = dumps(bank_account.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.post(base_url, self.details, data)
        return parser.get_account_details(resp)
    def update(self, account_id, bank_account):
        """Update an existing bank account.

        Args:
            account_id(str): Account id.
            bank_account(instance): Bank account object.

        Returns:
            instance: Bank account object.
        """
        url = base_url + account_id
        json_object = dumps(bank_account.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_account_details(resp)
    def delete(self, account_id):
        """Delete an existing bank account.

        Args:
            account_id(str): Account id.

        Returns:
            str: Success message('The account has been deleted.').
        """
        url = base_url + account_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
    def deactivate_account(self, account_id):
        """Deactivate a bank account.

        Args:
            account_id(str): Account id.

        Returns:
            str: Success message('The account has been marked as inactive.').
        """
        url = base_url + account_id + '/inactive'
        # The endpoint expects a POST with an empty JSONString payload.
        data = {
            'JSONString': ''
        }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)
    def activate_account(self, account_id):
        """Activate a bank account.

        Args:
            account_id(str): Account id.

        Returns:
            str: Success message('The account has been marked as active.').
        """
        url = base_url + account_id + '/active'
        data = {
            'JSONString': ''
        }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)
    def get_last_imported_statement(self, account_id):
        """Get the details of previously imported statement for the account.

        Args:
            account_id(str): Account id.

        Returns:
            instance: Statement object.
        """
        url = base_url + account_id + '/statement/lastimported'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_statement(resp)
    def delete_last_imported_statement(self, account_id, statement_id):
        """Delete the statement that was previously imported.

        Args:
            account_id(str): Account id.
            statement_id(str): Statement id.

        Returns:
            str: Success message('You have successfully deleted the last
                imported statement.').

        Raises:
            Books Exception: If status is not '200' or '201'.
        """
        url = base_url + account_id + '/statement/' + statement_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/BankAccountsApi.py",
"copies": "1",
"size": "5353",
"license": "mit",
"hash": 3971424996258980000,
"line_mean": 27.7795698925,
"line_max": 82,
"alpha_frac": 0.5800485709,
"autogenerated": false,
"ratio": 4.228278041074249,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.02605626876032225,
"num_lines": 186
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.BankRulesParser import BankRulesParser
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'bankaccounts/rules/'
zoho_http_client = ZohoHttpClient()
parser = BankRulesParser()
class BankRulesApi:
    """This class is used to

    1.Fetch all the rules created for a specified bank or credit card account.
    2.Get details of a specific rule.
    3.Create a rule.
    4.Update an existing rule.
    5.Delete a rule.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize Bank rules Api using user's authtoken and organization
        id.

        Args:
            authtoken(str): User's Authtoken.
            organization_id(str): User's Organization id.
        """
        # Common query parameters sent with every request.
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
        }
    def get_rules(self, account_id):
        """Get list of rules.

        Args:
            account_id(str): Account id for which the rules have to be listed.

        Returns:
            instance: Bank rules list object.
        """
        param = {
            'account_id': account_id
        }
        resp = zoho_http_client.get(base_url, self.details, param)
        return parser.get_rules(resp)
    def get(self, rule_id):
        """Get details of a rule.

        Args:
            rule_id(str): Rule id.

        Returns:
            instance: Bank rules object.
        """
        url = base_url + rule_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_rule(resp)
    def create(self, rule):
        """Create a rule.

        Args:
            rule(instance): Bank rule object.

        Returns:
            instance: Bank rule object.
        """
        json_object = dumps(rule.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.post(base_url, self.details, data)
        return parser.get_rule(resp)
    def update(self, rule_id, rule):
        """Update an existing rule.

        Args:
            rule_id(str): Rule id.
            rule(instance): Bank rule object with the updated values.

        Returns:
            instance: Bank rule object.
        """
        url = base_url + rule_id
        json_object = dumps(rule.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_rule(resp)
    def delete(self, rule_id):
        """Delete an existing rule.

        Args:
            rule_id(str): Rule id.

        Returns:
            str: Success message('The rule has been deleted.').
        """
        url = base_url + rule_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/BankRulesApi.py",
"copies": "1",
"size": "2877",
"license": "mit",
"hash": -260287352271217660,
"line_mean": 24.2368421053,
"line_max": 78,
"alpha_frac": 0.5509210984,
"autogenerated": false,
"ratio": 3.9902912621359223,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5041212360535922,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.BankTransactionsParser import BankTransactionsParser
from books.api.Api import Api
from os.path import basename
from json import dumps
base_url = Api().base_url + 'banktransactions/'
zoho_http_client = ZohoHttpClient()
parser = BankTransactionsParser()
class BankTransactionsApi:
    """Bank Transactions Api class is used to

    1.Get all the transaction details involved in an account.
    2.Get the details of a transaction.
    3.Create a bank transaction based on allowed transaction types.
    4.Update an existing bank transaction.
    5.Delete a transaction from an account.
    6.Get matching transactions.
    7.Match an uncategorized transaction with an existing transaction in the
    account.
    8.Unmatch a transaction that was previously matched and make it
    uncategorized.
    9.Get a list of all the associated transactions that were matched or
    categorized to the given imported transaction.
    10.Exclude a transaction from your bank or credit card account.
    11.Restore an excluded transaction in your account.
    12.Categorize an uncategorized transaction by creating a new transaction.
    13.Categorize an uncategorized transaction as a refund from credit note.
    14.Categorize an uncategorized transaction as vendor payment.
    15.Categorize an uncategorized transaction as customer payment.
    16.categorize an uncategorized transaction as Expense.
    17.Revert a categorized transaction as uncategorized.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize Bank transaction api using user's authtoken and
        organization id.

        Args:
            authtoken(str): User's Authtoken.
            organization_id(str): User's Organization id.
        """
        # Common query parameters sent with every request.
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
        }
    def get_bank_transactions(self, parameter=None):
        """Get all transaction details involved in an account.

        Args:
            parameter(dict, optional): Filter with which the list has to be
                displayed. Defaults to None.

        Returns:
            instance: Bank transaction list object.
        """
        resp = zoho_http_client.get(base_url, self.details, parameter)
        return parser.get_list(resp)
    def get(self, transaction_id):
        """Get bank transaction details.

        Args:
            transaction_id(str): Transaction id.

        Returns:
            instance: Bank transaction object.
        """
        url = base_url + transaction_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_transaction(resp)
    def create(self, transaction):
        """Create a bank transaction for an account.

        Args:
            transaction(instance): Bank Transaction object.

        Returns:
            instance: Bank transaction object.
        """
        json_object = dumps(transaction.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.post(base_url, self.details, data)
        return parser.get_transaction(resp)
    def update(self, bank_transaction_id, transaction):
        """Update an existing transaction.

        Args:
            bank_transaction_id(str): Bank transaction id.
            transaction(instance): Bank transaction object.

        Returns:
            instance: Bank transaction object.
        """
        url = base_url + bank_transaction_id
        json_object = dumps(transaction.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_transaction(resp)
    def delete(self, transaction_id):
        """Delete an existing transaction.

        Args:
            transaction_id(str): Transaction id.

        Returns:
            str: Success message('The transaction has been deleted.').
        """
        url = base_url + transaction_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
    def get_matching_transactions(self, bank_transaction_id, parameters=None):
        """Provide criteria to search for matching uncategorized transactions.

        Args:
            bank_transaction_id(str): Bank transaction id.
            parameters(dict, optional): Filter with which matching transactions
                has to be displayed.

        Returns:
            instance: Transactions list object.
        """
        url = base_url + 'uncategorized/' + bank_transaction_id + \
              '/match'
        resp = zoho_http_client.get(url, self.details, parameters)
        return parser.get_matching_transaction(resp)
    def match_a_transaction(self, transaction_id, transactions,
                            account_id=None):
        """Match an uncategorized transaction with an existing transaction in
        the account.

        Args:
            transaction_id(str): Transaction id.
            transactions(list of instance): List of transactions object.
            account_id(str): Account id.

        Returns:
            str: Success message('The Uncategorized transaction is linked to
                the selected transaction(s) in Zoho Books.').
        """
        url = base_url + 'uncategorized/' + transaction_id + '/match'
        data = {
            'transactions_to_be_matched': []
        }
        # Only the id and type of each candidate transaction are sent.
        for value in transactions:
            transactions_to_be_matched = {}
            transactions_to_be_matched['transaction_id'] = \
                value.get_transaction_id()
            transactions_to_be_matched['transaction_type'] = \
                value.get_transaction_type()
            data['transactions_to_be_matched'].append(\
                transactions_to_be_matched)
        if account_id is None:
            query = None
        else:
            query = {
                'account_id': account_id
            }
        json_string = {
            'JSONString': dumps(data)
        }
        resp = zoho_http_client.post(url, self.details, json_string, query)
        return parser.get_message(resp)
    def unmatch_a_matched_transaction(self, transaction_id, account_id=None):
        """Unmatch a transaction that was previously matched and make it
        uncategorized.

        Args:
            transaction_id(str): Transaction id.
            account_id(str, optional): Account id. Defaults to None.

        Returns:
            str: Success message('The transaction has been unmatched.').
        """
        url = base_url + transaction_id + '/unmatch'
        if account_id is None:
            parameter = None
        else:
            parameter = {
                'account_id': account_id
            }
        data = {
            'JSONString': ''
        }
        resp = zoho_http_client.post(url, self.details, data, parameter)
        return parser.get_message(resp)
    def get_associated_transactions(self, transaction_id, sort_column=None):
        """Get a list of all the transactions that were matched or categorized
        to the given imported transaction.

        Args:
            transaction_id(str): Transaction id.
            sort_column(str, optional): Column to sort by. Allowed value is
                statement date. Defaults to None.

        Returns:
            instance: Transaction list object.
        """
        url = base_url + transaction_id + '/associated'
        if sort_column is None:
            param = None
        else:
            param = {
                'sort_column': sort_column
            }
        resp = zoho_http_client.get(url, self.details, param)
        return parser.get_associated_transaction(resp)
    def exclude_a_transaction(self, transaction_id, account_id=None):
        """Exclude a transaction from your bank or credit card account.

        Args:
            transaction_id(str): Transaction id.
            account_id(str, optional): Account id. Defaults to None.

        Returns:
            str: Success message('The transaction has been excluded.').
        """
        url = base_url + 'uncategorized/' + transaction_id + '/exclude'
        if account_id is None:
            param = None
        else:
            param = {
                'account_id': account_id
            }
        data = {
            'JSONString': ''
        }
        resp = zoho_http_client.post(url, self.details, data, param)
        return parser.get_message(resp)
    def restore_a_transaction(self, transaction_id, account_id=None):
        """Restore a transaction.

        Args:
            transaction_id(str): Transaction id.
            account_id(str, optional): Account id. Defaults to None.

        Returns:
            str: Success message('The excluded transactions has been
                restored.').

        Raises:
            Books Exception: If status is not '200' or '201'.
        """
        url = base_url + 'uncategorized/' + transaction_id + '/restore'
        if account_id is None:
            param = None
        else:
            param = {
                'account_id': account_id
            }
        data = {
            'JSONString': ''
        }
        resp = zoho_http_client.post(url, self.details, data, param)
        return parser.get_message(resp)
    def categorize_an_uncategorized_transaction(self, transaction_id, \
        transaction):
        """Categorize an uncategorized transaction by creating a new
        transaction.

        Args:
            transaction_id(str): Transaction id.
            transaction(instance): Bank transaction object.

        Returns:
            str: Success message('The transaction is now categorized.').
        """
        url = base_url + 'uncategorized/' + transaction_id + '/categorize'
        json_object = dumps(transaction.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)
    def categorize_as_credit_note_refunds(self, transaction_id, credit_note):
        """Categorize an uncategorized transaction as a refund from a credit
        note.

        Args:
            transaction_id(str): Transaction id.
            credit_note(instance): Credit note object.

        Returns:
            str: Success message('The transaction is now categorized.').
        """
        url = base_url + 'uncategorized/' + transaction_id + \
              '/categorize/creditnoterefunds'
        json_object = dumps(credit_note.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)
    def categorize_as_vendor_payment(self, transaction_id, vendor_payment):
        """Categorize an uncategorized transaction as Vendor payment.

        Args:
            transaction_id(str): Transaction id.
            vendor_payment(instance): Vendor payment object.

        Returns:
            str: Success message('The transaction is now categorized.').
        """
        url = base_url + 'uncategorized/' + transaction_id + \
              '/categorize/vendorpayments'
        json_object = dumps(vendor_payment.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)
    def categorize_as_customer_payment(self, transaction_id, customer_payment, \
        contact_ids=None):
        """Categorize an uncategorized transaction as Customer Payment.

        Args:
            transaction_id(str): Transaction id.
            customer_payment(instance): Customer payment object.
            contact_ids(str, optional): Contact ids. Defaults to None.

        Returns:
            str: Success message('The transaction is now categorized.').
        """
        url = base_url + 'uncategorized/' + transaction_id + \
              '/categorize/customerpayments'
        json_object = dumps(customer_payment.to_json())
        data = {
            'JSONString': json_object
        }
        if contact_ids is None:
            param = None
        else:
            param = {
                'contact_ids': contact_ids
            }
        resp = zoho_http_client.post(url, self.details, data, param)
        return parser.get_message(resp)
    def categorize_as_expense(self, transaction_id, expense, receipt=None):
        """Categorize an uncategorized transaction as expense.

        Args:
            transaction_id(str): Transaction id.
            expense(instance): Expense object.
            receipt(file, optional): File to be attached. Allowed Extensions
                are gif, png, jpeg, jpg, bmp and pdf.

        Returns:
            str: Success message('The transaction is now categorized.').
        """
        url = base_url + 'uncategorized/' + transaction_id + \
              '/categorize/expense'
        json_object = dumps(expense.to_json())
        data = {
            'JSONString': json_object
        }
        if receipt is None:
            attachments = None
        else:
            # Read the receipt with a context manager so the file handle is
            # closed promptly (the original left it open).
            with open(receipt) as receipt_file:
                content = receipt_file.read()
            attachments = [{
                'receipt': {
                    'filename': basename(receipt),
                    'content': content
                }
            }]
        # Removed a stray Python-2 debug statement ('print data') that was a
        # SyntaxError on Python 3 and leaked request payloads to stdout.
        resp = zoho_http_client.post(url, self.details, data, None, attachments)
        return parser.get_message(resp)
    def uncategorize_a_categorized_transaction(self, transaction_id,
                                               account_id=None):
        """Revert a categorized transaction as uncategorized.

        Args:
            transaction_id(str): Transaction id.
            account_id(str, optional): Account id. Defaults to None.

        Returns:
            str: Success message('The transaction has been uncategorized.').
        """
        url = base_url + transaction_id + '/uncategorize'
        if account_id is None:
            query = None
        else:
            query = {
                'account_id': account_id
            }
        data = {
            'JSONString': ''
        }
        resp = zoho_http_client.post(url, self.details, data, query)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/BankTransactionsApi.py",
"copies": "1",
"size": "14491",
"license": "mit",
"hash": 6620214207236636000,
"line_mean": 32.6218097448,
"line_max": 80,
"alpha_frac": 0.5771168311,
"autogenerated": false,
"ratio": 4.446455968088371,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5523572799188371,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.BaseCurrencyAdjustmentParser import BaseCurrencyAdjustmentParser
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'basecurrencyadjustment/'
zoho_http_client = ZohoHttpClient()
parser = BaseCurrencyAdjustmentParser()
class BaseCurrencyAdjustmentApi:
    """Base Currency Adjustment Api is used to:

    1.List base currency adjustment.
    2.Get base currency adjustment details.
    3.List account details for base currency adjustment.
    4.Creates a base currency adjustment.
    5.Deletes a base currency adjustment.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize parameters for Base currency adjustment api.

        Args:
            authtoken(str): User's Authtoken.
            organization_id(str): User's Organization id.
        """
        # Common query parameters sent with every request.
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
        }
    def get_base_currency_adjustments(self, parameters=None):
        """List base currency adjustment.

        Args:
            parameters(dict, optional): Filter with which the list has to
                be displayed. Defaults to None.

        Returns:
            instance: Base currency adjustment list object.
        """
        resp = zoho_http_client.get(base_url, self.details, parameters)
        return parser.get_list(resp)
    def get(self, base_currency_adjustment_id):
        """Get base currency adjustment details.

        Args:
            base_currency_adjustment_id(str): Base currency adjustment id.

        Returns:
            instance: Base currency adjustment object.
        """
        url = base_url + base_currency_adjustment_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_base_currency_adjustment(resp)
    def list_account_details(self, parameters):
        """List account details for base currency adjustments.

        Args:
            parameters(dict): Parameters with which the list has to be
                displayed.

        Returns:
            instance: Base currency adjustment object.
        """
        url = base_url + 'accounts'
        resp = zoho_http_client.get(url, self.details, parameters)
        return parser.list_account_details(resp)
    def create(self, base_currency_adjustment, account_id):
        """Create base currency adjustment.

        Args:
            base_currency_adjustment(instance): Base currency adjustment
                object.
            account_id(str): Account ids (passed as the 'account_ids' query
                parameter).

        Returns:
            instance: Base currency adjustment object.
        """
        json_object = dumps(base_currency_adjustment.to_json())
        data = {
            'JSONString': json_object
        }
        account_ids = {
            'account_ids': account_id
        }
        resp = zoho_http_client.post(base_url, self.details, data, account_ids)
        return parser.get_base_currency_adjustment(resp)
    def delete(self, base_currency_adjustment_id):
        """Delete an existing base currency adjustment.

        Args:
            base_currency_adjustment_id(str): Base currency adjustment id.

        Returns:
            str: Success message('The selected base currency adjustment has
                been deleted.').
        """
        url = base_url + base_currency_adjustment_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/BaseCurrencyAdjustmentApi.py",
"copies": "1",
"size": "3563",
"license": "mit",
"hash": 4171211703789267000,
"line_mean": 29.9826086957,
"line_max": 82,
"alpha_frac": 0.6227897839,
"autogenerated": false,
"ratio": 4.45375,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.55765397839,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.BillsParser import BillsParser
from books.api.Api import Api
from os.path import basename
from json import dumps
base_url = Api().base_url + 'bills/'
zoho_http_client = ZohoHttpClient()
parser = BillsParser()
class BillsApi:
"""Bills Api is used to
1.List all bills with pagination
2.Get the details of a bill.
3.Create a bill received from a vendor.
4.Update an existing bill.
5.delete an existing bill.
6.Mark a bill status as void.
7.Mark a void bill as open.
8.Update the billing address.
9.Get the list of payments made for the bill.
10.Apply the vendor credits from excess vendor payments to a bill.
11.Delete a payment made to a bill.
12.Returns a file attached to the bill.
13.Attach a file to a bill.
14.Delete the file attached to a bill.
15.Get the cmplete history and comments of a bill.
16.Add a comment for a bill.
17.Delete a bill comment.
"""
def __init__(self, authtoken, organization_id):
"""Initialize Bills api using user's authtoken and organization id.
Args:
authotoken(str): User's Authtoken.
organization id(str): User's Organization id.
"""
self.details = {
'authtoken': authtoken,
'organization_id': organization_id
}
def get_bills(self, parameter=None):
"""List all bills with pagination.
Args:
parameter(dict, optional): Filter with which the list has to be
displayed.
Returns:
instance: Bill list object.
"""
resp = zoho_http_client.get(base_url, self.details, parameter)
return parser.get_list(resp)
def get(self, bill_id):
"""Get the details of a bill.
Args:
bill_id(str): Bill id.
Returns:
instance: Bill object.
"""
url = base_url + bill_id
resp = zoho_http_client.get(url, self.details)
return parser.get_bill(resp)
def create(self, bill, attachment=None):
"""Create a bill received from the vendor.
Args:
bill(instance): Bill object.
attachment(file, optional): File to be attached with the bill.
Allowed extensions are gif, png, jpeg, jpg, bmp and pdf.
Returns:
instance: Bill object.
"""
json_object = dumps(bill.to_json())
data = {
'JSONString': json_object
}
if attachment is None:
attachments = None
else:
attachments = [{
'attachment': {
'filename': basename(attachment),
'content': open(attachment).read()
}
}]
resp = zoho_http_client.post(base_url, self.details, data, None, \
attachments)
return parser.get_bill(resp)
def update(self, bill_id, bill, attachment=None):
"""Update an existing bill.
Args:
bill_id(str): Bill id.
bill(instance): Bill object.
attachment(file, optional): File to be attached with the bill.
Allowed extensions are gif, png, jpeg, jpg, bmp and pdf.
Returns:
instance: Bill object.
"""
url = base_url + bill_id
json_object = dumps(bill.to_json())
data = {
'JSONString': json_object
}
if attachment is None:
attachments = None
else:
attachments = [{
'attachment': {
'filename': basename(attachment),
'content': open(attachment).read()
}
}]
resp = zoho_http_client.put(url, self.details, data, None, \
attachments)
return parser.get_bill(resp)
def delete(self, bill_id):
"""Delete an existing bill.
Args:
bill_id(str): Bill id.
Returns:
str: Success message('The bill has been deleted.').
"""
url = base_url + bill_id
resp = zoho_http_client.delete(url, self.details)
return parser.get_message(resp)
def void_a_bill(self, bill_id):
"""Mark a bill status as void.
Args:
bill_id(str): Bill id.
Returns:
str: Success message('The bill has been marked as void.').
"""
url = base_url + bill_id + '/status/void'
resp = zoho_http_client.post(url, self.details, '')
return parser.get_message(resp)
def mark_a_bill_as_open(self, bill_id):
"""Mark a void bill as open.
Args:
bill_id(str): Bill id.
Returns:
str: Success message('The status of the bill has been changed from
void to open.').
"""
url = base_url + bill_id + '/status/open'
resp = zoho_http_client.post(url, self.details, '')
return parser.get_message(resp)
def update_billing_address(self, bill_id, billing_address):
"""Update the billing address for the bill.
Args:
bill_id(str): Bill id.
billing_address(instance): Billing address object.
Returns:
str: Success message('Billing address update.').
"""
url = base_url + bill_id + '/address/billing'
json_object = dumps(billing_address.to_json())
data = {
'JSONString': json_object
}
resp = zoho_http_client.put(url, self.details, data)
return parser.get_message(resp)
def list_bill_payments(self, bill_id):
"""Get the list of payments made for the bill.
Args:
bill_id(str): Bill id.
Returns:
instance: Payments list object.
"""
url = base_url + bill_id + '/payments'
resp = zoho_http_client.get(url, self.details)
return parser.get_payments_list(resp)
def apply_credits(self, bill_id, bill_payments):
"""Apply the vendor credits from excess vendor payments to a bill.
Args:
bill_id(str): Bill id.
bill_payments(list of instance): list of payments object.
Returns:
str: Success message('Credits have been applied to the bill.').
"""
url = base_url + bill_id + '/credits'
data = {}
bill_payments_list = []
for value in bill_payments:
bill_payment = {}
bill_payment['payment_id'] = value.get_payment_id()
bill_payment['amount_applied'] = value.get_amount_applied()
bill_payments_list.append(bill_payment)
data['bill_payments'] = bill_payments_list
json_string = {
'JSONString': dumps(data)
}
resp = zoho_http_client.post(url, self.details, json_string)
return parser.get_message(resp)
def delete_a_payment(self, bill_id, bill_payment_id):
"""Delete a payment made to a bill.
Args:
bill_id(str): Bill id.
bill_payment_id(str): Bill payment id.
Returns:
str: Success message('The payment has been deleted.').
Raises:
Books Exception: If status is not '200' or '201'.
"""
url = base_url + bill_id + '/payments/' + bill_payment_id
resp = zoho_http_client.delete(url, self.details)
return parser.get_message(resp)
def get_a_bill_attachment(self, bill_id, preview=None):
"""Get the file attached to the bill.
Args:
bill_id(str): Bill id.
preview(bool, optional): True to get the thumbnail of the
attachment else False.
Returns:
file: File attached to the bill.
"""
if preview is not None:
query = {
'preview': preview
}
else:
query = None
url = base_url + bill_id + '/attachment'
resp = zoho_http_client.getfile(url, self.details, query)
return resp
def add_attachments_to_a_bill(self, bill_id, attachment):
"""Attach a file to a bill.
Args:
bill_id(str): Bill id.
attachment(file): File to attach. Allowed extensions are gif, png,
jpeg, jpg, bmp, pdf, xls, xlsx, doc, docx.
Returns:
str: Success message('The document has been deleted.').
"""
url = base_url + bill_id + '/attachment'
attachments = [{
'attachment': {
'filename': basename(attachment),
'content': open(attachment).read()
}
}]
data = {
'JSONString': ''
}
resp = zoho_http_client.post(url, self.details, data, None, attachments)
return parser.get_message(resp)
def delete_an_attachment(self, bill_id):
"""Delete the file attached to a bill.
Args:
bill_id(str): Bill id.
Returns:
str: Success message('The attachment has been deleted.').
"""
url = base_url + bill_id + '/attachment'
resp = zoho_http_client.delete(url, self.details)
return parser.get_message(resp)
def list_bill_comments_and_history(self, bill_id):
"""Get the complete history and comments of a bill.
Args:
bill_id(str): Bill id.
Returns:
instance: Comments list object.
"""
url = base_url + bill_id + '/comments'
resp = zoho_http_client.get(url, self.details)
return parser.get_comments(resp)
    def add_comment(self, bill_id, description):
        """Add a comment for a bill.
        Args:
            bill_id(str): Bill id.
            description(str): Description.
        Returns:
            instance: Comment object for the newly added comment.
        """
        url = base_url + bill_id + '/comments'
        data = {
            'description': description
        }
        json_string = {
            'JSONString': dumps(data)
        }
        resp = zoho_http_client.post(url, self.details, json_string)
        return parser.get_comment(resp)
    def delete_a_comment(self, bill_id, comment_id):
        """Delete a bill comment.
        Args:
            bill_id(str): Bill id.
            comment_id(str): Comment id.
        Returns:
            str: Success message('The comment has been deleted.').
        Raises:
            Books Exception: If status is not '200' or '201'.
        """
        url = base_url + bill_id + '/comments/' + comment_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/BillsApi.py",
"copies": "1",
"size": "11008",
"license": "mit",
"hash": 5572489422318638000,
"line_mean": 28.7513513514,
"line_max": 80,
"alpha_frac": 0.5371547965,
"autogenerated": false,
"ratio": 3.9230220955096224,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9771492288595829,
"avg_score": 0.0377369206827588,
"num_lines": 370
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.ChartOfAccountsParser import ChartOfAccountsParser
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'chartofaccounts/'
parser = ChartOfAccountsParser()
zoho_http_client = ZohoHttpClient()
class ChartOfAccountsApi:
    """Chart of Accounts Api is used to:
    1.List chart of Accounts.
    2.Get the details of an account.
    3.Creates an account with the given account type.
    4.Updates an existing account.
    5.Delete an existing account.
    6.Update the account status as active.
    7.Update the account status as inactive.
    8.List all involved transactions for the given account.
    9.Deletes the transaction.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize Chart of accounts api using user's authtoken and
        organization id.
        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
        }
    def get_chart_of_accounts(self, parameters=None):
        """Get list of chart of accounts.
        Args:
            parameters(dict, optional): Filter with which the list has to be
                displayed.
        Returns:
            instance: Chart of accounts list object.
        """
        resp = zoho_http_client.get(base_url, self.details, parameters)
        return parser.get_list(resp)
    def get(self, account_id):
        """Get details of an account.
        Args:
            account_id(str): Account id.
        Returns:
            instance: Chart of accounts object.
        """
        url = base_url + account_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_account(resp)
    def create(self, account):
        """Create an account.
        Args:
            account(instance): Chart of accounts object.
        Returns:
            instance: Chart of accounts object.
        """
        json_object = dumps(account.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.post(base_url, self.details, data)
        return parser.get_account(resp)
    def update(self, account_id, account):
        """Update an account.
        Args:
            account_id(str): Account id.
            account(instance): Chart of accounts object.
        Returns:
            instance: Chart of accounts object.
        """
        url = base_url + account_id
        json_object = dumps(account.to_json())
        data = {
            'JSONString': json_object
        }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_account(resp)
    def delete(self, account_id):
        """Delete an account.
        Args:
            account_id(str): Account id.
        Returns:
            str: Success message('The account has been deleted.').
        """
        url = base_url + account_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
    def mark_an_account_as_active(self, account_id):
        """Mark an account as active.
        Args:
            account_id(str): Account id.
        Returns:
            str: Success message('The account has been marked as active.').
        """
        # The endpoint needs an (empty) JSONString body.
        data = {
            'JSONString': ''
        }
        url = base_url + account_id + '/active'
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)
    def mark_an_account_as_inactive(self, account_id):
        """Mark an account as inactive.
        Args:
            account_id(str): Account id.
        Returns:
            str: Success message('The account has been marked as inactive.').
        """
        data = {
            'JSONString': ''
        }
        url = base_url + account_id + '/inactive'
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)
    def list_of_transactions(self, parameters=None):
        """List all involved transactions for a given account.
        Args:
            parameters(dict): Dictionary containing values for account id,
                date, amount, filter_by, transaction type and sort column.
        Returns:
            instance: Transaction list object.
        """
        url = base_url + 'transactions'
        resp = zoho_http_client.get(url, self.details, parameters)
        return parser.get_transactions_list(resp)
    def delete_a_transaction(self, transaction_id):
        """Delete the transaction.
        Args:
            transaction_id(str): Transaction id.
        Returns:
            str: Success message('The transaction has been deleted.').
        """
        url = base_url + 'transactions/' + transaction_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/ChartOfAccountsApi.py",
"copies": "1",
"size": "5121",
"license": "mit",
"hash": -3197973844767088600,
"line_mean": 27.2928176796,
"line_max": 87,
"alpha_frac": 0.578012107,
"autogenerated": false,
"ratio": 4.221764220939819,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5299776327939819,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.ContactParser import ContactParser
from Api import Api
from json import dumps
# Module-level singletons shared by all ContactPersonsApi methods.
base_url = Api().base_url + 'contacts/'
parser = ContactParser()
# Single binding; the original chained "zoho_http_client = zoho_http_client ="
# which was a redundant duplicated assignment target.
zoho_http_client = ZohoHttpClient()
class ContactPersonsApi:
    """ContactPersonsApi class is used to:
    1.To get the list of contact persons of a contact with pagination.
    2.To get the details of a contact person.
    3.To create a contact person for a contact.
    4.To update an existing contact person.
    5.To delete a contact person.
    6.To mark a contact person as primary for the contact.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize ContactPersons Api using user's authtoken and
        organization id.
        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id,
        }
    def get_contact_persons(self, contact_id):
        """List contact persons of a contact with pagination.
        Args:
            contact_id(str): Contact id of a contact.
        Returns:
            instance: Contact Persons list object.
        """
        url = base_url + contact_id + '/contactpersons'
        response = zoho_http_client.get(url, self.details)
        return parser.get_contact_persons(response)
    def get(self, contact_person_id):
        """Get the contact person's details.
        Args:
            contact_person_id(str): Contact person id.
        Returns:
            instance: Contact Person object.
        """
        url = base_url + 'contactpersons/' + contact_person_id
        response = zoho_http_client.get(url, self.details)
        return parser.get_contact_person(response)
    def create(self, contact_person):
        """Create a contact person for a contact.
        Args:
            contact_person(instance): Contact person object.
        Returns:
            instance: Contact person object.
        """
        url = base_url + 'contactpersons'
        json_object = dumps(contact_person.to_json())
        # NOTE: the original left a Python-2 debug `print json_object` here;
        # removed — it broke Python 3 and wrote request payloads to stdout.
        data = {
            'JSONString': json_object
        }
        response = zoho_http_client.post(url, self.details, data)
        return parser.get_contact_person(response)
    def update(self, contact_person_id, contact_person):
        """Update an existing contact person.
        Args:
            contact_person_id(str): Contact person id.
            contact_person(instance): Contact person object.
        Returns:
            instance: Contact person object.
        """
        url = base_url + 'contactpersons/' + contact_person_id
        json_object = dumps(contact_person.to_json())
        data = {
            'JSONString': json_object
        }
        response = zoho_http_client.put(url, self.details, data)
        return parser.get_contact_person(response)
    def delete(self, contact_person_id):
        """Delete a contact person.
        Args:
            contact_person_id(str): Contact person id of a contact person.
        Returns:
            str: Success message('The contact person has been deleted').
        """
        url = base_url + 'contactpersons/' + contact_person_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
    def mark_as_primary_contact(self, contact_person_id):
        """Mark a contact person as primary for the contact.
        Args:
            contact_person_id(str): Contact person id of a contact person.
        Returns:
            str: Success message('This contact person has been marked as
                your primary contact person.').
        """
        url = base_url + 'contactpersons/' + contact_person_id + '/primary'
        response = zoho_http_client.post(url, self.details, '')
        return parser.get_message(response)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/ContactPersonsApi.py",
"copies": "1",
"size": "4091",
"license": "mit",
"hash": 2071456816183093200,
"line_mean": 29.5298507463,
"line_max": 75,
"alpha_frac": 0.5996088976,
"autogenerated": false,
"ratio": 4.21318228630278,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.027446354414678777,
"num_lines": 134
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.CreditNotesParser import CreditNotesParser
from os.path import basename
from json import dumps
from books.api.Api import Api
base_url = Api().base_url + 'creditnotes/'
parser = CreditNotesParser()
zoho_http_client = ZohoHttpClient()
class CreditNotesApi:
    """Creditnotes api class is used to
    1.List Credits notes with pagination.
    2.Get details of a credit note.
    3.Create a credit note for a customer.
    4.Update an existing creditnote.
    5.Delete an existing creditnote.
    6.Change an existing creditnote status to open.
    7.Mark an existing creditnote as void.
    8.Email a creditnote to a customer.
    9.Get email history of a credit note.
    10.Get email content of a credit note.
    11.Update the billing address of an existing credit note.
    12.Update the shipping address of an existing credit note.
    13.Get all credit note pdf templates.
    14.Update the pdf template associated with the creditnote.
    15.List invoices to which the credit note is applied.
    16.Apply credit note to existing invoices.
    17.Delete the credits applied to an invoice.
    18.List all refunds with pagination.
    19.List all refunds of an existing credit note.
    20.Get refund of a particular credit note.
    21.Refund credit note amount.
    22.Update the refunded transaction.
    23.Delete a credit note refund.
    24.Get history and comments of a credit note.
    25.Add a comment to an existing credit note.
    26.Delete a credit note comment.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize credit notes api using user's authtoken and organization
        id.
        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken':authtoken,
            'organization_id':organization_id
        }
    def get_credit_notes(self, parameter=None):
        """List all credit notes with pagination.
        Args:
            parameter(dict, optional): Filter with which the list has to be
                displayed. Defaults to None.
        Returns:
            instance: Credit notes list object.
        Raises:
            Books Exception: If status is not '200' or '201'.
        """
        response = zoho_http_client.get(base_url, self.details, parameter)
        return parser.creditnotes_list(response)
    def get_credit_note(self, creditnote_id, print_pdf=None, accept=None):
        """Get details of a credit note.
        Args:
            creditnote_id(str): Credit note id.
            print_pdf(bool, optional): Export credit note pdf with default
                option. Allowed values are true, false, on and off.
            accept(str, optional): Format query value; when supplied the raw
                file response is returned instead of a parsed object.
        Returns:
            instance or file: Credit note object, or the raw file when a
                pdf/accept format was requested.
        Raises:
            Books Exception: If status is not '200' or '201'.
        """
        url = base_url + creditnote_id
        if print_pdf is not None and accept is not None:
            query = {
                'print':str(print_pdf).lower(),
                'accept':accept
            }
            response = zoho_http_client.getfile(url, self.details, query)
            return response
        elif print_pdf is not None:
            query = {
                'print':str(print_pdf).lower()
            }
            # A truthy print flag on its own implies a pdf download.
            if print_pdf:
                query.update({
                    'accept':'pdf'
                })
                response = zoho_http_client.getfile(url, self.details, query)
                return response
            else:
                response = zoho_http_client.get(url, self.details, query)
                return parser.get_creditnote(response)
        elif accept is not None:
            query = {
                'accept':accept
            }
            response = zoho_http_client.getfile(url, self.details, query)
            return response
        else:
            response = zoho_http_client.get(url, self.details)
            return parser.get_creditnote(response)
    def create(self, credit_note, invoice_id=None, \
        ignore_auto_number_generation=None):
        """Creates a credit note.
        Args:
            credit_note(instance): Credit note object.
            invoice_id(str, optional): Invoice id for which invoice has to be
                applied.
            ignore_auto_number_generation(bool, optional): True to ignore auto
                number generation else False. If set True this parameter
                becomes mandatory.
        Returns:
            instance: Credit note object.
        """
        json_object = dumps(credit_note.to_json())
        data = {
            'JSONString': json_object
        }
        if invoice_id is not None and ignore_auto_number_generation is not \
            None:
            query = {
                'invoice_id':invoice_id,
                'ignore_auto_number_generation':ignore_auto_number_generation
            }
        elif invoice_id is not None:
            query = {
                'invoice_id':invoice_id
            }
        elif ignore_auto_number_generation is not None:
            query = {
                'ignore_auto_number_generation':ignore_auto_number_generation
            }
        else:
            query = None
        response = zoho_http_client.post(base_url, self.details, data, query)
        return parser.get_creditnote(response)
    def update(self, credit_note_id, credit_note, \
        ignore_auto_number_generation=None):
        """Update an existing credit note.
        Args:
            credit_note_id(str): Credit note id.
            credit_note(instance): Credit note object.
            ignore_auto_number_generation(bool, optional): True to ignore auto
                number generation. If this is set true then this parameter is
                mandatory.
        Returns:
            instance: Credit note object.
        """
        url = base_url + credit_note_id
        json_object = dumps(credit_note.to_json())
        data = {
            'JSONString': json_object
        }
        if ignore_auto_number_generation is not None:
            query = {
                'ignore_auto_number_generation':ignore_auto_number_generation
            }
        else:
            query = None
        response = zoho_http_client.put(url, self.details, data, query)
        return parser.get_creditnote(response)
    def delete(self, creditnote_id):
        """Delete an existing credit note.
        Args:
            creditnote_id(str): Credit note id.
        Returns:
            str: Success message('The credit note has been deleted.').
        """
        url = base_url + creditnote_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
    def convert_to_open(self, creditnote_id):
        """Change an existing credit note status to open.
        Args:
            creditnote_id(str): Credit note id.
        Returns:
            str: Success message('The status of the credit note has been
                changed to open.').
        """
        url = base_url + creditnote_id + '/status/open'
        response = zoho_http_client.post(url, self.details, '')
        return parser.get_message(response)
    def void_credit_note(self, creditnote_id):
        """Mark an existing credit note as void.
        Args:
            creditnote_id(str): Credit note id.
        Returns:
            str: Success message('The credit note has been marked as void.').
        """
        url = base_url + creditnote_id + '/status/void'
        response = zoho_http_client.post(url, self.details, '')
        return parser.get_message(response)
    def email_credit_note(self, creditnote_id, email, attachment=None,
                          customer_id=None):
        """Email a credit note to the customer.
        Args:
            creditnote_id(str): Creditnote id.
            email(instance): Email object.
            attachment(list of dict, optional): List of dictionary containing
                details of the files to be attached.
            customer_id(str, optional): Id of the customer.
        Returns:
            str: Success message('Your credit note has been sent.').
        """
        url = base_url + creditnote_id + '/email'
        json_object = dumps(email.to_json())
        data = {
            'JSONString': json_object
        }
        # Build the multipart attachment list and the optional customer_id
        # query depending on which optional arguments were supplied.
        if attachment is not None and customer_id is not None:
            query = {
                'customer_id':customer_id
            }
            file_list = []
            for value in attachment:
                attachments = {
                    'attachments': {
                        'filename':basename(value),
                        'content':open(value).read()
                    }
                }
                file_list.append(attachments)
        elif attachment is not None:
            file_list = []
            for value in attachment:
                attachments = {
                    'attachments': {
                        'filename':basename(value),
                        'content':open(value).read()
                    }
                }
                file_list.append(attachments)
            query = None
        elif customer_id is not None:
            query = {
                'customer_id': customer_id
            }
            file_list = None
        else:
            query = None
            file_list = None
        response = zoho_http_client.post(url, self.details, data, query,
                                         file_list)
        return parser.get_message(response)
    def email_history(self, creditnote_id):
        """Get email history of a credit note.
        Args:
            creditnote_id(str): Credit note id.
        Returns:
            instance: Email object.
        """
        url = base_url + creditnote_id + '/emailhistory'
        response = zoho_http_client.get(url, self.details)
        return parser.email_history(response)
    def get_email_content(self, creditnote_id, email_template_id=None):
        """Get email content of a credit note.
        Args:
            creditnote_id(str): Creditnote id.
            email_template_id(str, optional): Email template id.
        Returns:
            instance: Email object.
        """
        url = base_url + creditnote_id + '/email'
        if email_template_id is not None:
            query = {
                'email_template_id': email_template_id
            }
        else:
            query = None
        response = zoho_http_client.get(url, self.details, query)
        return parser.email(response)
    def update_billing_address(self, creditnote_id, address,
                               is_update_customer=None):
        """Update billing address.
        Args:
            creditnote_id(str): Credit note id.
            address(instance): Address object.
            is_update_customer(bool, optional): True to update customer else
                False.
        Returns:
            instance: Address object.
        """
        url = base_url + creditnote_id + '/address/billing'
        json_object = dumps(address.to_json())
        data = {
            'JSONString': json_object
        }
        if is_update_customer is not None:
            query = {
                'is_update_customer': is_update_customer
            }
        else:
            query = None
        response = zoho_http_client.put(url, self.details, data, query)
        return parser.get_billing_address(response)
    def update_shipping_address(self, creditnote_id, address,
                                is_update_customer=None):
        """Update shipping address.
        Args:
            creditnote_id(str): Credit note id.
            address(instance): Address object.
            is_update_customer(bool, optional): True to update customer
                else False.
        Returns:
            instance: Address object.
        """
        url = base_url + creditnote_id + '/address/shipping'
        json_object = dumps(address.to_json())
        data = {
            'JSONString': json_object
        }
        if is_update_customer is not None:
            query = {
                'is_update_customer': is_update_customer
            }
        else:
            query = None
        response = zoho_http_client.put(url, self.details, data, query)
        return parser.get_shipping_address(response)
    def list_credit_note_template(self):
        """Get all credit note pdf templates.
        Returns:
            list of instance: List of templates object.
        """
        url = base_url + 'templates'
        response = zoho_http_client.get(url, self.details)
        return parser.list_templates(response)
    def update_credit_note_template(self, creditnote_id, template_id):
        """Update credit note template.
        Args:
            creditnote_id(str): Credit note id.
            template_id(str): Template id.
        Returns:
            str: Success message('The credit note has been updated.').
        """
        url = base_url + creditnote_id + '/templates/' + template_id
        response = zoho_http_client.put(url, self.details, '')
        return parser.get_message(response)
    #### Apply to Invoice------------------------------------------------------------------------------------------------------------
    def list_invoices_credited(self, creditnote_id):
        """List invoices to which credit note is applied.
        Args:
            creditnote_id(str): Credit note id.
        Returns:
            list of instance: List of invoices credited object.
        Raises:
            Books Exception: If status is not '200' or '201'.
        """
        url = base_url + creditnote_id + '/invoices'
        response = zoho_http_client.get(url, self.details)
        return parser.list_invoices_credited(response)
    def credit_to_invoice(self, creditnote_id, invoice):
        """Apply the credit note to the given existing invoices.
        Args:
            creditnote_id(str): Credit note id.
            invoice(list of instance): Invoice objects carrying invoice_id
                and amount_applied.
        Returns:
            list of invoices: List of invoice objects.
        """
        url = base_url + creditnote_id + '/invoices'
        invoices = {
            'invoices': []
        }
        for value in invoice:
            data = {}
            data['invoice_id'] = value.get_invoice_id()
            data['amount_applied'] = value.get_amount_applied()
            invoices['invoices'].append(data)
        json_string = {
            'JSONString': dumps(invoices)
        }
        response = zoho_http_client.post(url, self.details, json_string)
        return parser.credit_to_invoice(response)
    def delete_invoices_credited(self, creditnote_id, creditnote_invoice_id):
        """Delete the credits applied to an invoice.
        Args:
            creditnote_id(str): Credit note id.
            creditnote_invoice_id(str): Credit note invoice id.
        Returns:
            str: Success message('Credits applied to an invoice have been
                deleted.').
        """
        url = base_url + creditnote_id + '/invoices/' + creditnote_invoice_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
    ## REFUND-----------------------------------------------------------------------------------------------------------------------
    def list_credit_note_refunds(self, customer_id=None, sort_column=None):
        """List all refunds with pagination.
        Args:
            customer_id(str, optional): Customer id.
            sort_column(str, optional): Sort refund list. Allowed values are
                refund_mode, reference_number, date, creditnote_number,
                customer_name, amount_bcy and amount_fcy.
        Returns:
            instance: Credit notes refund list object.
        """
        url = base_url + 'refunds'
        if customer_id is not None and sort_column is not None:
            parameter = {
                'customer_id':customer_id,
                'sort_column':sort_column
            }
        elif sort_column is not None or customer_id is not None:
            parameter = {
                'sort_column': sort_column
            } if sort_column is not None else {
                'customer_id': customer_id
            }
        else:
            parameter = None
        response = zoho_http_client.get(url, self.details, parameter)
        return parser.creditnote_refunds(response)
    def list_refunds_of_credit_note(self, creditnote_id):
        """List all refunds of an existing credit note.
        Args:
            creditnote_id(str): Credit note id.
        Returns:
            instance: Creditnotes refund list.
        """
        url = base_url + creditnote_id + '/refunds'
        response = zoho_http_client.get(url, self.details)
        return parser.creditnote_refunds(response)
    def get_credit_note_refund(self, creditnote_id, creditnote_refund_id):
        """Get credit note refund.
        Args:
            creditnote_id(str): Credit note id.
            creditnote_refund_id(str): Creditnote refund id.
        Returns:
            instance: Creditnote refund object.
        """
        url = base_url + creditnote_id + '/refunds/' + creditnote_refund_id
        response = zoho_http_client.get(url, self.details)
        return parser.get_creditnote_refund(response)
    def refund_credit_note(self, creditnote_id, creditnote):
        """Refund credit note amount.
        Args:
            creditnote_id(str): Credit note id.
            creditnote(instance): Credit note object.
        Returns:
            instance: Credit note refund object.
        """
        url = base_url + creditnote_id + '/refunds'
        json_object = dumps(creditnote.to_json())
        data = {
            'JSONString': json_object
        }
        response = zoho_http_client.post(url, self.details, data)
        return parser.get_creditnote_refund(response)
    def update_credit_note_refund(self, creditnote_id, creditnote_refund_id,
                                  creditnote):
        """Update the refunded transaction.
        Args:
            creditnote_id(str): Credit note id.
            creditnote_refund_id(str): Credit note refund id.
            creditnote(instance): Credit note object.
        Returns:
            instance: Creditnote refund object.
        """
        url = base_url + creditnote_id + '/refunds/' + creditnote_refund_id
        json_object = dumps(creditnote.to_json())
        data = {
            'JSONString': json_object
        }
        response = zoho_http_client.put(url, self.details, data)
        return parser.get_creditnote_refund(response)
    def delete_credit_note_refund(self, creditnote_id, creditnote_refund_id):
        """Delete a credit note refund.
        Args:
            creditnote_id(str): Credit note id.
            creditnote_refund_id(str): Credit note refund id.
        Returns:
            str: Success message('The refund has been successfully deleted.').
        """
        url = base_url + creditnote_id + '/refunds/' + creditnote_refund_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
    ## Comments and History----------------------------------------------------------------------------------------------------------
    def list_creditnote_comments_history(self, creditnote_id):
        """Get history and comments of a credit note.
        Args:
            creditnote_id(str): Credit note id.
        Returns:
            instance: Comments list object.
        """
        url = base_url + creditnote_id + '/comments'
        response = zoho_http_client.get(url, self.details)
        return parser.comments_list(response)
    def add_comment(self, creditnote_id, comments):
        """Add a comment to an existing credit note.
        Args:
            creditnote_id(str): Credit note id.
            comments(instance): Comments object.
        Returns:
            instance: Comments object.
        """
        url = base_url + creditnote_id + '/comments'
        data = {}
        data['description'] = comments.get_description()
        json_string = {
            'JSONString': dumps(data)
        }
        response = zoho_http_client.post(url, self.details, json_string)
        return parser.get_comment(response)
    def delete_comment(self, creditnote_id, comment_id):
        """Delete a credit note comment.
        Args:
            creditnote_id(str): Credit note id.
            comment_id(str): Comment id.
        Returns:
            str: Success message('The comment has been deleted.').
        """
        url = base_url + creditnote_id + '/comments/' + comment_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/CreditNotesApi.py",
"copies": "1",
"size": "21312",
"license": "mit",
"hash": 1381149148898074600,
"line_mean": 32.6151419558,
"line_max": 129,
"alpha_frac": 0.5508633634,
"autogenerated": false,
"ratio": 4.3996696944673825,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5450533057867383,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.CustomerPaymentsParser import CustomerPaymentsParser
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'customerpayments/'
parser = CustomerPaymentsParser()
zoho_http_client = ZohoHttpClient()
class CustomerPaymentsApi:
    """CustomerPaymentsApi class is used:
    1.To list all the payments made by the customer.
    2.To get the details of the customer payment.
    3.To create a payment made by the customer.
    4.To update a payment made by the customer.
    5.To delete an existing customer payment.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize Customer payment's api using user's authtoken
        and organization id.
        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
        }
    def get_customer_payments(self, parameter=None):
        """List all customer payments with pagination.
        Args:
            parameter(dict, optional): Filter with which the list has
                to be displayed. Default to None.
        Returns:
            instance: Customer payments list object.
        """
        response = zoho_http_client.get(base_url, self.details, parameter)
        return parser.customer_payments(response)
    def get(self, payment_id):
        """Get details of a customer payment.
        Args:
            payment_id(str): Payment id of the customer.
        Returns:
            instance: Customer payment object.
        """
        url = base_url + payment_id
        response = zoho_http_client.get(url, self.details)
        return parser.get_customer_payment(response)
    def create(self, customer_payment):
        """Create a payment made by the customer.
        Args:
            customer_payment(instance): Customer payment object.
        Returns:
            instance: Customer payment object.
        """
        json_object = dumps(customer_payment.to_json())
        data = {
            'JSONString': json_object
        }
        response = zoho_http_client.post(base_url, self.details, data)
        return parser.get_customer_payment(response)
    def update(self, payment_id, customer_payment):
        """Update a payment made by a customer.
        Args:
            payment_id(str): Payment id.
            customer_payment(instance): Customer payment object.
        Returns:
            instance: Customer payment object.
        """
        url = base_url + payment_id
        json_object = dumps(customer_payment.to_json())
        data = {
            'JSONString': json_object
        }
        response = zoho_http_client.put(url, self.details, data)
        return parser.get_customer_payment(response)
    def delete(self, payment_id):
        """Delete an existing customer payment.
        Args:
            payment_id(str): Payment id.
        Returns:
            str: Success message ('The payment has been deleted.')
        """
        url = base_url + payment_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/CustomerPaymentsApi.py",
"copies": "1",
"size": "3390",
"license": "mit",
"hash": 2193655016668766000,
"line_mean": 28.7368421053,
"line_max": 75,
"alpha_frac": 0.5997050147,
"autogenerated": false,
"ratio": 4.340588988476313,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.05148122523428413,
"num_lines": 114
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.ExpensesParser import ExpensesParser
from os.path import basename
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'expenses/'
parser = ExpensesParser()
zoho_http_client = ZohoHttpClient()
class ExpensesApi:
    """Expenses Api is used to:
    1.List expenses with pagination.
    2.Get the details of an expense.
    3.Create a billable or non-billable expense.
    4.Update an existing expense.
    5.Delete an expense.
    6.Get history and comments of an expense.
    7.Returns the receipt attached to an expense.
    8.Attach a receipt to an expense.
    9.Delete the receipt attached to the expense.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize Expenses Api using user's authtoken and organization
        id.

        Args:
            authtoken(str): User's Authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    @staticmethod
    def _read_receipt(receipt):
        """Build the attachments list for a receipt file path.

        The file is opened with ``with`` so the handle is closed after
        reading (the previous inline ``open(receipt).read()`` calls
        leaked the file handle).

        Args:
            receipt(str): Path of the receipt file, or None.

        Returns:
            list: Attachment structure expected by the HTTP client, or
                None when no receipt is given.
        """
        if receipt is None:
            return None
        # NOTE(review): file is read in text mode, same as the original
        # code — confirm binary receipts (images/pdf) survive this.
        with open(receipt) as receipt_file:
            content = receipt_file.read()
        return [{
            'receipt': {
                'filename': basename(receipt),
                'content': content
                }
            }]

    def get_expenses(self, parameter=None):
        """List expenses with pagination.

        Args:
            parameter(dict, optional): Filter with which expenses list has to
                be displayed. Defaults to None.

        Returns:
            instance: Expenses list object.
        """
        resp = zoho_http_client.get(base_url, self.details, parameter)
        return parser.get_list(resp)

    def get(self, expense_id):
        """Get details of an expense.

        Args:
            expense_id(str): Expense id.

        Returns:
            instance: Expense object.
        """
        url = base_url + expense_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_expense(resp)

    def create(self, expense, receipt=None):
        """Create an expense.

        Args:
            expense(instance): Expense object.
            receipt(file, optional): Expense receipt file to attach. Allowed
                Extensions: gif, png, jpeg, jpg, bmp and pdf.

        Returns:
            instance: Expense object.
        """
        data = {
            'JSONString': dumps(expense.to_json())
            }
        attachments = self._read_receipt(receipt)
        resp = zoho_http_client.post(base_url, self.details, data, None,
                                     attachments)
        return parser.get_expense(resp)

    def update(self, expense_id, expense, receipt=None):
        """Update an existing expense.

        Args:
            expense_id(str): Expense id.
            expense(instance): Expense object.
            receipt(file, optional): Expense receipt file to attach. Allowed
                Extensions: gif, png, jpeg, jpg, bmp and pdf.

        Returns:
            instance: Expense object.
        """
        url = base_url + expense_id
        data = {
            'JSONString': dumps(expense.to_json())
            }
        attachments = self._read_receipt(receipt)
        resp = zoho_http_client.put(url, self.details, data, None,
                                    attachments)
        return parser.get_expense(resp)

    def delete(self, expense_id):
        """Delete an existing expense.

        Args:
            expense_id(str): Expense id.

        Returns:
            str: Success message('The expense has been deleted.').
        """
        url = base_url + expense_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def list_comments_history(self, expense_id):
        """Get history and comments of an expense.

        Args:
            expense_id(str): Expense id.

        Returns:
            instance: comments list object.
        """
        url = base_url + expense_id + '/comments'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_comments(resp)

    def get_receipt(self, expense_id, preview=None):
        """Get the receipt attached to an expense.

        Args:
            expense_id(str): Expense id.
            preview(bool, optional): True to get the thumbnail of the receipt.

        Returns:
            file: Returns the receipt attached to the expense.
        """
        url = base_url + expense_id + '/receipt'
        if preview is not None:
            query = {
                'preview': preview
                }
        else:
            query = None
        resp = zoho_http_client.getfile(url, self.details, query)
        return resp

    def add_receipt(self, expense_id, receipt):
        """Attach a receipt to an expense.

        Args:
            expense_id(str): Expense id.
            receipt(file): Receipt to be attached.Allowed Extensions: gif, png,
                jpeg, jpg, bmp, pdf, xls, xlsx, doc and docx.

        Returns:
            str: Success message('The expense receipt has been attached.').
        """
        url = base_url + expense_id + '/receipt'
        attachments = self._read_receipt(receipt)
        data = {
            'JSONString': ''
            }
        resp = zoho_http_client.post(url, self.details, data, None, attachments)
        return parser.get_message(resp)

    def delete_receipt(self, expense_id):
        """Delete the receipt attached to the expense.

        Args:
            expense_id(str): Expense id.

        Returns:
            str: Success message('The attached expense receipt has been
                deleted.').
        """
        url = base_url + expense_id + '/receipt'
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/ExpensesApi.py",
"copies": "1",
"size": "6395",
"license": "mit",
"hash": 436793063607489660,
"line_mean": 28.4700460829,
"line_max": 80,
"alpha_frac": 0.5390148554,
"autogenerated": false,
"ratio": 4.297715053763441,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.02368046599667038,
"num_lines": 217
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.JournalsParser import JournalsParser
from books.api.Api import Api
from json import dumps
# Module-level endpoint root, shared HTTP client and response parser
# used by every JournalsApi method below.
base_url = Api().base_url + 'journals/'
zoho_http_client = ZohoHttpClient()
parser = JournalsParser()
class JournalsApi:
    """Wrapper around the manual-journals endpoints.

    Supports listing journals, fetching a single journal, and
    creating, updating and deleting journals.
    """

    def __init__(self, authtoken, organization_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def get_journals(self, parameter=None):
        """Fetch the journals list.

        Args:
            parameter(dict): Filter with which the list has to be displayed.

        Returns:
            instance: Journals List object.
        """
        response = zoho_http_client.get(base_url, self.details, parameter)
        return parser.get_list(response)

    def get(self, journal_id):
        """Fetch a single journal.

        Args:
            journal_id(str): Journal id.

        Returns:
            instance: Journals object.
        """
        response = zoho_http_client.get(base_url + journal_id, self.details)
        return parser.get_journal(response)

    def create(self, journal):
        """Create a new journal.

        Args:
            journal(instance): Journal object.

        Returns:
            instance: Journal object.
        """
        payload = {'JSONString': dumps(journal.to_json())}
        return parser.get_journal(
            zoho_http_client.post(base_url, self.details, payload))

    def update(self, journal_id, journal):
        """Update an existing journal.

        Args:
            journal_id(str): Journal id.
            journal(instance): Journal object.

        Returns:
            instance: Journal object.
        """
        payload = {'JSONString': dumps(journal.to_json())}
        return parser.get_journal(
            zoho_http_client.put(base_url + journal_id, self.details,
                                 payload))

    def delete(self, journal_id):
        """Delete the given journal.

        Args:
            journal_id(str): Journal id.

        Returns:
            str: Success message.
        """
        return parser.get_message(
            zoho_http_client.delete(base_url + journal_id, self.details))
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/JournalsApi.py",
"copies": "1",
"size": "2779",
"license": "mit",
"hash": 5298208345516670000,
"line_mean": 24.7314814815,
"line_max": 76,
"alpha_frac": 0.5674703131,
"autogenerated": false,
"ratio": 4.039244186046512,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5106714499146512,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.ProjectsParser import ProjectsParser
from books.api.Api import Api
from json import dumps
# Module-level endpoint root, shared HTTP client and response parser
# used by every ProjectsApi method below.
base_url = Api().base_url + 'projects/'
zoho_http_client = ZohoHttpClient()
parser = ProjectsParser()
class ProjectsApi:
    """This class is used to
    1. List all projects with pagination.
    2.Get details of a project.
    3.Create a new project.
    4.Update an existing project.
    5.Delete an existing project.
    6.Mark a project as active.
    7.Mark a project as inactive.
    8.Clone a project.
    9.Get list of tasks added to a project.
    10.Get details of a task.
    11.Add task to a project.
    12.Update details of a task.
    13.Delete a task added to a project.
    14.Get list of users associated with a project.
    15.Get details of a user in a project.
    16.Assign users to a project.
    17.Invite and add user to the project.
    18.Update details of a user.
    19.Remove user from the project.
    20.List all time entries with pagination.
    21.Get details of a time entry.
    22.Logging time entries.
    23.Update logged time entry.
    24.Deleting logged time entry.
    25.Deleting time entries.
    26.Get current running timer.
    27.Start tracking time spent.
    28.Stop tracking time.
    29.Get comments for a project.
    30.Post comment to a project.
    31.Delete a comment from a project.
    32.List invoices created for this project.
    """
    def __init__(self, authtoken, organization_id):
        """Initialize Projects api using user's authtoken and organization id.

        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def get_projects(self, parameters=None):
        """List all projects with pagination.

        Args:
            parameters(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            instance: Projects list object.
        """
        resp = zoho_http_client.get(base_url, self.details, parameters)
        return parser.get_projects_list(resp)

    def get_project(self, project_id):
        """Get details of a project.

        Args:
            project_id(str): Project id.

        Returns:
            instance: Project object.
        """
        url = base_url + project_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_project(resp)

    def create_project(self, project):
        """Create a new project.

        Args:
            project(instance): Project object.

        Returns:
            instance: Projects object
        """
        json_object = dumps(project.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.post(base_url, self.details, data)
        return parser.get_project(resp)

    def update_project(self, project_id, project):
        """Update an existing project.

        Args:
            project_id(str): Project id.
            project(instance): Project object.

        Returns:
            instance: Project object.
        """
        url = base_url + project_id
        json_object = dumps(project.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_project(resp)

    def delete_project(self, project_id):
        """Delete an existing project.

        Args:
            project_id(str): Project id.

        Returns:
            str: Success message('The project has been deleted.').
        """
        url = base_url + project_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def activate_project(self, project_id):
        """Mark project as active.

        Args:
            project_id(str): Project id.

        Returns:
            str: Success message('The selected project has been marked as
                active.').
        """
        url = base_url + project_id + '/active'
        resp = zoho_http_client.post(url, self.details, '')
        return parser.get_message(resp)

    def inactivate_project(self, project_id):
        """Mark project as inactive.

        Args:
            project_id(str): Project id.

        Returns:
            str: Success message('The selected project has been marked as
                inactive.').
        """
        url = base_url + project_id + '/inactive'
        resp = zoho_http_client.post(url, self.details, '')
        return parser.get_message(resp)

    def clone_project(self, project_id, project):
        """Clone a project.

        Args:
            project_id(str): Project id.
            project(instance): Project object.

        Returns:
            instance: Project object.
        """
        url = base_url + project_id + '/clone'
        json_object = dumps(project.to_json())
        data = {
            'JSONString': json_object
            }
        # Debug print removed (was Python-2-only `print data`).
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_project(resp)

    def get_tasks(self, project_id, sort_column=None):
        """Get list of tasks added to a project.

        Args:
            project_id(str): Project id.
            sort_column(str, optional): Sort column. Allowed Values: task_name,
                billed_hours, log_time and un_billed_hours.

        Returns:
            instance: Task list object.
        """
        url = base_url + project_id + '/tasks'
        if sort_column is not None:
            parameter = {
                'sort_column': sort_column
                }
        else:
            parameter = None
        resp = zoho_http_client.get(url, self.details, parameter)
        return parser.get_tasks_list(resp)

    def get_task(self, project_id, task_id):
        """Get details of a task.

        Args:
            project_id(str): Project id.
            task_id(str): Task id.

        Returns:
            instance: Task object.
        """
        url = base_url + project_id + '/tasks/' + task_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_task(resp)

    def add_task(self, project_id, task):
        """Add task to project.

        Args:
            project_id(str): Project id.
            task(instance): Task object.

        Returns:
            instance: Task object.
        """
        url = base_url + project_id + '/tasks'
        json_object = dumps(task.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_task(resp)

    def update_task(self, project_id, task_id, task):
        """Update details of a task.

        Args:
            project_id(str): Project id.
            task_id(str): Task id.
            task(instance): Task object.

        Returns:
            instance: Task object.
        """
        url = base_url + project_id + '/tasks/' + task_id
        json_object = dumps(task.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_task(resp)

    def delete_task(self, project_id, task_id):
        """Delete task added to a project.

        Args:
            project_id(str): Project id.
            task_id(str): Task id.

        Returns:
            str: Success message('The task has been deleted.').
        """
        url = base_url + project_id + '/tasks/' + task_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def get_users(self, project_id):
        """Get list of users associated with a project.

        Args:
            project_id(str): Project id.

        Returns:
            instance: Users list object.
        """
        url = base_url + project_id + '/users'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_users(resp)

    def get_user(self, project_id, user_id):
        """Get details of a user in a project.

        Args:
            project_id(str): Project id.
            user_id(str): User id.

        Returns:
            instance: User object.

        Raises:
            Books Exception: If status is not '200' or '201'.
        """
        url = base_url + project_id + '/users/' + user_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_user(resp)

    def assign_users(self, project_id, users):
        """Assign users to a project.

        Args:
            project_id(str): Project id.
            users(list of instance): List of users object.

        Returns:
            instance: Users list object.
        """
        url = base_url + project_id + '/users'
        users_obj = {
            'users': []
            }
        for value in users:
            user = value.to_json()
            users_obj['users'].append(user)
        json_object = dumps(users_obj)
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_users(resp)

    def invite_user(self, project_id, user):
        """Invite and add user to the project.

        Args:
            project_id(str): Project id.
            user(instance): User object.

        Returns:
            instance: User object.
        """
        url = base_url + project_id + '/users/invite'
        # Debug prints removed (were Python-2-only `print` statements).
        json_object = dumps(user.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_user(resp)

    def update_user(self, project_id, user_id, user):
        """Update details of a user.

        Args:
            project_id(str): Project id.
            user_id(str): User id.
            user(instance): User object.

        Returns:
            instance: User object.
        """
        url = base_url + project_id + '/users/' + user_id
        json_object = dumps(user.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_user(resp)

    def delete_user(self, project_id, user_id):
        """Remove user from the project.

        Args:
            project_id(str): Project id.
            user_id(str): User id.

        Returns:
            str: Success message('The staff has been removed.').
        """
        url = base_url + project_id + '/users/' + user_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def get_time_entries(self, parameters=None):
        """List all time entries with pagination.

        Args:
            parameters(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            instance: Time entries list object.
        """
        url = base_url + 'timeentries'
        resp = zoho_http_client.get(url, self.details, parameters)
        return parser.get_time_entries_list(resp)

    def get_time_entry(self, time_entry_id):
        """Get details of time entry.

        Args:
            time_entry_id(str): Time entry id.

        Returns:
            instance: Time entry object.
        """
        url = base_url + 'timeentries/' + time_entry_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_time_entry(resp)

    def log_time_entries(self, time_entry):
        """Logging time entries.

        Args:
            time_entry(instance): Time entry object.

        Returns:
            instance: Time entry.
        """
        url = base_url + 'timeentries'
        json_object = dumps(time_entry.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_time_entry(resp)

    def update_time_entry(self, time_entry_id, time_entry):
        """Update logged time entry.

        Args:
            time_entry_id(str): Time entry id.
            time_entry(instance): Time entry object.

        Returns:
            instance: Time entry object.
        """
        url = base_url + 'timeentries/' + time_entry_id
        json_object = dumps(time_entry.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_time_entry(resp)

    def delete_time_entry(self, time_entry_id):
        """Delete time entry.

        Args:
            time_entry_id(str): Time entry id.

        Returns:
            str: Success message('The time entry has been deleted.').
        """
        url = base_url + 'timeentries/' + time_entry_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def delete_time_entries(self, time_entry_ids):
        """Delete time entries.

        Args:
            time_entry_ids(str): Id of the time entries to be deleted.

        Returns:
            str: Success message('The selected timesheet entries have been
                deleted.').
        """
        url = base_url + 'timeentries'
        param = {
            'time_entry_ids': time_entry_ids
            }
        resp = zoho_http_client.delete(url, self.details, param)
        return parser.get_message(resp)

    def get_timer(self):
        """Get current running timer.

        Returns:
            instance: Time entry object.

        Raises:
            Books Exception: If status is not '200' or '201'.
        """
        url = base_url + 'timeentries/runningtimer/me'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_time_entry(resp)

    def start_timer(self, timer_entry_id):
        """Start tracking time spent.

        Args:
            timer_entry_id(str): Timer entry id.

        Returns:
            instance: Time entry object.
        """
        url = base_url + 'timeentries/' + timer_entry_id + '/timer/start'
        data = {
            'JSONString': ''
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_time_entry(resp)

    def stop_timer(self):
        """Stop tracking time.

        Returns:
            instance: Time entry object.
        """
        url = base_url + 'timeentries/timer/stop'
        data = {
            'JSONString': ''
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_time_entry(resp)

    def get_comments(self, project_id):
        """Get list of comments for a project.

        Args:
            project_id(str): Project id.

        Returns:
            instance: Comments list object.
        """
        url = base_url + project_id + '/comments'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_comments(resp)

    def post_comment(self, project_id, comment):
        """Post comment to a project.

        Args:
            project_id(str): Project id.
            comment(instance): Comment object.

        Returns:
            instance: Comment object.
        """
        url = base_url + project_id + '/comments'
        data = {
            'description': comment.get_description()
            }
        json_string = {
            'JSONString': dumps(data)
            }
        resp = zoho_http_client.post(url, self.details, json_string)
        return parser.get_comment(resp)

    def delete_comment(self, project_id, comment_id):
        """Delete an existing comment.

        Args:
            project_id(str): Project id.
            comment_id(str): comment id.

        Returns:
            str: Success message('The comment has been deleted.').
        """
        url = base_url + project_id + '/comments/' + comment_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def get_invoices(self, project_id, sort_column=None):
        """List invoices created for this project.

        Args:
            project_id(str): Project id.
            sort_column(str, optional): Column to sort the invoices by.

        Returns:
            instance: Invoices list object.
        """
        url = base_url + project_id + '/invoices'
        if sort_column is not None:
            param = {
                'sort_column': sort_column
                }
        else:
            param = None
        resp = zoho_http_client.get(url, self.details, param)
        return parser.get_invoice_list(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/ProjectsApi.py",
"copies": "1",
"size": "16913",
"license": "mit",
"hash": 1925660577431593500,
"line_mean": 26.7717569787,
"line_max": 78,
"alpha_frac": 0.5443741501,
"autogenerated": false,
"ratio": 4.04520449653193,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.508957864663193,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.RecurringExpensesParser import RecurringExpensesParser
from books.api.Api import Api
from json import dumps
# Module-level endpoint root, response parser and shared HTTP client
# used by every RecurringExpensesApi method below.
base_url = Api().base_url + 'recurringexpenses/'
parser = RecurringExpensesParser()
zoho_http_client = ZohoHttpClient()
class RecurringExpensesApi:
    """Wrapper around the Recurring Expenses endpoints.

    Covers listing, retrieval, creation, update and deletion of
    recurring expenses, stopping and resuming them, listing the child
    expenses they generated, and reading their comments and history.
    """

    def __init__(self, authtoken, organization_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def get_recurring_expenses(self, param=None):
        """List recurring expenses with pagination.

        Args:
            param(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            instance: Recurring expenses list object.
        """
        return parser.get_list(
            zoho_http_client.get(base_url, self.details, param))

    def get(self, recurring_expense_id):
        """Fetch a single recurring expense.

        Args:
            recurring_expense_id(str): Recurring expense id.

        Returns:
            instance: Recurring expense object.
        """
        return parser.get_recurring_expense(
            zoho_http_client.get(base_url + recurring_expense_id,
                                 self.details))

    def create(self, recurring_expenses):
        """Create a recurring expense.

        Args:
            recurring_expenses(instance): Recurring expense object.

        Returns:
            instance: Recurring expense object.
        """
        payload = {'JSONString': dumps(recurring_expenses.to_json())}
        return parser.get_recurring_expense(
            zoho_http_client.post(base_url, self.details, payload))

    def update(self, recurring_expense_id, recurring_expenses):
        """Update an existing recurring expense.

        Args:
            recurring_expense_id(str): Recurring expense id.
            recurring_expenses(instance): Recurring expense object.

        Returns:
            instance: Recurring expense object.
        """
        payload = {'JSONString': dumps(recurring_expenses.to_json())}
        return parser.get_recurring_expense(
            zoho_http_client.put(base_url + recurring_expense_id,
                                 self.details, payload))

    def delete(self, recurring_expense_id):
        """Delete an existing recurring expense.

        Args:
            recurring_expense_id(str): Recurring expense id.

        Returns:
            str: Success message('The recurring expense has been deleted.').
        """
        return parser.get_message(
            zoho_http_client.delete(base_url + recurring_expense_id,
                                    self.details))

    def stop_recurring_expense(self, recurring_expense_id):
        """Stop an active recurring expense.

        Args:
            recurring_expense_id(str): Recurring expense id.

        Returns:
            str: Success message('The recurring expense has been stopped.').
        """
        return parser.get_message(
            zoho_http_client.post(
                base_url + recurring_expense_id + '/status/stop',
                self.details, ''))

    def resume_recurring_expense(self, recurring_expense_id):
        """Resume a stopped recurring expense.

        Args:
            recurring_expense_id(str): Recurring expense id.

        Returns:
            str: Success message('Resume a stopped recurring expense.').
        """
        return parser.get_message(
            zoho_http_client.post(
                base_url + recurring_expense_id + '/status/resume',
                self.details, ''))

    def list_child_expenses_created(self, recurring_expense_id,
                                    parameter=None):
        """List child expenses created from a recurring expense.

        Args:
            recurring_expense_id(str): Recurring Expense id.
            parameter(str, optional): Column to sort child expenses by.
                Allowed values are date, account_name, vendor_name,
                paid_through_account_name, customer_name and total.

        Returns:
            instance: Expense history object.
        """
        query = None if parameter is None else {'sort_column': parameter}
        return parser.get_expense_history(
            zoho_http_client.get(
                base_url + recurring_expense_id + '/expenses',
                self.details, query))

    def list_recurring_expense_comments_and_history(self,
                                                    recurring_expense_id):
        """Fetch history and comments of a recurring expense.

        Args:
            recurring_expense_id(str): Recurring expense id.

        Returns:
            instance: Comments list object.
        """
        return parser.get_comments(
            zoho_http_client.get(
                base_url + recurring_expense_id + '/comments',
                self.details))
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/RecurringExpensesApi.py",
"copies": "1",
"size": "5752",
"license": "mit",
"hash": -6827977661880919000,
"line_mean": 30.4316939891,
"line_max": 80,
"alpha_frac": 0.6081363004,
"autogenerated": false,
"ratio": 4.079432624113475,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5187568924513475,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.RecurringInvoiceParser import RecurringInvoiceParser
# Absolute import for consistency with the sibling API modules (the
# implicit relative `from Api import Api` only works on Python 2).
from books.api.Api import Api
from json import dumps

# Module-level endpoint root, response parser and shared HTTP client
# used by every RecurringInvoicesApi method below.
base_url = Api().base_url + 'recurringinvoices/'
parser = RecurringInvoiceParser()
zoho_http_client = ZohoHttpClient()
class RecurringInvoicesApi:
    """Wrapper around the Recurring Invoices endpoints.

    Covers listing, retrieval, creation, update and deletion of
    recurring invoices, stopping and resuming them, updating the pdf
    template associated with one, and reading its history/comments.
    """

    def __init__(self, authtoken, organization_id):
        """Initialize Recurring Invoices Api using user's authtoken and
        organization id.

        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def get_recurring_invoices(self, parameter=None):
        """List recurring invoices with pagination.

        Args:
            parameter(dict, optional): Filter with which the list has to be
                displayed. Defaults to None.

        Returns:
            instance: Recurring invoice list object.
        """
        resp = zoho_http_client.get(base_url, self.details, parameter)
        return parser.recurring_invoices(resp)

    def get_recurring_invoice(self, recurring_invoice_id):
        """Fetch a single recurring invoice.

        Args:
            recurring_invoice_id(str): Recurring invoice id.

        Returns:
            instance: Recurring invoice object.
        """
        resp = zoho_http_client.get(base_url + recurring_invoice_id,
                                    self.details)
        return parser.recurring_invoice(resp)

    def create(self, recurring_invoice):
        """Create a recurring invoice.

        Args:
            recurring_invoice(instance): Recurring invoice object.

        Returns:
            instance: Recurring invoice object.
        """
        payload = {'JSONString': dumps(recurring_invoice.to_json())}
        return parser.recurring_invoice(
            zoho_http_client.post(base_url, self.details, payload))

    def update(self, recurring_invoice_id, recurring_invoice):
        """Update an existing recurring invoice.

        Args:
            recurring_invoice_id(str): Recurring invoice id.
            recurring_invoice(instance): Recurring invoice object.

        Returns:
            instance: Recurring invoice object.
        """
        payload = {'JSONString': dumps(recurring_invoice.to_json())}
        return parser.recurring_invoice(
            zoho_http_client.put(base_url + recurring_invoice_id,
                                 self.details, payload))

    def delete(self, recurring_invoice_id):
        """Delete an existing recurring invoice.

        Args:
            recurring_invoice_id(str): Recurring invoice id.

        Returns:
            str: Success message('The recurring invoice has been deleted.').
        """
        return parser.get_message(
            zoho_http_client.delete(base_url + recurring_invoice_id,
                                    self.details))

    def stop_recurring_invoice(self, recurring_invoice_id):
        """Stop an active recurring invoice.

        Args:
            recurring_invoice_id(str): Recurring invoice id.

        Returns:
            str: Success message ('The recurring invoice has been stopped.').
        """
        return parser.get_message(
            zoho_http_client.post(
                base_url + recurring_invoice_id + '/status/stop',
                self.details, ''))

    def resume_recurring_invoice(self, recurring_invoice_id):
        """Resume a stopped recurring invoice.

        Args:
            recurring_invoice_id(str): Recurring invoice id.

        Returns:
            str: Success message ('The recurring invoice has been
                activated.').
        """
        return parser.get_message(
            zoho_http_client.post(
                base_url + recurring_invoice_id + '/status/resume',
                self.details, ''))

    def update_recurring_invoice_template(self,
                                          recurring_invoice_id, template_id):
        """Update the pdf template associated with the recurring invoice.

        Args:
            recurring_invoice_id(str): Recurring invoice id.
            template_id(str): Template id.

        Returns:
            str: Success message ('Recurring invoice information has been
                updated.').
        """
        return parser.get_message(
            zoho_http_client.put(
                base_url + recurring_invoice_id + '/templates/' + template_id,
                self.details, ''))

    def list_recurring_invoice_history(self, recurring_invoice_id):
        """List the complete history and comments of a recurring invoice.

        Args:
            recurring_invoice_id(str): Recurring invoice id.

        Returns:
            instance: Recurring invoice history and comments list object.
        """
        return parser.recurring_invoice_history_list(
            zoho_http_client.get(
                base_url + recurring_invoice_id + '/comments',
                self.details))
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/RecurringInvoicesApi.py",
"copies": "1",
"size": "5822",
"license": "mit",
"hash": -8886411586533889000,
"line_mean": 31.7078651685,
"line_max": 79,
"alpha_frac": 0.61370663,
"autogenerated": false,
"ratio": 4.274596182085169,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.538830281208517,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.SettingsParser import SettingsParser
from books.api.Api import Api
from json import dumps
# Module-level endpoint root, shared HTTP client and response parser
# used by every ItemsApi method below.
base_url = Api().base_url + 'items/'
zoho_http_client = ZohoHttpClient()
parser = SettingsParser()
class ItemsApi:
    """Wrapper around the Items endpoints.

    Provides listing, retrieval, creation, update and deletion of
    items, plus marking an item active or inactive.
    """

    def __init__(self, authtoken, organization_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's Authtoken.
            organization_id(str): User's Organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def list_items(self):
        """Fetch the list of all active items with pagination.

        Returns:
            instance: Items list object.
        """
        return parser.get_items(zoho_http_client.get(base_url, self.details))

    def get(self, item_id):
        """Fetch a single item.

        Args:
            item_id(str): Item id.

        Returns:
            instance: Item object.
        """
        return parser.get_item(
            zoho_http_client.get(base_url + item_id, self.details))

    def create(self, item):
        """Create a new item.

        Args:
            item(instance): Item object.

        Returns:
            instance: Item object.
        """
        payload = {'JSONString': dumps(item.to_json())}
        return parser.get_item(
            zoho_http_client.post(base_url, self.details, payload))

    def update(self, item_id, item):
        """Update an existing item.

        Args:
            item_id(str): Item id.
            item(instance): Item object.

        Returns:
            instance: Item object.
        """
        payload = {'JSONString': dumps(item.to_json())}
        return parser.get_item(
            zoho_http_client.put(base_url + item_id, self.details, payload))

    def delete_item(self, item_id):
        """Delete an item.

        Args:
            item_id(str): Item id.

        Returns:
            str: Success message('The item has been deleted.').
        """
        return parser.get_message(
            zoho_http_client.delete(base_url + item_id, self.details))

    def mark_item_as_active(self, item_id):
        """Mark an item as active.

        Args:
            item_id(str): Item id.

        Returns:
            str: Success message('The item has been marked as active.').
        """
        empty_payload = {'JSONString': ''}
        return parser.get_message(
            zoho_http_client.post(base_url + item_id + '/active',
                                  self.details, empty_payload))

    def mark_item_as_inactive(self, item_id):
        """Mark an item as inactive.

        Args:
            item_id(str): Item id.

        Returns:
            str: Success message('The item has been marked as inactive.').
        """
        empty_payload = {'JSONString': ''}
        return parser.get_message(
            zoho_http_client.post(base_url + item_id + '/inactive',
                                  self.details, empty_payload))
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/ItemsApi.py",
"copies": "1",
"size": "3555",
"license": "mit",
"hash": 1078325628554547500,
"line_mean": 24.0352112676,
"line_max": 74,
"alpha_frac": 0.5341772152,
"autogenerated": false,
"ratio": 3.902305159165752,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4936482374365752,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.SettingsParser import SettingsParser
from books.api.Api import Api
from json import dumps
# Module-level endpoint root, shared HTTP client and response parser
# used by every OrganizationsApi method below.
base_url = Api().base_url + 'organizations/'
zoho_http_client = ZohoHttpClient()
parser = SettingsParser()
class OrganizationsApi:
    """Organization api is used to
    1.List organizations.
    2.Get the details of an organization.
    3.Create an organization.
    4.Update an organization.
    """

    def __init__(self, authtoken, organization_id):
        """Initialize the Organizations api using authtoken and
        organization id.

        Args:
            authtoken(str): User's Authtoken.
            organization_id(str): User's Organization id.

        """
        # These credentials accompany every request made by this instance.
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def get_organizations(self):
        """Get list of all organizations.

        Returns:
            instance: Organizations list object.

        """
        response = zoho_http_client.get(base_url, self.details)
        return parser.get_organizations(response)

    def get(self, organization_id):
        """Get the details of an organization.

        Args:
            organization_id(str): Organization id.

        Returns:
            instance: Organization object.

        """
        response = zoho_http_client.get(base_url + organization_id,
                                        self.details)
        return parser.get_organization(response)

    def create(self, organization):
        """Create an organization.

        Args:
            organization(instance): Organization object.

        Returns:
            instance: Organization object.

        """
        payload = {
            'JSONString': dumps(organization.to_json())
            }
        response = zoho_http_client.post(base_url, self.details, payload)
        return parser.get_organization(response)

    def update(self, organization_id, organization):
        """Update an organization.

        Args:
            organization_id(str): Organization id.
            organization(instance): Organization object.

        Returns:
            instance: Organization object.

        """
        payload = {
            'JSONString': dumps(organization.to_json())
            }
        response = zoho_http_client.put(base_url + organization_id,
                                        self.details, payload)
        return parser.get_organization(response)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/OrganizationsApi.py",
"copies": "1",
"size": "2468",
"license": "mit",
"hash": -33816613064295616,
"line_mean": 25.8260869565,
"line_max": 71,
"alpha_frac": 0.5964343598,
"autogenerated": false,
"ratio": 4.314685314685315,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5411119674485315,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.SettingsParser import SettingsParser
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'settings/'
zoho_http_client = ZohoHttpClient()
parser = SettingsParser()
class SettingsApi:
    """This class is used to
    1.List preferences that are configured.
    2.Update the preferences that has been configured.
    3.Create a unit that can be associated to a line item.
    4.Delete a unit that has been associated to a line item.
    5.Get the details of invoice settings.
    6.Update the settings information of invoice.
    7.Get the details of notes and terms.
    8.Update invoice notes and terms.
    9.Get estimate settings.
    10.Update estimate settings.
    11.Get estimate notes and terms.
    12.Update estimate notes and terms.
    13.List creditnotes settings.
    14.Update creditnotes settings.
    15.Get creditnote notes and term
    16.Update creditnote notes and terms.
    17.List currencies.
    18.Get details of a currency.
    19.Create a currency.
    20.Update a currency.
    21.Delete a currency.
    22.List exchange rates.
    23.Get an exchange rate.
    24.Create an exchange rate.
    25.Update an exchange rate.
    26.Delete an exchange rate.
    27.List taxes.
    28.Get details of a tax.
    29.Create a tax.
    30.Update a tax.
    31.Delete a tax.
    32.Get a tax group.
    33.Create a tax group.
    34.Update a tax group.
    35.Delete a tax group.
    36.Get Opening balance.
    37.Create opening balance.
    38.Update opening balance.
    39.Delete opening balance.
    40.List auto payment reminder.
    41.Get an auto payment reminder.
    42.Enable auto reminder.
    43.Disable auto reminder.
    44.Update an auto reminder.
    45.List manual reminders.
    46.Get a manual reminder.
    47.Update a manual reminder.
    """

    def __init__(self, authtoken, organization_id):
        """Initialize Settings Api using authtoken and organization id.

        Args:
            authtoken(str): User's Authtoken.
            organization_id(str): User's Organization id.

        """
        # Sent along with every request made through this instance.
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def list_preferences(self):
        """List of preferences that are configured.

        Returns:
            instance: Preference list object.

        """
        url = base_url + 'preferences'
        resp = zoho_http_client.get(url, self.details)
        return parser.preference_list(resp)

    def update_preferences(self, preference):
        """Update preference.

        Args:
            preference(instance): Preference object.

        Returns:
            str: Success message('Preferences have been saved.').

        """
        url = base_url + 'preferences'
        json_object = dumps(preference.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_message(resp)

    def create_unit(self, unit):
        """Create a unit that can be associated to a line item.

        Args:
            unit(str): Unit. Eg: Kg.

        Returns:
            str: Success message('Unit added.').

        """
        url = base_url + 'units'
        data = {
            'unit': unit
            }
        json_string = {
            'JSONString': dumps(data)
            }
        resp = zoho_http_client.post(url, self.details, json_string)
        return parser.get_message(resp)

    def delete_unit(self, unit_id):
        """Delete a unit that has been associated to an item.

        Args:
            unit_id(str): Unit id.

        Returns:
            str: Success message('You have successfully deleted the unit.').

        """
        url = base_url + 'units/' + unit_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def get_invoice_settings(self):
        """Get the details of invoice settings.

        Returns:
            instance: Invoice settings object.

        """
        url = base_url + 'invoices'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_invoice_settings(resp)

    def update_invoice_settings(self, invoice_settings):
        """Update the settings information for invoices.

        Args:
            invoice_settings(instance): Invoice settings.

        Returns:
            instance: Invoice settings object.

        """
        url = base_url + 'invoices'
        json_object = dumps(invoice_settings.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_invoice_settings(resp)

    def get_invoice_notes_and_terms(self):
        """Get the details of invoice notes and terms.

        Returns:
            instance: Notes and terms object.

        """
        url = base_url + 'invoices/notesandterms'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_notes_and_terms(resp)

    def update_invoice_notes_and_terms(self, invoice_notes_and_terms):
        """Update the notes and terms for an invoice.

        Args:
            invoice_notes_and_terms(instance): Invoice notes and terms object.

        Returns:
            instance: Invoice notes and terms object.

        """
        url = base_url + 'invoices/notesandterms'
        json_object = dumps(invoice_notes_and_terms.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_notes_and_terms(resp)

    def get_estimate_settings(self):
        """Get estimate settings.

        Returns:
            instance: Estimate settings.

        """
        url = base_url + 'estimates'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_estimate_settings(resp)

    def update_estimate_settings(self, estimate_setting):
        """Update estimate settings.

        Args:
            estimate_setting(instance): Estimate setting object.

        Returns:
            instance: Estimate setting object.

        """
        url = base_url + 'estimates'
        json_object = dumps(estimate_setting.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_estimate_settings(resp)

    def get_estimates_notes_and_terms(self):
        """Get estimates notes and terms.

        Returns:
            instance: notes and terms object.

        """
        url = base_url + 'estimates/notesandterms'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_notes_and_terms(resp)

    def update_estimates_notes_and_terms(self, notes_and_terms):
        """Update estimate notes and terms.

        Args:
            notes_and_terms(instance): Notes and terms object.

        Returns:
            instance: Estimates notes and terms.

        """
        url = base_url + 'estimates/notesandterms'
        json_object = dumps(notes_and_terms.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_notes_and_terms(resp)

    def list_creditnote_settings(self):
        """List creditnotes settings.

        Returns:
            instance: creditnotes settings.

        """
        url = base_url + 'creditnotes'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_creditnote_settings(resp)

    def update_creditnote_settings(self, creditnotes_settings):
        """Update creditnotes settings.

        Args:
            creditnotes_settings(instance): Creditnotes settings object.

        Returns:
            instance: Creditnotes settings object.

        """
        url = base_url + 'creditnotes'
        json_object = dumps(creditnotes_settings.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_creditnote_settings(resp)

    def get_creditnote_notes_and_terms(self):
        """Get creditnotes notes and terms.

        Returns:
            instance: Creditnotes settings object.

        """
        url = base_url + 'creditnotes/notesandterms'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_notes_and_terms(resp)

    def update_creditnote_notes_and_terms(self, notes_and_terms):
        """Update creditnotes notes and terms.

        Args:
            notes_and_terms(instance): Notes and terms object.

        Returns:
            instance: Notes and terms object.

        """
        url = base_url + 'creditnotes/notesandterms'
        json_object = dumps(notes_and_terms.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_notes_and_terms(resp)

    def get_currencies(self, param=None):
        """List of configured currencies with pagination.

        Args:
            param(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            instance: Currency list object.

        """
        url = base_url + 'currencies'
        if param is not None:
            data = {
                'filter_by': param
                }
        else:
            data = None
        resp = zoho_http_client.get(url, self.details, data)
        return parser.get_currencies(resp)

    def get_currency(self, currency_id):
        """Get the details of a currency.

        Args:
            currency_id(str): Currency id.

        Returns:
            instance: Currency object.

        """
        url = base_url + 'currencies/' + currency_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_currency(resp)

    def create_currency(self, currency):
        """Create a currency for transactions.

        Args:
            currency(instance): Currency object.

        Returns:
            instance: Currency object.

        """
        url = base_url + 'currencies'
        json_obj = dumps(currency.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_currency(resp)

    def update_currency(self, currency_id, currency):
        """Update the details of currency.

        Args:
            currency_id(str): Currency id.
            currency(instance): Currency object.

        Returns:
            instance: Currency object.

        """
        url = base_url + 'currencies/' + currency_id
        json_obj = dumps(currency.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_currency(resp)

    def delete_currency(self, currency_id):
        """Delete currency.

        Args:
            currency_id(str): Currency id.

        Returns:
            str: Success message('The currency has been deleted.').

        """
        url = base_url + 'currencies/' + currency_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def list_exchange_rates(self, currency_id, param=None):
        """List of exchange rates configured for the currency.

        Args:
            currency_id(str): Currency id.
            param(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            instance: Exchange rate list object.

        """
        url = base_url + 'currencies/' + currency_id + '/exchangerates'
        resp = zoho_http_client.get(url, self.details, param)
        return parser.get_exchange_rates(resp)

    def get_exchange_rate(self, currency_id, exchange_rate_id):
        """Get details of an exchange rate that has been associated to the
        currency.

        Args:
            currency_id(str): Currency id.
            exchange_rate_id(str): Exchange rate id.

        Returns:
            instance: Exchange rate object.

        """
        url = base_url + 'currencies/' + currency_id + '/exchangerates/' + \
              exchange_rate_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_exchange_rate(resp)

    def create_exchange_rate(self, exchange_rate):
        """Create an exchange rate for the specified currency.

        Args:
            exchange_rate(instance): Exchange rate object; its currency id
                determines which currency the rate is created for.

        Returns:
            instance: Exchange rate object.

        """
        url = base_url + 'currencies/' + exchange_rate.get_currency_id() + \
              '/exchangerates'
        json_obj = dumps(exchange_rate.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_exchange_rate(resp)

    def update_exchange_rate(self, exchange_rate):
        """Update the details of exchange rate currency.

        Args:
            exchange_rate(instance): Exchange rate object.

        Returns:
            str: Success message('The exchange rate has been updated.').

        """
        url = base_url + 'currencies/' + exchange_rate.get_currency_id() + \
              '/exchangerates/' + exchange_rate.get_exchange_rate_id()
        json_obj = dumps(exchange_rate.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_message(resp)

    def delete_exchange_rate(self, currency_id, exchange_rate_id):
        """Delete an exchange rate for the specified currency.

        Args:
            currency_id(str): Currency id.
            exchange_rate_id(str): Exchange rate id.

        Returns:
            str: Success message('Exchange rate successfully deleted.').

        """
        url = base_url + 'currencies/' + currency_id + '/exchangerates/' + \
              exchange_rate_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def get_taxes(self):
        """List of taxes with pagination.

        Returns:
            instance: Tax list object.

        """
        url = base_url + 'taxes'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_taxes(resp)

    def get_tax(self, tax_id):
        """Get details of a tax.

        Args:
            tax_id(str): Tax id.

        Returns:
            instance: Tax object.

        """
        url = base_url + 'taxes/' + tax_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_tax(resp)

    def create_tax(self, tax):
        """Create tax.

        Args:
            tax(instance): Tax object.

        Returns:
            instance: Tax object.

        """
        url = base_url + 'taxes'
        json_obj = dumps(tax.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_tax(resp)

    def update_tax(self, tax_id, tax):
        """Update the details of tax.

        Args:
            tax_id(str): Tax id.
            tax(instance): Tax object.

        Returns:
            instance: Tax object.

        """
        url = base_url + 'taxes/' + tax_id
        json_obj = dumps(tax.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_tax(resp)

    def delete_tax(self, tax_id):
        """Delete tax.

        Args:
            tax_id(str): Tax id.

        Returns:
            str: Success message('The record has been deleted.').

        """
        url = base_url + 'taxes/' + tax_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def get_tax_group(self, tax_group_id):
        """Get details of a tax group with associated taxes.

        Args:
            tax_group_id(str): Tax group id.

        Returns:
            instance: Tax group object.

        """
        url = base_url + 'taxgroups/' + tax_group_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_tax_group(resp)

    def create_tax_group(self, tax_group):
        """Create a tax group associating multiple taxes.

        Args:
            tax_group(instance): Tax group object carrying the group name
                and the list of associated tax ids.

        Returns:
            instance: Tax group object.

        """
        url = base_url + 'taxgroups'
        json_obj = dumps(tax_group.to_json())
        data = {
            'JSONString': json_obj
            }
        # Bug fix: removed a stray Python 2 debug statement ("print data")
        # that leaked the request payload to stdout.
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_tax_group(resp)

    def update_tax_group(self, tax_group):
        """Update tax group.

        Args:
            tax_group(instance): Tax group object carrying its id, name and
                the list of associated tax ids.

        Returns:
            str: Success message.

        """
        url = base_url + 'taxgroups/' + tax_group.get_tax_group_id()
        json_obj = dumps(tax_group.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_message(resp)

    def delete_tax_group(self, tax_group_id):
        """Delete tax group.

        Args:
            tax_group_id(str): Tax group id.

        Returns:
            str: Success message('The tax group has been deleted.').

        """
        url = base_url + 'taxgroups/' + tax_group_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def get_opening_balance(self):
        """Get opening balance.

        Returns:
            instance: Opening balance object.

        """
        url = base_url + 'openingbalances'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_opening_balance(resp)

    def create_opening_balance(self, opening_balance):
        """Create opening balance.

        Args:
            opening_balance(instance): Opening balance object.

        Returns:
            instance: Opening balance object.

        """
        url = base_url + 'openingbalances'
        json_object = dumps(opening_balance.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_opening_balance(resp)

    def update_opening_balance(self, opening_balance):
        """Update opening balance.

        Args:
            opening_balance(instance): Opening balance object.

        Returns:
            instance: Opening balance object.

        """
        url = base_url + 'openingbalances'
        json_object = dumps(opening_balance.to_json())
        data = {
            'JSONString': json_object
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_opening_balance(resp)

    def delete_opening_balance(self):
        """Delete the entered opening balance.

        Returns:
            str: Success message('The entered opening balance has been
                deleted.').

        """
        url = base_url + 'openingbalances'
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def list_auto_payment_reminder(self):
        """List auto payment reminder.

        Returns:
            instance: Autoreminders list object.

        """
        url = base_url + 'autoreminders'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_autoreminders(resp)

    def get_auto_payment_reminder(self, reminder_id):
        """Get auto payment reminder.

        Args:
            reminder_id(str): Reminder id.

        Returns:
            instance: Auto payment reminder object.

        """
        url = base_url + 'autoreminders/' + reminder_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_autoreminder(resp)

    def enable_auto_reminder(self, reminder_id):
        """Enable automated payment reminder.

        Args:
            reminder_id(str): Reminder id.

        Returns:
            str: Success message('Payment reminder has been enabled.').

        """
        url = base_url + 'autoreminders/' + reminder_id + '/enable'
        data = {
            'JSONString': ''
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)

    def disable_auto_reminder(self, reminder_id):
        """Disable automated payment reminder.

        Args:
            reminder_id(str): Reminder id.

        Returns:
            str: Success message('Payment reminder has been disabled.').

        """
        url = base_url + 'autoreminders/' + reminder_id + '/disable'
        data = {
            'JSONString': ''
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)

    def update_auto_reminder(self, reminder_id, autoreminder):
        """Update an auto reminder.

        Args:
            reminder_id(str): Reminder id.
            autoreminder(instance): Auto reminder object.

        Returns:
            str: Success message('Your payment reminder preferences have
                been saved.').

        """
        url = base_url + 'autoreminders/' + reminder_id
        # Bug fix: the payload must be a JSON *string*; every other write
        # method in this class runs to_json() through dumps().
        json_obj = dumps(autoreminder.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_message(resp)

    def list_manual_reminders(self, type_of_reminder=None):
        """List of manual reminders.

        Args:
            type_of_reminder(str): Type to select between open or overdue
                reminder.

        Returns:
            instance: Manual reminder list object.

        """
        url = base_url + 'manualreminders'
        if type_of_reminder is not None:
            param = {
                'type': type_of_reminder
                }
        else:
            param = None
        # Bug fix: the query parameter was built but never sent, so the
        # type filter was silently ignored.
        resp = zoho_http_client.get(url, self.details, param)
        return parser.get_manual_reminders(resp)

    def get_manual_reminder(self, reminder_id):
        """Get the details of a manual reminder.

        Args:
            reminder_id(str): Reminder id.

        Returns:
            instance: Manual reminder object.

        """
        url = base_url + 'manualreminders/' + reminder_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_manual_reminder(resp)

    def update_manual_reminder(self, reminder_id, manual_reminder):
        """Update manual reminder.

        Args:
            reminder_id(str): Reminder id.
            manual_reminder(instance): Manual reminder.

        Returns:
            instance: Manual reminder.

        """
        url = base_url + 'manualreminders/' + reminder_id
        json_obj = dumps(manual_reminder.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/SettingsApi.py",
"copies": "1",
"size": "23914",
"license": "mit",
"hash": 5092538827102214000,
"line_mean": 27.8816425121,
"line_max": 78,
"alpha_frac": 0.566028268,
"autogenerated": false,
"ratio": 4.036118143459916,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.015714103808473212,
"num_lines": 828
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.SettingsParser import SettingsParser
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'users/'
zoho_http_client = ZohoHttpClient()
parser = SettingsParser()
class UsersApi:
    """Users Api class is used to
    1.List of all users in an organization.
    2.Get the details of a user.
    3.Get the details of the current user.
    4.Create an user for an organization.
    5.Update the details of an existing user.
    6.Delete an existing user.
    7.Send invitation email to a user.
    8.Mark an inactive user as active.
    9.Mark an active user as inactive.
    """

    def __init__(self, authtoken, organization_id):
        """Initialize Users Api using authtoken and organization id.

        Args:
            authtoken(str): User's Authtoken.
            organization_id(str): User's Organization id.

        """
        # Sent along with every request made through this instance.
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def get_users(self, parameters=None):
        """Get the list of users in the organization.

        Args:
            parameters(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            instance: Users list object.

        """
        resp = zoho_http_client.get(base_url, self.details, parameters)
        return parser.get_users(resp)

    def get(self, user_id):
        """Get the details of a user.

        Args:
            user_id(str): User id.

        Returns:
            instance: User object.

        """
        url = base_url + user_id
        resp = zoho_http_client.get(url, self.details)
        return parser.get_user(resp)

    def current_user(self):
        """Get details of the current user.

        Returns:
            instance: User object.

        """
        url = base_url + 'me'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_user(resp)

    def create(self, user):
        """Create a user for your organization.

        Args:
            user(instance): User object.

        Returns:
            instance: User object.

        """
        json_obj = dumps(user.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.post(base_url, self.details, data)
        return parser.get_user(resp)

    def update(self, user_id, user):
        """Update the details of an user.

        Args:
            user_id(str): User id.
            user(instance): User object.

        Returns:
            instance: User object.

        """
        url = base_url + user_id
        json_obj = dumps(user.to_json())
        data = {
            'JSONString': json_obj
            }
        resp = zoho_http_client.put(url, self.details, data)
        return parser.get_user(resp)

    def delete(self, user_id):
        """Delete a user associated to the organization.

        Args:
            user_id(str): User id.

        Returns:
            str: Success message('The user has been removed from your
                organization.').

        """
        url = base_url + user_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)

    def invite_user(self, user_id):
        """Send invitation email to a user.

        Args:
            user_id(str): User id.

        Returns:
            str: Success message('Your invitation has been sent.').

        """
        url = base_url + user_id + '/invite'
        data = {
            'JSONString': ''
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)

    def mark_user_as_active(self, user_id):
        """Mark an inactive user as active.

        Args:
            user_id(str): User id.

        Returns:
            str: Success message('The user has been marked as active.').

        """
        url = base_url + user_id + '/active'
        data = {
            'JSONString': ''
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)

    def mark_user_as_inactive(self, user_id):
        """Mark an active user as inactive.

        Args:
            user_id(str): User id.

        Returns:
            str: Success message('The user has been marked as inactive.').

        """
        url = base_url + user_id + '/inactive'
        # Consistency fix: pass the empty body as a {'JSONString': ''} dict
        # like every other POST in this class, not as a bare string.
        data = {
            'JSONString': ''
            }
        resp = zoho_http_client.post(url, self.details, data)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/UsersApi.py",
"copies": "1",
"size": "4629",
"license": "mit",
"hash": 935682246673127000,
"line_mean": 25.6034482759,
"line_max": 77,
"alpha_frac": 0.5493627133,
"autogenerated": false,
"ratio": 3.993960310612597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5043323023912597,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.VendorPaymentsParser import VendorPaymentsParser
from books.api.Api import Api
from json import dumps
base_url = Api().base_url + 'vendorpayments/'
parser = VendorPaymentsParser()
zoho_http_client = ZohoHttpClient()
class VendorPaymentsApi:
    """Vendor Payments Api is used to
    1.List all the payments made to your vendor.
    2.Get the details of a vendor payment.
    3.Create a payment made to the vendor.
    4.Update an existing vendor payment.
    5.Delete an existing vendor payment.
    """

    def __init__(self, authtoken, organization_id):
        """Initialize Vendor payments api using user's authtoken and
        organization id.

        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.

        """
        # These credentials accompany every request made by this instance.
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
            }

    def get_vendor_payments(self, parameter=None):
        """List all the payments made to the vendor.

        Args:
            parameter(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            instance: Vendor payments list object.

        """
        response = zoho_http_client.get(base_url, self.details, parameter)
        return parser.get_list(response)

    def get(self, payment_id):
        """Get the details of vendor payment.

        Args:
            payment_id(str): Payment id.

        Returns:
            instance: Vendor payments object.

        """
        response = zoho_http_client.get(base_url + payment_id, self.details)
        return parser.get_vendor_payment(response)

    def create(self, vendor_payments):
        """Create a payment made to vendor.

        Args:
            vendor_payments(instance): Vendor payments object.

        Returns:
            instance: Vendor payments object.

        """
        payload = {
            'JSONString': dumps(vendor_payments.to_json())
            }
        response = zoho_http_client.post(base_url, self.details, payload)
        return parser.get_vendor_payment(response)

    def update(self, payment_id, vendor_payments):
        """Update an existing vendor payment.

        Args:
            payment_id(str): Payment id.
            vendor_payments(instance): Vendor payments object.

        Returns:
            instance: Vendor payments object.

        """
        payload = {
            'JSONString': dumps(vendor_payments.to_json())
            }
        response = zoho_http_client.put(base_url + payment_id, self.details,
                                        payload)
        return parser.get_vendor_payment(response)

    def delete(self, payment_id):
        """Delete an existing vendor payment.

        Args:
            payment_id(str): Payment id.

        Returns:
            str: Success message('The payment has been deleted.').

        """
        response = zoho_http_client.delete(base_url + payment_id,
                                           self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/VendorPaymentsApi.py",
"copies": "1",
"size": "3189",
"license": "mit",
"hash": -127832632843351070,
"line_mean": 26.4913793103,
"line_max": 76,
"alpha_frac": 0.5926622766,
"autogenerated": false,
"ratio": 4.093709884467266,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5186372161067265,
"avg_score": null,
"num_lines": null
} |
# $Id$
from canary.context import Context
from canary.study import *
from canary.user import get_user_by_id
class StatCollector:
    """Drives a set of StatHandlers over a sequence of records."""

    def __init__ (self, context):
        self.context = context
        self.handlers = []

    def add_handler (self, handler):
        """Register a single handler."""
        self.handlers.append(handler)

    def add_handlers (self, *handlers):
        """Register several handlers at once."""
        for each in handlers:
            self.add_handler(each)

    def process (self, records):
        """Feed every record to every registered handler, in order."""
        for rec in records:
            for each in self.handlers:
                each.process(self.context, rec)
class StatHandler:
    """Base class for per-record statistics accumulators.

    Subclasses define KEYS (a list of key iterables used to pre-seed the
    tally table with zeroes) and implement process(context, record).
    """

    # Each entry is an iterable of keys to pre-seed in self.stats.
    KEYS = []

    def __init__ (self, filters=None):
        """Set up an empty tally table, pre-seeded from KEYS.

        Args:
            filters: optional list of 'type:attr:value' strings (type is
                'record' or 'study') that restrict which records count.
        """
        self.stats = {}
        # Bug fix: the passed-in filters were previously discarded
        # (self.filters was unconditionally reset to []).  Also avoids the
        # mutable-default-argument pitfall by defaulting to None.
        self.filters = filters or []
        for keyset in self.KEYS:
            for key in keyset:
                self.stats[key] = 0

    def get_study (self, context, record):
        """Return the record's Study, or None if it is filtered out."""
        return self.filter(context, record)

    def filter (self, context, record):
        """Load the record's study and apply the configured filters.

        Returns the Study instance, or None when the study cannot be
        loaded or any filter rejects the record/study.
        """
        try:
            # Does the study exist?
            study = Study(context, record.study_id)
            for filter in self.filters:
                type, attr, val = filter.split(':')
                # Bug fix: raising bare strings is a Python-2-only feature
                # (it is a TypeError on 2.6+/3.x); raise real exceptions.
                if type == 'record':
                    if not getattr(record, attr) == val:
                        raise ValueError('FilteredRecordValue')
                elif type == 'study':
                    if not getattr(study, attr) == val:
                        raise ValueError('FilteredStudyValue')
                else:
                    raise ValueError('InvalidFilter')
            return study
        except Exception:
            # Best-effort by design: any failure means "no study".
            return None

    def tally (self, item):
        """Increment the count for item, starting from zero if unseen."""
        self.stats[item] = self.stats.get(item, 0) + 1
class CuratorHandler (StatHandler):

    def process (self, context, record):
        """Tally the curating user's name for each record with a study."""
        study = self.get_study(context, record)
        if not study:
            return
        curator = get_user_by_id(context, study.curator_user_id)
        self.tally(curator.name)
class ArticleTypeHandler (StatHandler):

    KEYS = [Study.ARTICLE_TYPES]

    def process (self, context, record):
        """Tally studies by their human-readable article type."""
        study = self.get_study(context, record)
        if not study:
            return
        self.tally(study.get_article_type(text=True))
class MethodologySamplingHandler (StatHandler):

    KEYS = [Methodology.SAMPLING]

    def process (self, context, record):
        """Tally the sampling strategy of every methodology on the study."""
        study = self.get_study(context, record)
        if not study:
            return
        for methodology in study.methodologies:
            self.tally(methodology.get_sampling(text=True))
class MethodologyTypeHandler (StatHandler):

    KEYS = [Methodology.TYPES]

    def process (self, context, record):
        """Tally the study type of every methodology on the study."""
        study = self.get_study(context, record)
        if not study:
            return
        for methodology in study.methodologies:
            self.tally(methodology.get_study_type(text=True))
class MethodologyTimingHandler (StatHandler):

    KEYS = [Methodology.TIMING]

    def process (self, context, record):
        """Tally the timing of every methodology on the study."""
        study = self.get_study(context, record)
        if not study:
            return
        for methodology in study.methodologies:
            self.tally(methodology.get_timing(text=True))
class MethodologyControlHandler (StatHandler):

    KEYS = [Methodology.CONTROLS]

    def process (self, context, record):
        """Tally the controls of every methodology on the study."""
        study = self.get_study(context, record)
        if not study:
            return
        for methodology in study.methodologies:
            self.tally(methodology.get_controls(text=True))
class ExposureRouteHandler (StatHandler):

    KEYS = [ExposureRoute.ROUTE]

    def process (self, context, record):
        """Tally every exposure route across all of the study's
        methodologies."""
        study = self.get_study(context, record)
        if not study:
            return
        for methodology in study.methodologies:
            for route in methodology.get_routes(text=True):
                self.tally(route)
class ExposureHandler (StatHandler):

    KEYS = [Exposure.UMLS_SOURCES.values()]

    def process (self, context, record):
        """Tally (term, concept_id) pairs for each exposure on the study.

        NOTE(review): KEYS pre-seeds the tally with UMLS source names while
        the tallied keys are (term, concept_id) tuples — confirm whether a
        per-source tally was also intended.
        """
        study = self.get_study(context, record)
        if study:
            for exposure in study.exposures:
                # Bug fix: removed an unused UMLS source lookup that could
                # raise KeyError for unknown concept_source_ids.
                self.tally((exposure.term, exposure.concept_id))
class RiskFactorHandler (StatHandler):

    KEYS = [RiskFactor.UMLS_SOURCES.values()]

    def process (self, context, record):
        """Tally (term, concept_id) pairs for each risk factor on the study.

        NOTE(review): KEYS pre-seeds the tally with UMLS source names while
        the tallied keys are (term, concept_id) tuples — confirm whether a
        per-source tally was also intended.
        """
        study = self.get_study(context, record)
        if study:
            for risk_factor in study.risk_factors:
                # Bug fix: removed an unused UMLS source lookup that could
                # raise KeyError for unknown concept_source_ids.
                self.tally((risk_factor.term, risk_factor.concept_id))
class OutcomeHandler (StatHandler):

    KEYS = [Outcome.UMLS_SOURCES.values()]

    def process (self, context, record):
        """Tally (term, concept_id) pairs for each outcome on the study.

        NOTE(review): KEYS pre-seeds the tally with UMLS source names while
        the tallied keys are (term, concept_id) tuples — confirm whether a
        per-source tally was also intended.
        """
        study = self.get_study(context, record)
        if study:
            for outcome in study.outcomes:
                # Bug fix: removed an unused UMLS source lookup that could
                # raise KeyError for unknown concept_source_ids.
                self.tally((outcome.term, outcome.concept_id))
class SpeciesHandler (StatHandler):

    KEYS = [Species.UMLS_SOURCES.values()]

    def process (self, context, record):
        """Tally (term, concept_id) pairs for each species on the study.

        NOTE(review): KEYS pre-seeds the tally with UMLS source names while
        the tallied keys are (term, concept_id) tuples — confirm whether a
        per-source tally was also intended.
        """
        study = self.get_study(context, record)
        if study:
            for species in study.species:
                # Bug fix: removed an unused UMLS source lookup that could
                # raise KeyError for unknown concept_source_ids.
                self.tally((species.term, species.concept_id))
class LocationHandler (StatHandler):

    def process (self, context, record):
        """Tally (country, name) pairs for every location on the study."""
        study = self.get_study(context, record)
        if not study:
            return
        for location in study.locations:
            self.tally((location.country, location.name))
| {
"repo_name": "dchud/sentinel",
"path": "canary/stats.py",
"copies": "1",
"size": "5600",
"license": "mit",
"hash": 5791502559188242000,
"line_mean": 27,
"line_max": 79,
"alpha_frac": 0.5896428571,
"autogenerated": false,
"ratio": 4.031677465802736,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5121320322902736,
"avg_score": null,
"num_lines": null
} |
# $Id$
from cPickle import load, dump
import logging
import os
import shelve
from time import time
import traceback
from mx.DateTime import localtime, mktime
from quixote import get_publisher
from quixote.errors import AccessError
from quixote.form2 import Form
from quixote.publish import SessionPublisher
from quixote.session import Session, SessionManager
import canary.context
import canary.user
# Sql* classes adapted from Titus Brown's examples at:
#
# http://issola.caltech.edu/~t/transfer/sql_example/session.py
#
class SqlQuixoteSession (object, Session):
    """Quixote Session that tracks its own dirtiness and user messages so it
    can be persisted to SQL by SqlTableMap/SqlSessionManager.

    NOTE(review): the __getattr__ hack below bridges Quixote's
    name-mangled private attributes (_Session__*) to attributes set by
    SqlTableMap._create_from_db; inside this class 'self.__remote_address'
    mangles to '_SqlQuixoteSession__remote_address', while _create_from_db
    sets '_SqlTableMap__remote_address' -- confirm these actually line up
    at runtime.
    """
    def __init__ (self, request, id):
        Session.__init__(self, request, id)
        # '~~'-delimited user-visible messages accumulated for this session.
        self.messages = ''
        # Set to 1 whenever state changes that should be re-persisted.
        self._dirty = 0
        # Form tokens, stored joined by '~~' in the sessions table.
        self._form_tokens = []
        #self._Session__access_time = self.__access_time
        #self._Session__creation_time = self.__creation_time
        #self._Session__remote_address = self.remote_address
    def __getattr__ (self, att):
        # Only invoked for attributes not found normally; maps Quixote's
        # mangled names onto this class's own private names.
        # NOTE(review): silently returns None for any other attribute
        # instead of raising AttributeError -- confirm that is intended.
        if att == '_Session__remote_address':
            return self.__remote_address
        elif att == '_Session__creation_time':
            return self.__creation_time
    def add_message (self, msg=''):
        # Append msg to the '~~'-delimited message string and mark dirty.
        if len(self.messages) == 0:
            self.messages = str(msg)
        else:
            self.messages += '~~%s' % str(msg)
        self._dirty = 1
    def has_messages (self):
        # True when at least one message is pending for display.
        return len(self.messages) > 0
    def clear_messages (self):
        # Drop all pending messages and mark the session for re-save.
        self.messages = ''
        self._dirty = 1
    def set_user (self, user):
        """
        Set the user!  The change in user will be detected by Quixote
        through the 'is_dirty' function and saved accordingly.
        """
        if not self.user or user.id != self.user.id:
            self._dirty = 1
        self.user = user
    def has_info (self):
        """
        Is this session worthy of storage?
        """
        # Truthy only when a user is attached; anonymous sessions are not
        # persisted (see SqlSessionManager.commit_changes).
        return self.user
    def is_dirty (self):
        """
        Check to see if this session needs to be stored again, e.g. if
        the user has changed.
        """
        return self._dirty
    def _set_access_time (self, resolution):
        # Every access-time update makes the session dirty so the new
        # timestamp is written back to the database.
        Session._set_access_time(self, resolution)
        self._dirty = 1
class SqlTableMap:
    """
    Intercept dictionary requests and channel them to the SQL database.

    Writes are buffered in self.uncommitted (keyed by session id) until
    _save_to_db() is called; reads always hit the 'sessions' table.
    """
    def __init__ (self, context):
        """
        WAS: Store the database connection.
        """
        # session_id -> session objects assigned via __setitem__ but not
        # yet written to the database.
        self.uncommitted = {}
        self.context = context
    def get_conn (self):
        """
        Return the database connection after doing a rollback.
        """
        # NOTE(review): the rollback described above is commented out, so
        # this now returns the raw connection unchanged.
        conn = self.context.connection
        #try:
        #    conn.rollback()
        #except NotSupportedError:
        #    pass
        return conn
    def keys (self):
        """
        Get a list of the session IDs in the database.
        """
        cursor = self.context.get_cursor()
        #context.execute("SELECT uid FROM sessions")
        cursor.execute("SELECT session_id FROM sessions")
        return [id for (id,) in cursor.fetchall()]
    def values (self):
        """
        Load all of the sessions in the database.
        """
        cursor = self.context.get_cursor()
        cursor.execute("""
            SELECT session_id, user_id, remote_addr, creation_time,
            access_time, messages, form_tokens
            FROM sessions
            """)
        return [self._create_from_db(session_id, user_id, addr, c, a, msg, tokens) \
            for (session_id, user_id, addr, c, a, msg, tokens) in cursor.fetchall() ]
    def items (self):
        """
        Get a list of the (key, value) pairs in the database.
        """
        # Returns a dict keyed by session id (not a list of pairs, despite
        # the docstring's phrasing).
        d = {}
        for v in self.values():
            d[v.id] = v
        return d
    def get (self, session_id, default=None):
        """
        Get the given item from the database.
        """
        cursor = self.context.get_cursor()
        cursor.execute("""
            SELECT session_id, user_id, remote_addr, creation_time,
            access_time, messages, form_tokens
            FROM sessions
            WHERE session_id=%(session_id)s
            """, {'session_id': session_id})
        assert cursor.rowcount <= 1
        if cursor.rowcount == 1:
            (session_id, user_id, addr, c, a, msg, tokens) = cursor.fetchone()
            return self._create_from_db(session_id, user_id, addr, c, a, msg, tokens)
        else:
            return default
    def __getitem__ (self, session_id):
        """
        Get the given session from the database.
        """
        # Returns None (not KeyError) for a missing id, via get()'s default.
        return self.get(session_id)
    def has_key (self, session_id):
        """
        Does this session exist in the database?
        """
        if self.get(session_id) == None:
            return 0
        return 1
    def __setitem__ (self, session_id, session):
        """
        Store the given session in the database.
        """
        # Buffered only -- the actual write happens in _save_to_db().
        self.uncommitted[session_id] = session
    def __delitem__ (self, session_id):
        """
        Delete the given session from the database.
        """
        # Removes the id from both the uncommitted buffer and the table;
        # a falsy session_id is silently ignored.
        if session_id:
            if self.uncommitted.has_key(session_id):
                del self.uncommitted[session_id]
            #conn = self.get_conn()
            #cursor = self.get_conn().cursor()
            #cursor = conn.cursor()
            cursor = self.context.get_cursor()
            cursor.execute("""
                DELETE FROM sessions
                WHERE session_id=%(session_id)s
                """, {'session_id': session_id})
            #conn.commit()
    def _save_to_db (self, session):
        """
        Save a given session to the database.
        """
        #conn = self.get_conn()
        #cursor = conn.cursor()
        cursor = self.context.get_cursor()
        # ORIGINAL: save a db-thrash by checking for update possibility
        # instead of the following, which always does an extra delete-
        # and-insert:
        #
        # don't allow multiple session IDs; this also removes it from
        # the uncommitted dictionary.
        del self[session.id]
        #if self.uncommitted.has_key(session.id):
        #    del self.uncommitted[session.id]
        #if self.has_key(session.id):
        #    cursor.execute("""
        #        UPDATE sessions
        #        SET access_time = %s, messages = %s
        #        WHERE session_id = %s
        #        """, (str(localtime(session.get_access_time())), session.messages,
        #        session.id))
        #else:
        cursor.execute("""
            INSERT INTO sessions
            (session_id, user_id, remote_addr,
            creation_time,
            access_time,
            messages,
            form_tokens)
            VALUES
            (%s, %s, %s,
            %s,
            %s,
            %s,
            %s)
            """, (session.id, session.user.id, session.get_remote_address(),
            str(localtime(session.get_creation_time())),
            str(localtime(session.get_access_time())),
            str(session.messages),
            str('~~'.join(session._form_tokens))))
        #conn.commit()
    def _create_from_db (self, session_id, user_id, addr, create_time,
        access_time, messages, tokens=[]):
        """
        Create a new session from database data.
        This goes through the new-style object function __new__ rather than
        through the __init__ function.

        NOTE(review): 'tokens' is expected to be a '~~'-joined string (it is
        .split() below), so the mutable [] default would raise if ever used.
        """
        session = SqlQuixoteSession.__new__(SqlQuixoteSession)
        session.id = session_id
        session.user = canary.user.get_user_by_id(self.context, user_id)
        # FIXME: one '_' to be removed for qx-1.0
        #session.__remote_address = addr
        #session.__creation_time = create_time.ticks()
        #session.__access_time = access_time.ticks()
        # NOTE(review): inside this class these private names mangle to
        # _SqlTableMap__access_time etc., not _SqlQuixoteSession__* --
        # confirm SqlQuixoteSession.__getattr__ really resolves them.
        try:
            session.__access_time = mktime(access_time.timetuple()).ticks()
            session.__creation_time = mktime(create_time.timetuple()).ticks()
        except AttributeError:
            # DB driver already returned mx.DateTime values with .ticks().
            session.__creation_time = create_time.ticks()
            session.__access_time = access_time.ticks()
        session.__remote_address = addr
        session.messages = messages
        session._form_tokens = tokens.split('~~')
        return session
    def _abort_uncommitted (self, session):
        """
        Toss a session without committing any changes.
        """
        if self.uncommitted.has_key(session.id):
            del self.uncommitted[session.id]
class SqlSessionManager (SessionManager):
    """
    A session manager that uses the SqlTableMap to map sessions into an
    SQL database.
    """
    def __init__ (self, context):
        SessionManager.__init__(self, SqlQuixoteSession, SqlTableMap(context))
        self.context = context

    def abort_changes (self, session):
        """Discard any buffered, uncommitted changes for this session."""
        if not session:
            return
        self.sessions._abort_uncommitted(session)

    def commit_changes (self, session):
        """Persist the session, but only when it carries storable info."""
        if not session:
            return
        if session.has_info():
            self.sessions._save_to_db(session)
class CanaryPublisher (SessionPublisher):
    """Quixote publisher wired to canary's context, config, and
    SQL-backed session manager."""
    def __init__ (self, *args, **kwargs):
        # Logger is named after the concrete class for log readability.
        self.logger = logging.getLogger(str(self.__class__))
        # Reuse a caller-supplied context when given; otherwise start a
        # fresh one.  Positional *args are accepted but ignored.
        try:
            self.context = kwargs['context']
            self.logger.info('Found context')
        except KeyError:
            self.context = canary.context.Context()
            self.logger.info('Started new context')
        self.config = self.context.config
        SessionPublisher.__init__(self, root_namespace='canary.ui',
            session_mgr=SqlSessionManager(self.context), config=self.config)
        self.setup_logs()
class NotLoggedInError (AccessError):
    """
    Raised when the requested action requires a logged-in user.  Whether
    that user has sufficient access rights can only be determined after
    the user actually logs in, so only authentication is demanded here.
    """
    description = "Authorized access only."
    title = "Access denied"
    status_code = 403
class MyForm (Form):
    """
    Automatically creates a logger instance on any arbitrary Form.
    """
    def __init__ (self, context, *args, **kwargs):
        # 'context' is accepted but neither stored nor forwarded to Form;
        # the logger is named after the concrete subclass so log lines
        # identify which form emitted them.
        Form.__init__(self, *args, **kwargs)
        self.logger = logging.getLogger(str(self.__class__))
| {
"repo_name": "dchud/sentinel",
"path": "canary/qx_defs.py",
"copies": "1",
"size": "10296",
"license": "mit",
"hash": -6240295501764007000,
"line_mean": 29.1052631579,
"line_max": 85,
"alpha_frac": 0.558954934,
"autogenerated": false,
"ratio": 4.168421052631579,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5227375986631578,
"avg_score": null,
"num_lines": null
} |
# $Id$
from cStringIO import StringIO
import traceback
import urllib
from elementtree import ElementTree as etree
from elementtree.ElementTree import Element, SubElement
import canary
from canary import dtuple
from canary.utils import DTable
def find_resolver (context, ip_address='127.0.0.1'):
    """Locate an OpenURL resolver for the given web user's IP address.

    Tries the (possibly cached) constructor result first, then the local
    database, and only as a last resort queries OCLC's registry -- saving
    that result so OCLC's server is not hit again for the same address.
    Returns None when OCLC only offers its generic gateway resolver.
    """
    resolver = Resolver(context, -1, ip_address)
    if resolver.base_url:
        return resolver
    resolver.load_from_db(context)
    if resolver.base_url:
        return resolver
    resolver.load_from_xml(context)
    resolver.save(context)
    if resolver.base_url == 'http://worldcatlibraries.org/registry/gateway':
        return None
    return resolver
class Resolver (canary.context.Cacheable, DTable):
    """Represents a single institutional OpenURL resolver. Provides enough
    functionality to determine, for a given site user's IP_ADDRESS, whether
    such a resolver is available, and if not, provides reasonable default
    values pointing to OCLC's Find in a Library resolver."""
    TABLE_NAME = 'resolvers'
    CACHE_KEY = 'resolver'
    CACHE_CHECK_FIELD = 'base_url'
    CACHE_ID_FIELD = 'ip_address'
    # This should probably be in Config, but it's easy enough to change here
    LOOKUP_URL = 'http://worldcatlibraries.org/registry/lookup'
    NS = 'http://worldcatlibraries.org/registry/resolver'
    # NOTE(review): this alias is immediately shadowed by the 'def load'
    # below -- confirm whether Cacheable.load was meant to survive.
    load = canary.context.Cacheable.load
    def __init__ (self, context=None, uid=-1, ip_address='127.0.0.1'):
        # When Cacheable returns an already-initialized cached instance,
        # self.uid exists and we must not clobber its state.
        try:
            if self.uid >= 0:
                return
        except AttributeError:
            pass
        self.uid = uid
        self.ip_address = ip_address
        self.record_xml = ''
        self.base_url = self.icon_url = self.link_text = self.institution = ''
        self.supports_01 = self.supports_10 = False
        self.date_modified = ''
    def load (self, context):
        """Populate this resolver from the DB, falling back to OCLC XML."""
        # Is it already loaded?  Convenience check for client calls
        # don't need to verify loads from the cache.
        if context.config.use_cache:
            try:
                if getattr(self, self.CACHE_CHECK_FIELD):
                    # Already loaded
                    return
            except AttributeError:
                # Not already loaded, so continue
                pass
        self.load_from_db(context)
        if not self.base_url:
            self.load_from_xml(context)
    def load_from_db (self, context):
        """Fill attributes from the local 'resolvers' row for ip_address."""
        cursor = context.get_cursor()
        cursor.execute("""SELECT * FROM """ + self.TABLE_NAME + """
            WHERE ip_address = %s
            """, self.ip_address)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        row = cursor.fetchone()
        if row:
            row = dtuple.DatabaseTuple(desc, row)
            for k, v in row.items():
                self.set(k, v)
        else:
            context.logger.error('No %s with %s %s in local database' % (self.__class__.__name__,
                self.CACHE_ID_FIELD, getattr(self, self.CACHE_ID_FIELD)))
    def load_from_xml (self, context):
        """Query OCLC's registry over HTTP and parse the resolver entry.

        Any failure (network, parse, missing entry) falls through to the
        OCLC 'Find in a Library' defaults below.
        """
        url = '%s?IP=%s' % (self.LOOKUP_URL, self.ip_address)
        try:
            fp = urllib.urlopen(url)
            self.record_xml = fp.read()
            fp.close()
            # NOTE: Is it safe to pickle an ElementTree tree?
            # Don't bind to self if not.
            self.tree = etree.parse(StringIO(self.record_xml))
            records = self.tree.findall('.//{%s}resolverRegistryEntry' % self.NS)
            if not records:
                # NOTE(review): string exceptions are invalid in modern
                # Python; this only works because of the bare except below.
                raise 'InvalidIPAddress'
            for attr, studly in [
                ('institution', 'institutionName'),
                ('base_url', 'baseURL'),
                ('icon_url', 'linkIcon'),
                ('link_text', 'linkText')]:
                try:
                    setattr(self, attr, self.tree.findall('.//{%s}%s' % \
                        (self.NS, studly))[0].text)
                except:
                    setattr(self, attr, '')
        except:
            # Assign default OCLC "Find in a Library" values as useful fallback
            self.institution = 'OCLC'
            self.base_url = 'http://worldcatlibraries.org/registry/gateway'
            if not self.link_text:
                self.link_text = 'Find in a library'
            if not self.icon_url:
                # Note: without a context
                self.icon_url = '%s/images/findinalibrary_badge.gif' % \
                    context.config.site_base_url
    def save (self, context):
        """INSERT (uid == -1) or UPDATE this resolver, then refresh cache."""
        cursor = context.get_cursor()
        try:
            if self.uid == -1:
                insert_phrase = 'INSERT INTO %s' % self.TABLE_NAME
                cursor.execute(insert_phrase + """
                    (uid, ip_address, record_xml, institution, base_url,
                    icon_url, link_text,
                    supports_01, supports_10, date_modified)
                    VALUES
                    (NULL, %s, %s, %s,
                    %s, %s, %s,
                    %s, %s, NOW())
                    """, (self.ip_address, self.record_xml, self.institution,
                    self.base_url, self.icon_url, self.link_text,
                    int(self.supports_01), int(self.supports_10))
                    )
                self.uid = self.get_new_uid(context)
                context.logger.info('Resolver created with uid %s', self.uid)
            else:
                update_phrase = 'UPDATE %s ' % self.TABLE_NAME
                cursor.execute(update_phrase + """
                    SET record_xml = %s, base_url = %s,
                    institution = %s, icon_url = %s, link_text = %s,
                    supports_01 = %s, supports_10 = %s, date_modified = NOW()
                    WHERE ip_address = %s
                    """, (self.record_xml, self.base_url,
                    self.institution, self.icon_url, self.link_text,
                    int(self.supports_01), int(self.supports_10),
                    self.ip_address)
                    )
                context.logger.info('Resolver %s updated', self.uid)
            if context.config.use_cache:
                context.cache_set('%s:%s' % (self.CACHE_KEY,
                    getattr(self, self.CACHE_ID_FIELD)), self)
        except Exception, e:
            # Errors are logged and swallowed; callers see a stale object.
            context.logger.error(e)
| {
"repo_name": "dchud/sentinel",
"path": "canary/resolver.py",
"copies": "1",
"size": "6664",
"license": "mit",
"hash": -5701826900031695000,
"line_mean": 36.8636363636,
"line_max": 97,
"alpha_frac": 0.5333133253,
"autogenerated": false,
"ratio": 4.036341611144761,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9982273368127583,
"avg_score": 0.01747631366343557,
"num_lines": 176
} |
# $Id$
from dpkt import Packet
# header_type
QQ_HEADER_BASIC_FAMILY = 0x02
QQ_HEADER_P2P_FAMILY = 0x00
QQ_HEADER_03_FAMILY = 0x03
QQ_HEADER_04_FAMILY = 0x04
QQ_HEADER_05_FAMILY = 0x05
# Indexed by the header_type byte.
# NOTE(review): index 2 reads "QQ_HEADER_03_FAMILY" although 0x02 is
# QQ_HEADER_BASIC_FAMILY above, and 0x05 has no entry -- the table looks
# off by one relative to the constants; confirm against callers.
header_type_str = [
    "QQ_HEADER_P2P_FAMILY",
    "Unknown Type",
    "QQ_HEADER_03_FAMILY",
    "QQ_HEADER_04_FAMILY",
    "QQ_HEADER_05_FAMILY",
    ]
# command
QQ_CMD_LOGOUT = 0x0001
QQ_CMD_KEEP_ALIVE = 0x0002
QQ_CMD_MODIFY_INFO = 0x0004
QQ_CMD_SEARCH_USER = 0x0005
QQ_CMD_GET_USER_INFO = 0x0006
QQ_CMD_ADD_FRIEND = 0x0009
QQ_CMD_DELETE_FRIEND = 0x000A
QQ_CMD_ADD_FRIEND_AUTH = 0x000B
QQ_CMD_CHANGE_STATUS = 0x000D
QQ_CMD_ACK_SYS_MSG = 0x0012
QQ_CMD_SEND_IM = 0x0016
QQ_CMD_RECV_IM = 0x0017
QQ_CMD_REMOVE_SELF = 0x001C
QQ_CMD_REQUEST_KEY = 0x001D
QQ_CMD_LOGIN = 0x0022
QQ_CMD_GET_FRIEND_LIST = 0x0026
QQ_CMD_GET_ONLINE_OP = 0x0027
QQ_CMD_SEND_SMS = 0x002D
QQ_CMD_CLUSTER_CMD = 0x0030
QQ_CMD_TEST = 0x0031
QQ_CMD_GROUP_DATA_OP = 0x003C
QQ_CMD_UPLOAD_GROUP_FRIEND = 0x003D
QQ_CMD_FRIEND_DATA_OP = 0x003E
QQ_CMD_DOWNLOAD_GROUP_FRIEND = 0x0058
QQ_CMD_FRIEND_LEVEL_OP = 0x005C
QQ_CMD_PRIVACY_DATA_OP = 0x005E
QQ_CMD_CLUSTER_DATA_OP = 0x005F
QQ_CMD_ADVANCED_SEARCH = 0x0061
QQ_CMD_REQUEST_LOGIN_TOKEN = 0x0062
QQ_CMD_USER_PROPERTY_OP = 0x0065
QQ_CMD_TEMP_SESSION_OP = 0x0066
QQ_CMD_SIGNATURE_OP = 0x0067
QQ_CMD_RECV_MSG_SYS = 0x0080
QQ_CMD_RECV_MSG_FRIEND_CHANGE_STATUS = 0x0081
QQ_CMD_WEATHER_OP = 0x00A6
QQ_CMD_ADD_FRIEND_EX = 0x00A7
QQ_CMD_AUTHORIZE = 0X00A8
QQ_CMD_UNKNOWN = 0xFFFF
# sub-commands (value meaning depends on the enclosing command)
QQ_SUB_CMD_SEARCH_ME_BY_QQ_ONLY = 0x03
QQ_SUB_CMD_SHARE_GEOGRAPHY = 0x04
QQ_SUB_CMD_GET_FRIEND_LEVEL = 0x02
QQ_SUB_CMD_GET_CLUSTER_ONLINE_MEMBER = 0x01
# 05-family commands; these values overlap the QQ_CMD_* space above.
QQ_05_CMD_REQUEST_AGENT = 0x0021
QQ_05_CMD_REQUEST_FACE = 0x0022
QQ_05_CMD_TRANSFER = 0x0023
QQ_05_CMD_REQUEST_BEGIN = 0x0026
# cluster (group) sub-commands carried under QQ_CMD_CLUSTER_CMD
QQ_CLUSTER_CMD_CREATE_CLUSTER= 0x01
QQ_CLUSTER_CMD_MODIFY_MEMBER= 0x02
QQ_CLUSTER_CMD_MODIFY_CLUSTER_INFO= 0x03
QQ_CLUSTER_CMD_GET_CLUSTER_INFO= 0x04
QQ_CLUSTER_CMD_ACTIVATE_CLUSTER= 0x05
QQ_CLUSTER_CMD_SEARCH_CLUSTER= 0x06
QQ_CLUSTER_CMD_JOIN_CLUSTER= 0x07
QQ_CLUSTER_CMD_JOIN_CLUSTER_AUTH= 0x08
QQ_CLUSTER_CMD_EXIT_CLUSTER= 0x09
QQ_CLUSTER_CMD_SEND_IM= 0x0A
QQ_CLUSTER_CMD_GET_ONLINE_MEMBER= 0x0B
QQ_CLUSTER_CMD_GET_MEMBER_INFO= 0x0C
QQ_CLUSTER_CMD_MODIFY_CARD = 0x0E
QQ_CLUSTER_CMD_GET_CARD_BATCH= 0x0F
QQ_CLUSTER_CMD_GET_CARD = 0x10
QQ_CLUSTER_CMD_COMMIT_ORGANIZATION = 0x11
QQ_CLUSTER_CMD_UPDATE_ORGANIZATION= 0x12
QQ_CLUSTER_CMD_COMMIT_MEMBER_ORGANIZATION = 0x13
QQ_CLUSTER_CMD_GET_VERSION_ID= 0x19
QQ_CLUSTER_CMD_SEND_IM_EX = 0x1A
QQ_CLUSTER_CMD_SET_ROLE = 0x1B
QQ_CLUSTER_CMD_TRANSFER_ROLE = 0x1C
QQ_CLUSTER_CMD_CREATE_TEMP = 0x30
QQ_CLUSTER_CMD_MODIFY_TEMP_MEMBER = 0x31
QQ_CLUSTER_CMD_EXIT_TEMP = 0x32
QQ_CLUSTER_CMD_GET_TEMP_INFO = 0x33
QQ_CLUSTER_CMD_MODIFY_TEMP_INFO = 0x34
QQ_CLUSTER_CMD_SEND_TEMP_IM = 0x35
QQ_CLUSTER_CMD_SUB_CLUSTER_OP = 0x36
QQ_CLUSTER_CMD_ACTIVATE_TEMP = 0x37
QQ_CLUSTER_SUB_CMD_ADD_MEMBER = 0x01
QQ_CLUSTER_SUB_CMD_REMOVE_MEMBER = 0x02
QQ_CLUSTER_SUB_CMD_GET_SUBJECT_LIST = 0x02
QQ_CLUSTER_SUB_CMD_GET_DIALOG_LIST = 0x01
QQ_SUB_CMD_GET_ONLINE_FRIEND = 0x2
QQ_SUB_CMD_GET_ONLINE_SERVICE = 0x3
QQ_SUB_CMD_UPLOAD_GROUP_NAME = 0x2
QQ_SUB_CMD_DOWNLOAD_GROUP_NAME = 0x1
QQ_SUB_CMD_SEND_TEMP_SESSION_IM = 0x01
QQ_SUB_CMD_BATCH_DOWNLOAD_FRIEND_REMARK = 0x0
QQ_SUB_CMD_UPLOAD_FRIEND_REMARK = 0x1
QQ_SUB_CMD_REMOVE_FRIEND_FROM_LIST = 0x2
QQ_SUB_CMD_DOWNLOAD_FRIEND_REMARK = 0x3
QQ_SUB_CMD_MODIFY_SIGNATURE = 0x01
QQ_SUB_CMD_DELETE_SIGNATURE = 0x02
QQ_SUB_CMD_GET_SIGNATURE = 0x03
QQ_SUB_CMD_GET_USER_PROPERTY = 0x01
QQ_SUB_CMD_GET_WEATHER = 0x01
# file-transfer commands (separate numeric space from QQ_CMD_*)
QQ_FILE_CMD_HEART_BEAT = 0x0001
QQ_FILE_CMD_HEART_BEAT_ACK = 0x0002
QQ_FILE_CMD_TRANSFER_FINISHED = 0x0003
QQ_FILE_CMD_FILE_OP = 0x0007
QQ_FILE_CMD_FILE_OP_ACK = 0x0008
QQ_FILE_CMD_SENDER_SAY_HELLO = 0x0031
QQ_FILE_CMD_SENDER_SAY_HELLO_ACK = 0x0032
QQ_FILE_CMD_RECEIVER_SAY_HELLO = 0x0033
QQ_FILE_CMD_RECEIVER_SAY_HELLO_ACK = 0x0034
QQ_FILE_CMD_NOTIFY_IP_ACK = 0x003C
QQ_FILE_CMD_PING = 0x003D
QQ_FILE_CMD_PONG = 0x003E
QQ_FILE_CMD_YES_I_AM_BEHIND_FIREWALL = 0x0040
QQ_FILE_CMD_REQUEST_AGENT = 0x0001
QQ_FILE_CMD_CHECK_IN = 0x0002
QQ_FILE_CMD_FORWARD = 0x0003
QQ_FILE_CMD_FORWARD_FINISHED = 0x0004
QQ_FILE_CMD_IT_IS_TIME = 0x0005
QQ_FILE_CMD_I_AM_READY = 0x0006
# Reverse map from command code to symbolic name, for display/debugging.
# NOTE(review): keys 0x0022 and 0x0026 appear twice; the later
# "_CMD_REQUEST_FACE"/"_CMD_REQUEST_BEGIN" entries silently overwrite
# QQ_CMD_LOGIN and QQ_CMD_GET_FRIEND_LIST in the dict literal.
command_str = {
    0x0001: "QQ_CMD_LOGOUT",
    0x0002: "QQ_CMD_KEEP_ALIVE",
    0x0004: "QQ_CMD_MODIFY_INFO",
    0x0005: "QQ_CMD_SEARCH_USER",
    0x0006: "QQ_CMD_GET_USER_INFO",
    0x0009: "QQ_CMD_ADD_FRIEND",
    0x000A: "QQ_CMD_DELETE_FRIEND",
    0x000B: "QQ_CMD_ADD_FRIEND_AUTH",
    0x000D: "QQ_CMD_CHANGE_STATUS",
    0x0012: "QQ_CMD_ACK_SYS_MSG",
    0x0016: "QQ_CMD_SEND_IM",
    0x0017: "QQ_CMD_RECV_IM",
    0x001C: "QQ_CMD_REMOVE_SELF",
    0x001D: "QQ_CMD_REQUEST_KEY",
    0x0022: "QQ_CMD_LOGIN",
    0x0026: "QQ_CMD_GET_FRIEND_LIST",
    0x0027: "QQ_CMD_GET_ONLINE_OP",
    0x002D: "QQ_CMD_SEND_SMS",
    0x0030: "QQ_CMD_CLUSTER_CMD",
    0x0031: "QQ_CMD_TEST",
    0x003C: "QQ_CMD_GROUP_DATA_OP",
    0x003D: "QQ_CMD_UPLOAD_GROUP_FRIEND",
    0x003E: "QQ_CMD_FRIEND_DATA_OP",
    0x0058: "QQ_CMD_DOWNLOAD_GROUP_FRIEND",
    0x005C: "QQ_CMD_FRIEND_LEVEL_OP",
    0x005E: "QQ_CMD_PRIVACY_DATA_OP",
    0x005F: "QQ_CMD_CLUSTER_DATA_OP",
    0x0061: "QQ_CMD_ADVANCED_SEARCH",
    0x0062: "QQ_CMD_REQUEST_LOGIN_TOKEN",
    0x0065: "QQ_CMD_USER_PROPERTY_OP",
    0x0066: "QQ_CMD_TEMP_SESSION_OP",
    0x0067: "QQ_CMD_SIGNATURE_OP",
    0x0080: "QQ_CMD_RECV_MSG_SYS",
    0x0081: "QQ_CMD_RECV_MSG_FRIEND_CHANGE_STATUS",
    0x00A6: "QQ_CMD_WEATHER_OP",
    0x00A7: "QQ_CMD_ADD_FRIEND_EX",
    0x00A8: "QQ_CMD_AUTHORIZE",
    0xFFFF: "QQ_CMD_UNKNOWN",
    0x0021: "_CMD_REQUEST_AGENT",
    0x0022: "_CMD_REQUEST_FACE",
    0x0023: "_CMD_TRANSFER",
    0x0026: "_CMD_REQUEST_BEGIN",
    }
class QQBasicPacket(Packet):
    # Fixed header for QQ_HEADER_BASIC_FAMILY (0x02) packets.
    # Each entry is (field_name, struct format code, default value) per
    # dpkt's __hdr__ convention.
    __hdr__ = (
        ('header_type', 'B', 2),
        ('source', 'H', 0),
        ('command', 'H', 0),
        ('sequence', 'H', 0),
        ('qqNum', 'L', 0),
        )
class QQ3Packet(Packet):
    # Fixed header for QQ_HEADER_03_FAMILY (0x03) packets; most fields are
    # not yet reverse-engineered, hence the 'unknownN' placeholders.
    __hdr__ = (
        ('header_type', 'B', 3),
        ('command', 'B', 0),
        ('sequence', 'H', 0),
        ('unknown1', 'L', 0),
        ('unknown2', 'L', 0),
        ('unknown3', 'L', 0),
        ('unknown4', 'L', 0),
        ('unknown5', 'L', 0),
        ('unknown6', 'L', 0),
        ('unknown7', 'L', 0),
        ('unknown8', 'L', 0),
        ('unknown9', 'L', 0),
        ('unknown10', 'B', 1),
        ('unknown11', 'B', 0),
        ('unknown12', 'B', 0),
        ('source', 'H', 0),
        ('unknown13', 'B', 0),
        )
class QQ5Packet(Packet):
    # Fixed header for QQ_HEADER_05_FAMILY (0x05) packets.
    __hdr__ = (
        ('header_type', 'B', 5),
        ('source', 'H', 0),
        ('unknown', 'H', 0),
        ('command', 'H', 0),
        ('sequence', 'H', 0),
        ('qqNum', 'L', 0),
        )
| {
"repo_name": "FunctionAnalysis/dpkt",
"path": "dpkt/qq.py",
"copies": "17",
"size": "6773",
"license": "bsd-3-clause",
"hash": 3831721841113930000,
"line_mean": 29.2366071429,
"line_max": 51,
"alpha_frac": 0.6531817511,
"autogenerated": false,
"ratio": 2.278170198452741,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
from __future__ import with_statement
__author__ = 'Thomas Rueckstiess, [email protected]'
import random
import pickle
from itertools import chain
from scipy import zeros, resize, ravel, asarray
from pybrain.utilities import Serializable
class OutOfSyncError(Exception):
    """Raised when linked dataset fields disagree in length."""
class VectorFormatError(Exception):
    """Raised for an unrecognized vector format specifier."""
class NoLinkedFieldsError(Exception):
    """Raised when a linked-field operation runs on an unlinked dataset."""
class DataSet(Serializable):
    """DataSet is a general base class for other data set classes
    (e.g. SupervisedDataSet, SequentialDataSet, ...). It consists of several
    fields. A field is a NumPy array with a label (a string) attached to it.
    Fields can be linked together which means they must have the same length."""

    def __init__(self):
        self.data = {}
        self.endmarker = {}
        self.link = []
        self.index = 0

        # row vectors returned by getLinked can have different formats:
        # '1d' example: array([1, 2, 3])
        # '2d' example: array([[1, 2, 3]])
        # 'list' example: [1, 2, 3]
        self.vectorformat = 'none'

    def __str__(self):
        """Return a string representation of a dataset."""
        s = ""
        for key in self.data:
            s = s + key + ": dim" + str(self.data[key].shape) + "\n" + str(self.data[key][:self.endmarker[key]]) + "\n\n"
        return s

    def __getitem__(self, field):
        """Return the given field."""
        return self.getField(field)

    def __iter__(self):
        self.reset()
        while not self.endOfData():
            yield self.getLinked()

    def getVectorFormat(self):
        """Returns the current vector format."""
        return self.__vectorformat

    def setVectorFormat(self, vf):
        """Determine which format to use for returning vectors. Use the property vectorformat.
        @param type: possible types are '1d', '2d', 'list'
        '1d' - example: array([1,2,3])
        '2d' - example: array([[1,2,3]])
        'list' - example: [1,2,3]
        'none' - no conversion
        """
        switch = {
            '1d': self._convertArray1d,
            '2d': self._convertArray2d,
            'list': self._convertList,
            # 'lambda(x):x' was Python-2-only tuple-argument syntax; this
            # identity form is equivalent and valid everywhere.
            'none': lambda x: x
        }
        try:
            self._convert = switch[vf]
            self.__vectorformat = vf
        except KeyError:
            raise VectorFormatError("vector format must be one of '1d', '2d', 'list'. given: %s" % vf)

    vectorformat = property(getVectorFormat, setVectorFormat, None, "vectorformat can be '1d', '2d' or 'list'")

    def _convertList(self, vector):
        """Converts the incoming vector to a python list."""
        return ravel(vector).tolist()

    def _convertArray1d(self, vector):
        """Converts the incoming vector to a 1d vector with shape (x,) where x
        is the number of elements."""
        return ravel(vector)

    def _convertArray2d(self, vector, column=False):
        """Converts the incoming `vector` to a 2d vector with shape (1,x), or
        (x,1) if `column` is set, where x is the number of elements."""
        a = asarray(vector)
        sh = a.shape
        # also reshape scalar values to 2d-index
        if len(sh) == 0:
            sh = (1,)
        if len(sh) == 1:
            # use reshape to add extra dimension
            if column:
                return a.reshape((sh[0], 1))
            else:
                return a.reshape((1, sh[0]))
        else:
            # vector is not 1d, return a without change
            return a

    def addField(self, label, dim):
        """Add a field to the dataset.
        A field consists of a string `label` and a numpy ndarray of dimension
        `dim`."""
        self.data[label] = zeros((0, dim), float)
        self.endmarker[label] = 0

    def setField(self, label, arr):
        """Set the given array `arr` as the new array of field `label`,"""
        as_arr = asarray(arr)
        self.data[label] = as_arr
        self.endmarker[label] = as_arr.shape[0]

    def linkFields(self, linklist):
        """Link the length of several fields given by the list of strings
        `linklist`."""
        length = self[linklist[0]].shape[0]
        for l in linklist:
            if self[l].shape[0] != length:
                raise OutOfSyncError
        self.link = linklist

    def unlinkFields(self, unlinklist=None):
        """Remove fields from the link list or clears link given by the list of
        string `linklist`.
        This method has no effect if fields are not linked."""
        link = self.link
        if unlinklist is not None:
            for l in unlinklist:
                if l in self.link:
                    link.remove(l)
            self.link = link
        else:
            self.link = []

    def getDimension(self, label):
        """Return the dimension/number of columns for the field given by
        `label`."""
        try:
            dim = self.data[label].shape[1]
        except KeyError:
            raise KeyError('dataset field %s not found.' % label)
        return dim

    def __len__(self):
        """Return the length of the linked data fields. If no linked fields exist,
        return the length of the longest field."""
        return self.getLength()

    def getLength(self):
        """Return the length of the linked data fields. If no linked fields exist,
        return the length of the longest field."""
        if self.link == []:
            try:
                # NOTE(review): max() over the dict yields the
                # lexicographically largest *label*, not the longest field,
                # despite the docstring -- confirm before relying on it.
                length = self.endmarker[max(self.endmarker)]
            except ValueError:
                return 0
            return length
        else:
            # all linked fields have equal length. return the length of the first.
            l = self.link[0]
            return self.endmarker[l]

    def _resize(self, label=None):
        # Grow a single field, all linked fields, or every field.
        if label:
            label = [label]
        elif self.link:
            label = self.link
        else:
            label = self.data
        for l in label:
            self.data[l] = self._resizeArray(self.data[l])

    def _resizeArray(self, a):
        """Increase the buffer size. It should always be one longer than the
        current sequence length and double on every growth step."""
        shape = list(a.shape)
        shape[0] = (shape[0]+1) * 2
        return resize(a, shape)

    def _appendUnlinked(self, label, row):
        """Append `row` to the field array with the given `label`.
        Do not call this function from outside, use ,append() instead.
        Automatically casts vector to a 2d (or higher) shape."""
        if self.data[label].shape[0] <= self.endmarker[label]:
            self._resize(label)
        self.data[label][self.endmarker[label],:] = row
        self.endmarker[label] += 1

    def append(self, label, row):
        """Append `row` to the array given by `label`.
        If the field is linked with others, the function throws an
        `OutOfSyncError` because all linked fields always have to have the same
        length. If you want to add a row to all linked fields, use appendLink
        instead."""
        if label in self.link:
            raise OutOfSyncError
        self._appendUnlinked(label, row)

    def appendLinked(self, *args):
        """Add rows to all linked fields at once."""
        assert len(args) == len(self.link)
        for i,l in enumerate(self.link):
            self._appendUnlinked(l, args[i])

    def getLinked(self, index=None):
        """Access the dataset randomly or sequential.
        If called with `index`, the appropriate line consisting of all linked
        fields is returned and the internal marker is set to the next line.
        Otherwise the marked line is returned and the marker is moved to the
        next line."""
        if self.link == []:
            raise NoLinkedFieldsError('The dataset does not have any linked fields.')
        if index is None:
            # no index given, return the currently marked line and step marker one line forward
            index = self.index
            self.index += 1
        else:
            # return the indexed line and move marker to next line
            self.index = index + 1
        if index >= self.getLength():
            raise IndexError('index out of bounds of the dataset.')
        return [self._convert(self.data[l][index]) for l in self.link]

    def getField(self, label):
        """Return the entire field given by `label` as an array or list,
        depending on user settings."""
        if self.vectorformat == 'list':
            return self.data[label][:self.endmarker[label]].tolist()
        else:
            return self.data[label][:self.endmarker[label]]

    def hasField(self, label):
        """Tell whether the field given by `label` exists."""
        # 'in' replaces the Python-2-only dict.has_key with identical behavior.
        return label in self.data

    def getFieldNames(self):
        """Return the names of the currently defined fields."""
        return self.data.keys()

    def convertField(self, label, newtype):
        """Convert the given field to a different data type."""
        try:
            self.setField(label, self.data[label].astype(newtype))
        except KeyError:
            raise KeyError('convertField: dataset field %s not found.' % label)

    def endOfData(self):
        """Tell if the end of the data set is reached."""
        return self.index == self.getLength()

    def reset(self):
        """Reset the marker to the first line."""
        self.index = 0

    def clear(self, unlinked=False):
        """Clear the dataset.
        If linked fields exist, only the linked fields will be deleted unless
        `unlinked` is set to True. If no fields are linked, all data will be
        deleted."""
        self.reset()
        keys = self.link
        if keys == [] or unlinked:
            # iterate over all fields instead
            keys = self.data
        for k in keys:
            shape = list(self.data[k].shape)
            # set to zero rows
            shape[0] = 0
            self.data[k] = zeros(shape)
            self.endmarker[k] = 0

    @classmethod
    def reconstruct(cls, filename):
        """Read an incomplete data set (option arraysonly) into the given one. """
        # FIXME: Obsolete! Kept here because of some old files...
        obj = cls(1,1)
        for key, val in pickle.load(file(filename)).iteritems():
            obj.setField(key, val)
        return obj

    def save_pickle(self, flo, protocol=0, compact=False):
        """Save data set as pickle, removing empty space if desired."""
        if compact:
            # remove padding of zeros for each field
            # NOTE(review): the +1 keeps one extra (padding) row per field --
            # confirm whether endmarker alone was intended.
            for field in self.getFieldNames():
                temp = self[field][0:self.endmarker[field]+1,:]
                self.setField(field, temp)
        Serializable.save_pickle(self, flo, protocol)

    def __reduce__(self):
        def creator():
            obj = self.__class__()
            obj.vectorformat = self.vectorformat
            return obj
        args = tuple()
        state = {
            'data': self.data,
            'link': self.link,
            'endmarker': self.endmarker,
        }
        return creator, args, state, iter([]), iter({})

    def copy(self):
        """Return a deep copy."""
        import copy
        return copy.deepcopy(self)

    def batches(self, label, n, permutation=None):
        """Yield batches of the size of n from the dataset.
        A single batch is an array of with dim columns and n rows. The last
        batch is possibly smaller.
        If permutation is given, batches are yielded in the corresponding
        order."""
        # First calculate how many batches we will have
        full_batches, rest = divmod(len(self), n)
        number_of_batches = full_batches if rest == 0 else full_batches + 1

        # We make one iterator for the startindexes ...
        startindexes = (i * n for i in xrange(number_of_batches))
        # ... and one for the stop indexes
        stopindexes = (((i + 1) * n) for i in xrange(number_of_batches - 1))
        # The last stop index is the last element of the list (last batch
        # might not be filled completely)
        stopindexes = chain(stopindexes, [len(self)])
        # Now combine them
        indexes = zip(startindexes, stopindexes)

        # Shuffle them according to the permutation if one is given
        if permutation is not None:
            indexes = [indexes[i] for i in permutation]

        for start, stop in indexes:
            yield self.data[label][start:stop]

    def randomBatches(self, label, n):
        """Like .batches(), but yield the batches in random order."""
        # BUG FIX: random.shuffle() shuffles in place and returns None, so
        # the original code always passed permutation=None and the order was
        # never random.  Build the permutation over the *batch* indices
        # (that is what .batches() indexes with) and shuffle it explicitly.
        full_batches, rest = divmod(len(self), n)
        number_of_batches = full_batches if rest == 0 else full_batches + 1
        permutation = list(range(number_of_batches))
        random.shuffle(permutation)
        return self.batches(label, n, permutation)
| {
"repo_name": "daanwierstra/pybrain",
"path": "pybrain/datasets/dataset.py",
"copies": "1",
"size": "13134",
"license": "bsd-3-clause",
"hash": -3280491516552942600,
"line_mean": 35.4861111111,
"line_max": 122,
"alpha_frac": 0.5615958581,
"autogenerated": false,
"ratio": 4.283757338551859,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5345353196651859,
"avg_score": null,
"num_lines": null
} |
# $Id$
from mod_python import apache
import cgi, fakefile
class Error(Exception):
    """Module-level base error for the mod_python request adapter."""
class InputStream(fakefile.FakeInput):
    """Adapts a mod_python request body to the FakeInput read interface."""
    def __init__(self, modpy_req):
        fakefile.FakeInput.__init__(self)
        self._modpy_req = modpy_req
    def _read(self, size=-1):
        # Delegate to mod_python's request.read(); size=-1 reads to EOF.
        return self._modpy_req.read(size)
class Request(cgi.Request):
    def _init(self, modpy_req):
        """Bind the mod_python request and set up CGI-style request state."""
        self._modpy_req = modpy_req
        self._build_environ()
        # Tracks whether a Location: header has been set (turns the
        # response into a redirect in output_headers()).
        self._redirected = 0
        self.stdin = InputStream(modpy_req)
        super(Request, self)._init()
def _build_environ(self):
modpy_req = self._modpy_req
modpy_req.add_common_vars()
self.environ = {}
env = self.environ
for key in modpy_req.subprocess_env.keys():
env[key] = modpy_req.subprocess_env[key]
if len(modpy_req.path_info):
env["SCRIPT_NAME"] = modpy_req.uri[:-len(modpy_req.path_info)]
else:
env["SCRIPT_NAME"] = modpy_req.uri
def output_headers(self):
if self._doneHeaders:
raise cgi.SequencingError("output_headers() called twice")
if self._redirected:
self._modpy_req.status = apache.HTTP_MOVED_TEMPORARILY
self._modpy_req.send_http_header()
self._doneHeaders = 1
def clear_headers(self):
if self._doneHeaders:
raise cgi.SequencingError("cannot clear_headers() after output_headers()")
for key in self._modpy_req.headers_out.keys():
del self._modpy_req.headers_out[key]
self._redirected = 0
def add_header(self, hdr, val):
if self._doneHeaders:
raise cgi.SequencingError(
"cannot add_header(%s) after output_headers()" % repr(hdr))
if hdr.lower() == "content-type":
self._modpy_req.content_type = val
else:
self._modpy_req.headers_out.add(hdr, val)
if hdr.lower() == "location":
self._redirected = 1
def set_header(self, hdr, val):
if self._doneHeaders:
raise cgi.SequencingError(
"cannot set_header(%s) after output_headers()" % repr(hdr))
if hdr.lower() == "content-type":
self._modpy_req.content_type = val
else:
self._modpy_req.headers_out[hdr] = val
if hdr.lower() == "location":
self._redirected = 1
def get_header(self, hdr, index=0):
val = self._modpy_req.headers_out.get(hdr)
if val is None:
return val
if isinstance(val, str):
if index == 0:
return val
return None
if len(val) > index:
return val[index]
return None
def del_header(self, hdr, val):
if self._doneHeaders:
raise cgi.SequencingError(
"cannot del_header(%s) after output_headers()" % repr(hdr))
if hdr.lower() == "content-Type":
raise Error("cannot del_header(\"Content-Type\")")
del self._modpy_req.headers_out[hdr]
if hdr.lower() == "location":
self._redirected = 0
def process(self, modpy_req):
self._init(modpy_req)
try:
handler = self._handler_type()
except:
self.traceback()
else:
try:
handler.process(self)
except:
handler.traceback(self)
self.flush()
return apache.OK
def error(self, s):
apache.log_error(s, apache.APLOG_ERR, self._modpy_req.server)
def _write(self, s):
if not self.aborted:
self._modpy_req.write(s)
class GZipRequest(cgi.GZipMixIn, Request):
    """Request variant that gzip-compresses its output via cgi.GZipMixIn."""
    pass
| {
"repo_name": "jribbens/jonpy",
"path": "jon/modpy.py",
"copies": "1",
"size": "3277",
"license": "mit",
"hash": -2300838051737321500,
"line_mean": 25.8606557377,
"line_max": 80,
"alpha_frac": 0.6280134269,
"autogenerated": false,
"ratio": 3.2381422924901186,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9290307517418144,
"avg_score": 0.015169640394394826,
"num_lines": 122
} |
# $Id$
from module_base import ModuleBase
from module_mixins import FilenameViewModuleMixin
import module_utils
import vtk
class vtpRDR(FilenameViewModuleMixin, ModuleBase):
    """DeVIDE reader module that loads vtkPolyData from a VTK XML (.vtp) file."""

    def __init__(self, module_manager):
        # call parent constructor
        ModuleBase.__init__(self, module_manager)
        self._reader = vtk.vtkXMLPolyDataReader()
        # ctor for this specific mixin
        FilenameViewModuleMixin.__init__(
            self,
            'Select a filename',
            'VTK Poly Data (*.vtp)|*.vtp|All files (*)|*',
            {'vtkXMLPolyDataReader': self._reader})
        module_utils.setup_vtk_object_progress(
            self, self._reader,
            'Reading VTK PolyData')
        self._viewFrame = None
        # set up some defaults
        self._config.filename = ''
        self.sync_module_logic_with_config()

    def close(self):
        """Release the reader and shut down the mixin view."""
        del self._reader
        FilenameViewModuleMixin.close(self)

    def get_input_descriptions(self):
        # Pure reader module: no inputs.
        return ()

    def set_input(self, idx, input_stream):
        raise Exception

    def get_output_descriptions(self):
        return ('vtkPolyData',)

    def get_output(self, idx):
        return self._reader.GetOutput()

    def logic_to_config(self):
        """Copy the reader's current filename into the module config."""
        filename = self._reader.GetFileName()
        # Idiom fix: compare to None with "is", not "==".
        if filename is None:
            filename = ''
        self._config.filename = filename

    def config_to_logic(self):
        self._reader.SetFileName(self._config.filename)

    def view_to_config(self):
        self._config.filename = self._getViewFrameFilename()

    def config_to_view(self):
        self._setViewFrameFilename(self._config.filename)

    def execute_module(self):
        # get the vtkPolyDataReader to try and execute, but only when a
        # filename has actually been configured
        if len(self._reader.GetFileName()):
            self._reader.Update()
| {
"repo_name": "nagyistoce/devide",
"path": "modules/readers/vtpRDR.py",
"copies": "7",
"size": "1863",
"license": "bsd-3-clause",
"hash": -8256594372240891000,
"line_mean": 26,
"line_max": 60,
"alpha_frac": 0.6049382716,
"autogenerated": false,
"ratio": 4.205417607223477,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.015103257823074387,
"num_lines": 69
} |
# $Id$
from module_base import ModuleBase
from module_mixins import FilenameViewModuleMixin
import module_utils
import vtk
class plyRDR(FilenameViewModuleMixin, ModuleBase):
    """DeVIDE reader module that loads vtkPolyData from a Stanford PLY file."""

    def __init__(self, module_manager):
        # call parent constructor
        ModuleBase.__init__(self, module_manager)
        self._reader = vtk.vtkPLYReader()
        # ctor for this specific mixin
        FilenameViewModuleMixin.__init__(
            self,
            'Select a filename',
            '(Stanford) Polygon File Format (*.ply)|*.ply|All files (*)|*',
            {'vtkPLYReader': self._reader,
             'Module (self)' : self})
        module_utils.setup_vtk_object_progress(
            self, self._reader,
            'Reading PLY PolyData')
        # set up some defaults
        self._config.filename = ''
        # there is no view yet...
        self._module_manager.sync_module_logic_with_config(self)

    def close(self):
        """Release the reader and shut down the mixin view."""
        del self._reader
        FilenameViewModuleMixin.close(self)

    def get_input_descriptions(self):
        # Pure reader module: no inputs.
        return ()

    def set_input(self, idx, input_stream):
        raise Exception

    def get_output_descriptions(self):
        return ('vtkPolyData',)

    def get_output(self, idx):
        return self._reader.GetOutput()

    def logic_to_config(self):
        """Copy the reader's current filename into the module config."""
        filename = self._reader.GetFileName()
        # Idiom fix: compare to None with "is", not "==".
        if filename is None:
            filename = ''
        self._config.filename = filename

    def config_to_logic(self):
        self._reader.SetFileName(self._config.filename)

    def view_to_config(self):
        self._config.filename = self._getViewFrameFilename()

    def config_to_view(self):
        self._setViewFrameFilename(self._config.filename)

    def execute_module(self):
        # get the vtkPLYReader to try and execute, but only when a
        # filename has actually been configured
        if len(self._reader.GetFileName()):
            self._reader.Update()
"repo_name": "nagyistoce/devide",
"path": "modules/readers/plyRDR.py",
"copies": "7",
"size": "1907",
"license": "bsd-3-clause",
"hash": 1727189728149308400,
"line_mean": 25.8732394366,
"line_max": 75,
"alpha_frac": 0.6030414263,
"autogenerated": false,
"ratio": 4.209713024282561,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.831275445058256,
"avg_score": null,
"num_lines": null
} |
# $Id$
from module_base import ModuleBase
from module_mixins import FilenameViewModuleMixin
import module_utils
import vtk
class vtiWRT(FilenameViewModuleMixin, ModuleBase):
    """DeVIDE writer module that saves vtkImageData to a VTK XML (.vti) file."""

    def __init__(self, module_manager):
        # call parent constructor
        ModuleBase.__init__(self, module_manager)
        self._writer = vtk.vtkXMLImageDataWriter()
        # ctor for this specific mixin
        FilenameViewModuleMixin.__init__(
            self,
            'Select a filename',
            'VTK Image Data (*.vti)|*.vti|All files (*)|*',
            {'vtkXMLImageDataWriter': self._writer},
            fileOpen=False)
        module_utils.setup_vtk_object_progress(
            self, self._writer,
            'Writing VTK ImageData')
        # binary mode keeps the files compact
        self._writer.SetDataModeToBinary()
        # set up some defaults
        self._config.filename = ''
        self._module_manager.sync_module_logic_with_config(self)

    def close(self):
        # we should disconnect all inputs
        self.set_input(0, None)
        del self._writer
        FilenameViewModuleMixin.close(self)

    def get_input_descriptions(self):
        return ('vtkImageData',)

    def set_input(self, idx, input_stream):
        self._writer.SetInput(input_stream)

    def get_output_descriptions(self):
        # Pure writer module: no outputs.
        return ()

    def get_output(self, idx):
        raise Exception

    def logic_to_config(self):
        """Copy the writer's current filename into the module config."""
        filename = self._writer.GetFileName()
        # Idiom fix: compare to None with "is", not "==".
        if filename is None:
            filename = ''
        self._config.filename = filename

    def config_to_logic(self):
        self._writer.SetFileName(self._config.filename)

    def view_to_config(self):
        self._config.filename = self._getViewFrameFilename()

    def config_to_view(self):
        self._setViewFrameFilename(self._config.filename)

    def execute_module(self):
        # only write when both a filename and an input are present
        if len(self._writer.GetFileName()) and self._writer.GetInput():
            self._writer.GetInput().UpdateInformation()
            self._writer.GetInput().SetUpdateExtentToWholeExtent()
            self._writer.GetInput().Update()
            self._writer.Write()

    def streaming_execute_module(self):
        """Streaming variant: write the data in the configured number of pieces."""
        if len(self._writer.GetFileName()) and self._writer.GetInput():
            sp = self._module_manager.get_app_main_config().streaming_pieces
            self._writer.SetNumberOfPieces(sp)
            self._writer.Write()
| {
"repo_name": "fvpolpeta/devide",
"path": "modules/writers/vtiWRT.py",
"copies": "7",
"size": "2401",
"license": "bsd-3-clause",
"hash": 1532717639169082400,
"line_mean": 27.9277108434,
"line_max": 76,
"alpha_frac": 0.6159933361,
"autogenerated": false,
"ratio": 4.111301369863014,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8227294705963014,
"avg_score": null,
"num_lines": null
} |
# $Id$
from module_base import ModuleBase
from module_mixins import ScriptedConfigModuleMixin
import module_utils
import vtk
import wx # needs this for wx.OPEN, we need to make this constant available
# elsewhere
class pngWRT(ScriptedConfigModuleMixin, ModuleBase):
    """DeVIDE writer module: rescales input image data to unsigned short
    and writes it out as a series of PNG files."""

    def __init__(self, module_manager):
        # call parent constructor
        ModuleBase.__init__(self, module_manager)
        # PNG stores integer samples; convert whatever arrives to unsigned
        # short after shift/scaling into the 16-bit range.
        self._shiftScale = vtk.vtkImageShiftScale()
        self._shiftScale.SetOutputScalarTypeToUnsignedShort()
        module_utils.setup_vtk_object_progress(
            self, self._shiftScale,
            'Converting input to unsigned short.')
        self._writer = vtk.vtkPNGWriter()
        self._writer.SetFileDimensionality(3)
        self._writer.SetInput(self._shiftScale.GetOutput())
        module_utils.setup_vtk_object_progress(
            self, self._writer, 'Writing PNG file(s)')
        self._config.filePattern = '%d.png'
        configList = [
            ('File pattern:', 'filePattern', 'base:str', 'filebrowser',
             'Filenames will be built with this. See module help.',
             {'fileMode' : wx.OPEN,
              'fileMask' :
              'PNG files (*.png)|*.png|All files (*.*)|*.*'})]
        ScriptedConfigModuleMixin.__init__(
            self, configList,
            {'Module (self)' : self,
             'vtkPNGWriter' : self._writer})
        self.sync_module_logic_with_config()

    def close(self):
        # we play it safe... (the graph_editor/module_manager should have
        # disconnected us by now)
        for input_idx in range(len(self.get_input_descriptions())):
            self.set_input(input_idx, None)
        # this will take care of all display thingies
        ScriptedConfigModuleMixin.close(self)
        ModuleBase.close(self)
        # get rid of our reference
        del self._writer

    def get_input_descriptions(self):
        return ('vtkImageData',)

    def set_input(self, idx, input_stream):
        self._shiftScale.SetInput(input_stream)

    def get_output_descriptions(self):
        # Pure writer module: no outputs.
        return ()

    def get_output(self, idx):
        raise Exception

    def logic_to_config(self):
        self._config.filePattern = self._writer.GetFilePattern()

    def config_to_logic(self):
        self._writer.SetFilePattern(self._config.filePattern)

    def execute_module(self):
        """Rescale the input to the full unsigned-short range and write PNGs."""
        if len(self._writer.GetFilePattern()) and self._shiftScale.GetInput():
            inp = self._shiftScale.GetInput()
            inp.Update()
            minv, maxv = inp.GetScalarRange()
            self._shiftScale.SetShift(-minv)
            # Bug fix: a constant-valued image (maxv == minv) used to raise
            # ZeroDivisionError; use a neutral scale of 1.0 in that case.
            # 65535.0 also forces float division on all Python versions.
            value_range = maxv - minv
            if value_range:
                self._shiftScale.SetScale(65535.0 / value_range)
            else:
                self._shiftScale.SetScale(1.0)
            self._shiftScale.Update()
            self._writer.Write()
            self._module_manager.setProgress(
                100.0, "vtkPNGWriter: Writing PNG file(s). [DONE]")
| {
"repo_name": "chrisidefix/devide",
"path": "modules/writers/pngWRT.py",
"copies": "7",
"size": "3025",
"license": "bsd-3-clause",
"hash": 1704198060779807000,
"line_mean": 28.9504950495,
"line_max": 78,
"alpha_frac": 0.5980165289,
"autogenerated": false,
"ratio": 4.054959785522788,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.07204012972926133,
"num_lines": 101
} |
# $Id$
from Module import *
from Hpin import *
from Instance import *
from settings import *
class Hinstance:
def __init__(self, instance, name, mycanvas):
self.instance = instance
# Hierarhcy name
self.name = name
self.mycanvas = mycanvas
self.name_id = None
self.module_name_id = None
self.box_id = None
self.inputs = []
self.outputs = []
mycanvas.design.add_hinstance(self)
if self.instance.is_external_pin() == False:
for i in self.instance.module.inputs:
#self.inputs.append( Hpin(i) )
self.inputs.append( Hpin(self.instance.module.inputs[i]) )
for i in self.instance.module.outputs:
#self.outputs.append( Hpin(i) )
self.outputs.append( Hpin(self.instance.module.outputs[i]) )
elif self.instance.is_external_output() == True:
self.inputs.append( Hpin(self.mycanvas.design.external_input_pin) )
elif self.instance.is_external_input() == True:
self.outputs.append( Hpin(self.mycanvas.design.external_output_pin) )
def add_box_id(self, box_id):
if self.box_id != None:
print "add_box_id(): box_id is overwrited. id=",self.box_id, box_id
self.box_id = box_id
def add_name_id(self, name_id):
self.name_id = name_id
def get_xy(self):
coords = self.mycanvas.canvas.coords(self.box_id)
return [coords[0], coords[1]]
def get_module_name(self):
#print "get_module_name=", self.instance.get_module()
#return self.instance.module.name
return self.instance.get_module().name
def get_name(self):
return self.name
def get_width(self):
return self.instance.width
def get_height(self):
return self.instance.height
def get_output_pin_name(self, ith):
return self.instance.module.outputs[ith].name
def get_input_pin_name(self, ith):
return self.instance.module.inputs[ith].name
def get_tag(self):
return self.instance.get_tag()
def get_instance_box_id(self):
return self.box_id
def output_count(self):
return self.instance.output_count()
def input_count(self):
return self.instance.input_count()
def add_pin_ids(self, pin_ids):
self.pin_ids = pin_ids
#def add_pin_id_in_output(self, ith, id):
# return self.outputs[ith].add_id(id)
#def add_pin_id_in_input(self, ith, id):
# return self.inputs[ith].add_id(id)
| {
"repo_name": "tyamamot5/Verilog-netlist-viewer",
"path": "Hinstance.py",
"copies": "1",
"size": "2664",
"license": "apache-2.0",
"hash": 9087419153623468000,
"line_mean": 28.6,
"line_max": 85,
"alpha_frac": 0.5825825826,
"autogenerated": false,
"ratio": 3.4823529411764707,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4564935523776471,
"avg_score": null,
"num_lines": null
} |
# $Id$
from Module import *
from Pin import *
class Hpin:
def __init__(self, pin):
# Class Pin
self.pin = pin
# An id of the pin rectangle on canvas
self.id = None
# List of net ids connected with the pin on canvas
self.nets = []
def get_id(self):
return self.id
def add_id(self, pin_id):
if self.id != None:
print "Hpin.add_id(): id is overwrited. id=", self.id, pin_id
self.id = pin_id
def add_net_id(self, net_id):
#print "add_net_id(): net_id=", net_id, self.nets
if net_id in self.nets:
pass
else:
self.nets.append(net_id)
#print "add_net_id(): net_id=", net_id
def remove_net_id(self, net_id):
#print "remove_net_id(): net_id=", net_id, self.nets
self.nets.remove(net_id)
#print "remove_net_id(): net_id=", net_id, self.nets
def get_name(self):
return self.pin.name
def is_external_pin(self):
return self.pin.is_external_pin()
| {
"repo_name": "tyamamot5/Verilog-netlist-viewer",
"path": "Hpin.py",
"copies": "1",
"size": "1083",
"license": "apache-2.0",
"hash": -7311546504111538000,
"line_mean": 24.7857142857,
"line_max": 73,
"alpha_frac": 0.5244690674,
"autogenerated": false,
"ratio": 3.301829268292683,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4326298335692683,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from os.path import basename
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.EstimatesParser import EstimatesParser
from Api import Api
from json import dumps
# Base endpoint for every Estimates API call, e.g. <base>/estimates/<id>.
base_url = Api().base_url + 'estimates/'
# Shared parser and HTTP client instances reused by all EstimatesApi methods.
parser = EstimatesParser()
zoho_http_client = ZohoHttpClient()
class EstimatesApi:
    """Estimates Api class is used to:

    1.List all estimates with pagination.
    2.Get the details of an estimate.
    3.Create an estimate.
    4.Update an existing estimate.
    5.Delete an existing estimate.
    6.Mark a draft estimate as sent.
    7.Mark a sent estimate as accepted.
    8.Mark a sent estimate as declined.
    9.Email an estimate to the customer.
    10.Send estimates to your customer by email.
    11.Get the email content of an estimate.
    12.Export maximum of 25 pdfs as single pdf.
    13.Export estimates as pdf and print them.
    14.Update the billing address for the estimate.
    15.Update the shipping address for the estimate.
    16.Get all estimate pdf templates.
    17.Update the pdf template associated with the template.
    18.Get the complete history and comments of an estimate.
    19.Add a comment for an estimate.
    20.Update an existing comment of an estimate.
    21.Delete an estimate comment.
    """

    def __init__(self, authtoken, organization_id):
        """Initialize Estimates api using user's authtoken and
        organization id.

        Args:
            authtoken(str): User's authtoken.
            organization_id(str): User's organization id.
        """
        self.details = {
            'authtoken': authtoken,
            'organization_id': organization_id
        }

    def get_estimates(self, parameter=None):
        """Get list of estimates with pagination.

        Args:
            parameter(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            instance: Estimates list object.
        """
        response = zoho_http_client.get(base_url, self.details, parameter)
        return parser.get_list(response)

    def get(self, estimate_id, print_pdf=None, accept=None):
        """Get details of an estimate.

        Args:
            estimate_id(str): Estimate id.
            print_pdf(bool, None): True to print pdf else False.
            accept(str, None): Get the details in particular format such as
                json/pdf/html. Default format is json. Allowed values are
                json, pdf and html.

        Returns:
            instance: Estimate object (or raw file when pdf/html requested).
        """
        url = base_url + estimate_id
        if print_pdf is not None and accept is not None:
            query = {
                'print': print_pdf,
                'accept': accept
            }
            resp = zoho_http_client.getfile(url, self.details, query)
            return resp
        elif print_pdf is True:
            # Printing implies a pdf download.
            query = {
                'print': print_pdf,
                'accept': 'pdf'
            }
            resp = zoho_http_client.getfile(url, self.details, query)
            return resp
        elif accept is not None:
            query = {
                'accept': accept
            }
            resp = zoho_http_client.getfile(url, self.details, query)
            return resp
        else:
            response = zoho_http_client.get(url, self.details)
            return parser.get_estimate(response)

    def create(self, estimate, send=None, ignore_auto_number_generation=None):
        """Create an estimate.

        Args:
            estimate(instance): Estimate object.
            send(bool, optional): True to send estimate to the contact persons
                associated with the estimate. Allowed values are true and
                false.
            ignore_auto_number_generation(bool, optional): True to ignore auto
                estimate number generation for this estimate. Allowed values
                are true and false.

        Returns:
            instance: Estimate object.
        """
        if send is not None and ignore_auto_number_generation is not None:
            query = {
                'send': send,
                'ignore_auto_number_generation': ignore_auto_number_generation
            }
        elif send is not None or ignore_auto_number_generation is not None:
            query = {
                'send': send
            } if send is not None else {
                'ignore_auto_number_generation': ignore_auto_number_generation
            }
        else:
            query = None
        json_object = dumps(estimate.to_json())
        data = {
            'JSONString': json_object
        }
        response = zoho_http_client.post(base_url, self.details, data, query)
        return parser.get_estimate(response)

    def update(self, estimate_id, estimate,
               ignore_auto_number_generation=None):
        """Update an existing estimate.

        Args:
            estimate_id(str): Estimate id.
            estimate(instance): Estimate object.
            ignore_auto_number_generation(bool, optional): True to ignore auto
                estimate number generation for this estimate. Allowed values
                are true and false.

        Returns:
            instance: Estimate object.
        """
        url = base_url + estimate_id
        if ignore_auto_number_generation is not None:
            query = {
                'ignore_auto_number_generation': ignore_auto_number_generation
            }
        else:
            query = None
        json_object = dumps(estimate.to_json())
        data = {
            'JSONString': json_object
        }
        # Bug fix: the query parameters were built but never sent; pass them
        # through so ignore_auto_number_generation actually takes effect.
        response = zoho_http_client.put(url, self.details, data, query)
        return parser.get_estimate(response)

    def delete(self, estimate_id):
        """Delete an existing estimate.

        Args:
            estimate_id(str): Estimate id.

        Returns:
            str: Success message('The estimate has been deleted.').
        """
        url = base_url + estimate_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)

    def mark_an_estimate_as_sent(self, estimate_id):
        """Mark a draft estimate as sent.

        Args:
            estimate_id(str): Estimate id.

        Returns:
            str: Success message('Estimate status has been changed to Sent.').
        """
        url = base_url + estimate_id + '/status/sent'
        response = zoho_http_client.post(url, self.details, '')
        return parser.get_message(response)

    def mark_an_estimate_as_accepted(self, estimate_id):
        """Mark a sent estimate as accepted.

        Args:
            estimate_id(str): Estimate id.

        Returns:
            str: Success message('Estimate status has been changed to
                accepted.').
        """
        url = base_url + estimate_id + '/status/accepted'
        response = zoho_http_client.post(url, self.details, '')
        return parser.get_message(response)

    def mark_an_estimate_as_declined(self, estimate_id):
        """Mark a sent estimate as declined.

        Args:
            estimate_id(str): Estimate id.

        Returns:
            str: Success message('Estimate status has been changed to
                declined.').
        """
        url = base_url + estimate_id + '/status/declined'
        response = zoho_http_client.post(url, self.details, '')
        return parser.get_message(response)

    def email_an_estimate(self, estimate_id, email, attachment=None):
        """Email an estimate to the customer.

        Args:
            estimate_id(str): Estimate id.
            email(instance): Email object.
            attachment(list, optional): List of file paths to attach.

        Returns:
            str: Success message.
        """
        url = base_url + estimate_id + '/email'
        json_object = dumps(email.to_json())
        data = {
            'JSONString': json_object
        }
        if attachment is not None:
            file_list = []
            for value in attachment:
                # Bug fix: read each attachment through a context manager so
                # the file handle is closed promptly instead of leaked.
                with open(value) as attachment_file:
                    content = attachment_file.read()
                attachments = {
                    'attachments': {
                        'filename': basename(value),
                        'content': content
                    }
                }
                file_list.append(attachments)
        else:
            file_list = None
        response = zoho_http_client.post(url, self.details, data, None,
                                         file_list)
        return parser.get_message(response)

    def email_estimates(self, estimate_id):
        """Email estimates to the customer.

        Args:
            estimate_id(str): Estimate id.

        Returns:
            str: Success message('Mission accomplished.').
        """
        url = base_url + 'email'
        estimate_ids = {
            'estimate_ids': estimate_id
        }
        response = zoho_http_client.post(url, self.details, '',
                                         estimate_ids)
        return parser.get_message(response)

    def get_estimate_email_content(self, estimate_id, email_template_id=None):
        """Get estimate email content.

        Args:
            estimate_id(str): Estimate id.
            email_template_id(str): Email template id.

        Returns:
            instance: Email object.
        """
        url = base_url + estimate_id + '/email'
        if email_template_id is not None:
            query = {
                'email_template_id': email_template_id
            }
        else:
            query = None
        response = zoho_http_client.get(url, self.details, query)
        return parser.get_email_content(response)

    def bulk_export_estimates(self, estimate_id):
        """Export maximum of 25 estimates as pdf.

        Args:
            estimate_id(str): Comma separated estimate ids.

        Returns:
            file: Pdf containing details of estimates.
        """
        url = base_url + 'pdf'
        query = {
            'estimate_ids': estimate_id
        }
        response = zoho_http_client.getfile(url, self.details, query)
        return response

    def bulk_print_estimates(self, estimate_ids):
        """Export estimates as pdf and print them.

        Args:
            estimate_ids(str): Comma separated estimate ids.

        Returns:
            file: Pdf containing details of estimates.
        """
        url = base_url + 'print'
        query = {
            'estimate_ids': estimate_ids
        }
        response = zoho_http_client.getfile(url, self.details, query)
        return response

    def update_billing_address(self, estimate_id, address,
                               is_update_customer=None):
        """Update billing address.

        Args:
            estimate_id(str): Estimate id.
            address(instance): Address object.
            is_update_customer(bool, optional): True to update customer.

        Returns:
            instance: Address object.
        """
        url = base_url + estimate_id + '/address/billing'
        json_object = dumps(address.to_json())
        data = {
            'JSONString': json_object
        }
        if is_update_customer is not None:
            query = {
                'is_update_customer': is_update_customer
            }
        else:
            query = None
        response = zoho_http_client.put(url, self.details, data, query)
        return parser.get_billing_address(response)

    def update_shipping_address(self, estimate_id, address,
                                is_update_customer=None):
        """Update shipping address.

        Args:
            estimate_id(str): Estimate id.
            address(instance): Address object.
            is_update_customer(bool, optional): True to update customer
                else False.

        Returns:
            instance: Address object.
        """
        url = base_url + estimate_id + '/address/shipping'
        json_object = dumps(address.to_json())
        data = {
            'JSONString': json_object
        }
        if is_update_customer is not None:
            query = {
                'is_update_customer': is_update_customer
            }
        else:
            query = None
        response = zoho_http_client.put(url, self.details, data, query)
        return parser.get_shipping_address(response)

    def list_estimate_template(self):
        """List all estimate pdf templates.

        Returns:
            instance: Template list object.
        """
        url = base_url + 'templates'
        response = zoho_http_client.get(url, self.details)
        return parser.estimate_template_list(response)

    def update_estimate_template(self, estimate_id, template_id):
        """Update estimate template.

        Args:
            estimate_id(str): Estimate id.
            template_id(str): Template id.

        Returns:
            str: Success message('Estimate information has been updated.').
        """
        url = base_url + estimate_id + '/templates/' + template_id
        response = zoho_http_client.put(url, self.details, '')
        return parser.get_message(response)

    # Comments and History

    def list_comments_history(self, estimate_id):
        """Get complete history and comments.

        Args:
            estimate_id(str): Estimate id.

        Returns:
            list of instance: list of comments object.
        """
        url = base_url + estimate_id + '/comments'
        resp = zoho_http_client.get(url, self.details)
        return parser.get_comments(resp)

    def add_comment(self, estimate_id, description, show_comment_to_clients):
        """Add a comment for an estimate.

        Args:
            estimate_id(str): Estimate id.
            description(str): Description.
            show_comment_to_clients(bool): True to show comment to clients.

        Returns:
            instance: Comments object.
        """
        url = base_url + estimate_id + '/comments'
        data = {}
        data['description'] = description
        data['show_comment_to_clients'] = show_comment_to_clients
        field = {
            'JSONString': dumps(data)
        }
        resp = zoho_http_client.post(url, self.details, field)
        return parser.get_comment(resp)

    def update_comment(self, estimate_id, comment_id, description,
                       show_comment_to_clients):
        """Update an existing comment.

        Args:
            estimate_id(str): Estimate id.
            comment_id(str): Comment id.
            description(str): Description.
            show_comment_to_clients(bool): True to show comments to clients.

        Returns:
            instance: Comments object.
        """
        url = base_url + estimate_id + '/comments/' + comment_id
        data = {}
        data['description'] = description
        data['show_comment_to_clients'] = show_comment_to_clients
        field = {
            'JSONString': dumps(data)
        }
        resp = zoho_http_client.put(url, self.details, field)
        return parser.get_comment(resp)

    def delete_comment(self, estimate_id, comment_id):
        """Delete an existing comment.

        Args:
            estimate_id(str): Estimate id.
            comment_id(str): Comment id.

        Returns:
            str: Success message('The comment has been deleted.').
        """
        url = base_url + estimate_id + '/comments/' + comment_id
        resp = zoho_http_client.delete(url, self.details)
        return parser.get_message(resp)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/EstimatesApi.py",
"copies": "1",
"size": "15759",
"license": "mit",
"hash": 8430975704426706000,
"line_mean": 30.7721774194,
"line_max": 80,
"alpha_frac": 0.5554920998,
"autogenerated": false,
"ratio": 4.362956810631229,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01966297794002662,
"num_lines": 496
} |
#$Id$#
from os.path import basename
from json import dumps
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.ContactParser import ContactParser
from Api import Api
# Base endpoint for every Contacts API call, e.g. <base>/contacts/<id>.
base_url = Api().base_url + 'contacts/'
# Shared parser and HTTP client instances reused by all ContactsApi methods.
parser = ContactParser()
zoho_http_client = ZohoHttpClient()
class ContactsApi:
"""ContactsApi class is used:
1.To get the list of contacts for a particular organization.
2.To get details of particular contact.
3.To create a new contact for an organization.
4.To update a contact.
5.To delete a contact.
6.To mark a contact as active.
7.To mark a contact as inactive.
8.To enable payment reminders for a contact
9.To disable payment reminders for a contact.
10.To send email statement to a contact.
11.To get the statement mail content
12.To send email to a contact.
13.To list recent activities of a contact.
14.To list the refund history of a contact.
15.To track a contact for 1099 reporting.
16.To untrack a contact for 1099 reporting.
"""
def __init__(self, authtoken, organization_id):
"""Initialize Contacts Api using user's authtoken and organization id.
Args:
authtoken(str): User's authtoken.
organization_id(str): User's organization id.
"""
self.details={
'authtoken': authtoken,
'organization_id': organization_id
}
def get_contacts(self, parameter=None):
    """List contacts (with pagination) for the organization.

    Args:
        parameter(dict, optional): Filter with which the list has to be
            displayed. Defaults to None.

    Returns:
        instance: List of contact objects with pagination.
    """
    resp = zoho_http_client.get(base_url, self.details, parameter)
    return parser.get_contacts(resp)
def get(self, contact_id):
    """Get details of a single contact.

    Args:
        contact_id(str): Contact id of a particular contact.

    Returns:
        instance: Contact object.
    """
    contact_url = base_url + contact_id
    resp = zoho_http_client.get(contact_url, self.details, None)
    return parser.get_contact(resp)
def create(self, contact):
    """Create a contact.

    Args:
        contact(instance): Contact object.

    Returns:
        instance: Contact object.
    """
    field = {'JSONString': dumps(contact.to_json())}
    resp = zoho_http_client.post(base_url, self.details, field, None)
    return parser.get_contact(resp)
def update(self, contact_id, contact):
    """Update a contact with the given information.

    Args:
        contact_id(str): Contact id of the contact that has to be updated.
        contact(instance): Contact object carrying the updated information.

    Returns:
        instance: Contact object.

    Raises:
        Books exception: If status is not '200' or '201'.
    """
    contact_url = base_url + contact_id
    field = {'JSONString': dumps(contact.to_json())}
    resp = zoho_http_client.put(contact_url, self.details, field, None)
    return parser.get_contact(resp)
def delete(self, contact_id):
    """Delete a particular contact.

    Args:
        contact_id(str): Contact id of the contact to be deleted.

    Returns:
        str: Success message('The contact has been deleted').

    Raises:
        Books exception: If status is not '200' or '201'.
    """
    contact_url = base_url + contact_id
    resp = zoho_http_client.delete(contact_url, self.details)
    return parser.get_message(resp)
def mark_active(self, contact_id):
    """Mark a contact as active.

    Args:
        contact_id(str): Contact id of the contact.

    Returns:
        str: Success message('The contact has been marked as active').

    Raises:
        Books exception: If status is not '200' or '201'.
    """
    endpoint = base_url + contact_id + '/active'
    resp = zoho_http_client.post(endpoint, self.details, '')
    return parser.get_message(resp)
def mark_inactive(self, contact_id):
    """Mark a contact as inactive.

    Args:
        contact_id(str): Contact id of the contact.

    Returns:
        str: Success message('The contact has been marked as inactive').

    Raises:
        Books exception: If status is not '200' or '201'.
    """
    endpoint = base_url + contact_id + '/inactive'
    resp = zoho_http_client.post(endpoint, self.details, '')
    return parser.get_message(resp)
def enable_payment_reminder(self, contact_id):
    """Enable automated payment reminders for a contact.

    Args:
        contact_id(str): Contact id of the contact.

    Returns:
        str: Success message('All reminders associated with this contact
            have been enabled').

    Raises:
        Books exception: If status is not '200' or '201'.
    """
    endpoint = base_url + contact_id + '/paymentreminder/enable'
    resp = zoho_http_client.post(endpoint, self.details, '')
    return parser.get_message(resp)
def disable_payment_reminder(self, contact_id):
"""Disable automated payment reminders for a contact.
Args:
contact_id(str): Contact id of the contact.
Returns:
str: Success message('All reminders associated with this contact
have been disabled').
Raises:
Books exception: If status is not '200' or '201'.
"""
url = base_url + contact_id + '/paymentreminder/disable'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
def email_statement(self, contact_id, email,start_date=None, end_date=None,
attachments=None):
"""Email statement to the contact. If JSONString is not inputted, mail
will be sent with the default mail content.
Args:
contact_id(str): Contact id of the contact.
email(instance): Email.
start_date(str, optional): Starting date of the statement.
Default to None.
end_date(str, optional): Ending date of the statement.
Default to None.
If starting date and ending date is not given current month's
statement will be sent to the contact.
attachments(list): List of files to be attached.
Returns:
str: Success message('Statement has to been sent to the customer').
Raises:
Books exception: If status is not '200' or '201'.
"""
url = base_url + contact_id + '/statements/email'
data = {}
data = email.to_json()
if start_date is not None and end_date is not None:
data['start_date'] = start_date
data['end_date'] = end_date
fields = {
'JSONString': dumps(data)
}
if attachments is None:
response = zoho_http_client.post(url, self.details, fields)
else:
file_list = []
for value in attachments:
attachment = {
'attachments': {
'filename': basename(value),
'content': open(value).read()
}
}
file_list.append(attachment)
response = zoho_http_client.post(url, self.details, fields,
None, file_list)
return parser.get_message(response)
    def get_statement_mail_content(self, contact_id, start_date, end_date):
        """Get the statement mail content.

        Args:
            contact_id(str): Contact id of the contact.
            start_date(str): Start date for statement.
            end_date(str): End date for statement.

        Returns:
            instance: Email object.

        Raises:
            Books exception: If status is not '200' or '201'.

        """
        url = base_url + contact_id + '/statements/email'
        query_string = {
            'start_date': start_date,
            'end_date': end_date
        }
        response = zoho_http_client.get(url, self.details, query_string)
        return parser.get_mail_content(response)
def email_contact(self, contact_id, email, attachments=None,
send_customer_statement=None):
"""Send email to contact.
Args:
contact_id(str): Contact id of the contact.
email(instance): Email object.
attachments(list, optional): List of files to be attached.
Default to None.
send_customer_statement(bool, optional): Send customer statement
pdf with email. Default to None.
Returns:
str: Success message('Email has been sent').
Raises:
Books exception: If status is not '200' or '201'.
"""
url = base_url + contact_id + '/email'
json_object = dumps(email.to_json())
data = {
'JSONString': json_object
}
if attachments is not None and send_customer_statement is not None:
file_list = []
for value in attachments:
attachment = {
'attachments': {
'filename': basename(value),
'content': open(value).read()
}
}
file_list.append(attachment)
parameter = {
'send_customer_statement': send_customer_statement
}
response = zoho_http_client.post(url, self.details, data,
parameter, file_list)
elif attachments is not None:
file_list = []
for value in attachments:
attachment = {
'attachments': {
'filename': basename(value),
'content': open(value).read()
}
}
file_list.append(attachment)
response = zoho_http_client.post(url, self.details,
data, None, file_list)
elif send_customer_statement is not None:
parameter = {
'send_customer_statement': send_customer_statement
}
response = zoho_http_client.post(url, self.details, data,
parameter)
else:
response = zoho_http_client.post(url, self.details, data)
return parser.get_message(response)
def list_comments(self, contact_id):
"""List recent activities of a contact with pagination.
Args:
contact_id(str): Contact id of the contact.
Returns:
instance: Comments list object.
Raises:
Books exception: If status is not '200' or '201'.
"""
url = base_url + contact_id + '/comments'
response = zoho_http_client.get(url, self.details)
return parser.get_comment_list(response)
def get_comments(self, contact_id):
"""List recent activities of a contact.
Args:
contact_id(str): Contact id of the contact.
Returns:
list: List of comments object.
Raises:
Books exception: If status is not '200' or '201'.
"""
url = base_url + contact_id + '/comments'
response = zoho_http_client.get(url, self.details)
return parser.get_comment_list(response).get_comments()
def list_refunds(self, contact_id):
"""List the refund history of a contact with pagination.
Args:
contact_id(str): Contact id of the contact.
Returns:
instance: Refunds list object.
Raises:
Books exception: If status is not '200' or '201'.
"""
url = base_url + contact_id + '/refunds'
response = zoho_http_client.get(url, self.details)
return parser.get_refund_list(response)
def get_refunds(self, contact_id):
"""List the refund history of a contact.
Args:
contact_id(str): Contact id of the contact.
Returns:
list: List of refunds object.
Raises:
Books exception: If status is not '200' or '201'.
"""
url = base_url + contact_id + '/refunds'
response = zoho_http_client.get(url, self.details)
return parser.get_refund_list(response).get_creditnote_refunds()
def track_1099(self, contact_id):
"""Track a contact for 1099 reporting.
Args:
contact_id(str): Contact id of the contact.
Returns:
str: Success message('1099 tracking is enabled').
Raises:
Books exception: If status is not '200' or '201'.
"""
url = base_url + contact_id + '/track1099'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
    def untrack_1099(self, contact_id):
        """Stop tracking a contact for 1099 reporting.

        Args:
            contact_id(str): Contact id of the contact.

        Returns:
            str: Success message returned by the API confirming that 1099
                tracking was disabled.

        Raises:
            Books exception: If status is not '200' or '201'.

        """
        url = base_url + contact_id + '/untrack1099'
        response = zoho_http_client.post(url, self.details, '')
        return parser.get_message(response)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/ContactsApi.py",
"copies": "1",
"size": "14086",
"license": "mit",
"hash": -2803078406857362000,
"line_mean": 30.7252252252,
"line_max": 80,
"alpha_frac": 0.5518954991,
"autogenerated": false,
"ratio": 4.41983056165673,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.01076474008827864,
"num_lines": 444
} |
#$Id$#
from os.path import basename
from json import dumps
from books.util.ZohoHttpClient import ZohoHttpClient
from books.parser.InvoicesParser import InvoicesParser
from Api import Api
# Base URL for every invoice endpoint of the Zoho Books API.
base_url = Api().base_url + 'invoices/'
# Shared parser that converts JSON responses into model objects.
parser = InvoicesParser()
# Shared HTTP client used for all requests issued by this module.
zoho_http_client = ZohoHttpClient()
class InvoicesApi:
"""Invoice Api class is used to:
1.List all invoices with pagination.
2.Get the details of an invoice.
3.Create an invoice.
4.Update an existing invoice.
5.Delete an existing invoice.
6.Mark a draft invoice as sent.
7.Mark an invoice status as void.
8.Mark a voided invoice as draft.
9.Email an invoice to the customer.
10.Send invoices to your customer by email.
11.Get the email content of an email.
12.Remind the customer about an unpaid invoice by email.
13.Remind the customer abount unpaid invoices by email.
14.Get the mail content of the payment reminder.
15.Export maximum of 25 invoices as pdf.
16.Export invoices as pdf and print them.
17.Disable automated payment reminders for an invoice.
18.Enable automated payment reminders for an invoice.
19.Write off the invoice balance amount of an invoice.
20.Cancel the write off amount of an invoice.
21.Update the billing address for an existing invoice.
22.Update the shipping address for an existing invoice.
23.Get all invoice pdf templates.
24.Update the pdf template associated with the invoice.
25.Get the list of payments made for an invoice.
26.Get the list of credits applied for an invoice.
27.Apply the customer credits to an invoice.
28.Delete a payment made to an invoice.
29.Delete a particular credit applied to an invoice.
30.Return a file attached to an invoice.
31.Attach a file to an invoice.
32.Send attachment while emailing the invoice.
33.Delete the file attached to the invoice.
34.Delete an expense receipt attached to an invoice.
35.Get the complete history and comments of an invoice.
36.Add a comment for an invoice.
37.Update an existing comment of an invoice.
38.Delete an invoice comment.
"""
def __init__(self, authtoken, organization_id):
"""Initialize Invoice api using user's authtoken and organization
id.
Args:
authtoken(str): User's authtoken.
organization_id(str): User's organization id.
"""
self.details = {
'authtoken':authtoken,
'organization_id':organization_id
}
def get_invoices(self, parameter=None):
"""Get list of all invoices with pagination.
Args:
parameter(dict, optional): Filter with which the list has to be
displayed.
Returns:
instance: Invoice list instance.
"""
response = zoho_http_client.get(base_url, self.details, parameter)
return parser.get_list(response)
def get(self, invoice_id, print_pdf=None, accept=None):
"""Get details of an invoice.
Args:
invoice_id(str): Inovice id.
print_pdf(bool): True to print invoice as pdf else False.
accept(str): Format in which the invoice details has to be
downloaded. Default value is json. Allowed value is json,
pdf and html.
Returns:
instance or file: If accept is None invoice object is returned
else File containing invoice details is returned.
"""
url = base_url + invoice_id
query = {}
if print_pdf is not None and accept is not None:
query = {
'print': print_pdf,
'accept': accept
}
resp = zoho_http_client.getfile(url, self.details, query)
return resp
elif print_pdf is not None:
query = {'print': print_pdf}
if print_pdf is True:
query.update({'accept':'pdf'})
resp = zoho_http_client.getfile(url, self.details, query)
return resp
else:
response = zoho_http_client.get(url, self.details, query)
return parser.get(response)
elif accept is not None:
query = {
'accept': accept
}
resp = zoho_http_client.getfile(url, self.details, query)
return resp
else:
response = zoho_http_client.get(url, self.details)
return parser.get(response)
def create(self, invoice, send=None, ignore_auto_number_generation=None):
"""Creat an invoice.
Args:
invoice(instance): Invoice object.
send(bool, optional): To send invoice to the cocntact persons
associated with the invoice. Allowed values are true and false.
ignore_auto_number_generation(bool, optional): Ignore auto invoice
number generation for the invoice. This mandates the invoice
number. Allowed values are true and false.
Returns:
instance: Invoice object.
"""
json_object = dumps(invoice.to_json())
data = {
'JSONString': json_object
}
query = {}
if send is not None and ignore_auto_number_generation is not None:
query = {
'send':send,
'ignore_auto_number_generation': ignore_auto_number_generation
}
elif send is not None or ignore_auto_number_generation is not None:
query = {
'send': send
} if send is not None else {
'ignore_auto_number_generation': \
ignore_auto_number_generation
}
else:
query = None
response = zoho_http_client.post(base_url, self.details, data, query)
return parser.get(response)
def update(self, invoice_id, invoice, ignore_auto_number_generation=None):
"""Update an existing invoice.
Args:
invoie_id(str): Invoice id.
invoice(instance): Invoice object.
ignore_auto_number_generation(bool, optional): Ignore auto invoice
number generation for the invoice. This mandates the invoice
number. Allowed values are true and false.
Returns:
instance: Invoice object.
"""
url = base_url + invoice_id
json_object = dumps(invoice.to_json())
data = {
'JSONString': json_object
}
if ignore_auto_number_generation is not None:
query = {
'ignore_auto_number_generation': ignore_auto_number_generation
}
else:
query = None
response = zoho_http_client.put(url, self.details, data, query)
return parser.get(response)
def delete(self, invoice_id):
"""Delete an existing invoice.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('The invoice has been deleted.').
"""
url = base_url + invoice_id
response = zoho_http_client.delete(url, self.details)
return parser.get_message(response)
def mark_an_invoice_as_sent(self, invoice_id):
"""Mark an invoice as sent.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('Inovice status has been changed to sent.').
"""
url = base_url + invoice_id + '/status/sent'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
def void_an_invoice(self, invoice_id):
"""Mark an invoice as void.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('Invoice status has been changed to void.').
"""
url = base_url + invoice_id + '/status/void'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
def mark_as_draft(self, invoice_id):
"""Mark a voided invoice as draft.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('Status of invoice changed form void to
draft.').
"""
url = base_url + invoice_id + '/status/draft/'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
def email_an_invoice(self, invoice_id, email, attachment=None, \
send_customer_statement=None, send_attachment=None):
"""Email an invoice to the customer.
Args:
invoice_id(str): Invoice id.
email(instance): Email object.
attachment(list of dict, optional): List of dictionary objects
containing details of files to be attached.
send_customer_statement(bool, optional): True to send customer
statement pdf with email else False.
send_attachment(bool, optional): True to send the attachment with
mail else False.
Returns:
str: Success message('Your invoice has been sent.').
"""
url = base_url + invoice_id + '/email'
json_object = dumps(email.to_json())
data = {
'JSONString': json_object
}
if attachment is not None and send_customer_statement is not None and \
send_attachment is not None:
query = {
'send_customer_statement': send_customer_statement,
'send_attachment': send_attachment
}
file_list = []
for value in attachment:
attachments = {
'attachments': {
'filename': basename(value),
'content':open(value).read()
}
}
file_list.append(attachments)
elif attachment is not None and send_customer_statement is not None:
query = {
'send_customer_statement':send_customer_statement,
}
file_list = []
for value in attachment:
attachments = {
'attachments': {
'filename': basename(value),
'content': open(value).read()
}
}
file_list.append(attachments)
elif attachment is not None and send_attachment is not None:
query = {
'send_attachment':send_attachment
}
file_list = []
for value in attachment:
attachments = {
'attachments': {
'filename':basename(value),
'content':open(value).read()
}
}
file_list.append(attachments)
elif send_customer_statement is not None and send_attachment is not None:
query = {
'send_customer_statement':send_customer_statement,
'send_attachment':send_attachment
}
file_list = None
elif attachment is not None:
file_list = []
for value in attachment:
attachments = {
'attachments': {
'filename': basename(value),
'content':open(value).read()
}
}
file_list.append(attachments)
query = None
elif send_customer_statement is not None:
query = {
'send_customer_statement':send_customer_statement,
}
file_list = None
elif send_attachment is not None:
query = {
'send_attachment':send_attachment
}
file_list = None
else:
query = None
file_list = None
response = zoho_http_client.post(url, self.details, data, query, file_list)
return parser.get_message(response)
def email_invoices(self, contact_id, invoice_ids, email=None, \
snail_mail=None):
"""Send invoice to customers by email.
Args:
contact_id(list of str): List of Contact ids.
invoice_ids(str): Comma separated Invoice ids which are to
be emailed.
email(bool, optional): True to send via email.
snail_mail(bool, optional): True to send via snail mail.
Returns:
str: Success message('Mission accomplished! We've sent all
the invoices.').
"""
query = {
'invoice_ids': invoice_ids
}
url = base_url + 'email'
data = {}
data['contacts'] = []
for value in contact_id:
contacts = {
'contact_id':value,
'email':True,
'snail_mail': False
}
if (email is not None) and (snail_mail is not None):
contacts = {
'contact_id':value,
'email':email,
'snail_mail':snail_mail
}
data['contacts'].append(contacts)
fields = {
'JSONString': dumps(data)
}
response = zoho_http_client.post(url, self.details, fields, query)
return parser.get_message(response)
def get_email_content(self, invoice_id, email_template_id=None):
"""Get the email content of an invoice.
Args:
invoice_id(str): Invoice id.
email_template_id(str, optional): Email template id. If None
default template id will be inputted.
Returns:
instance: Email object.
"""
url = base_url + invoice_id + '/email'
if email_template_id is not None:
query = {
'email_template_id':email_template_id
}
else:
query = None
response = zoho_http_client.get(url, self.details, query)
return parser.get_content(response)
def remind_customer(self, invoice_id, email, attachment=None, \
send_customer_statement=None):
"""Remind customers abount unpaid invoices.
Args:
invoice_id(str): Invoice id.
email(instance): Email object.
attachment(list of dict, optional): List of dictionary objects
containing details of files to be attached.
send_customer_statement(bool, optional): True to send customer
statement along with email else False.
Returns:
str: Success message('Your payment reminder has been sent.').
"""
url = base_url + invoice_id + '/paymentreminder'
json_object = dumps(email.to_json())
data = {
'JSONString': json_object
}
if send_customer_statement is not None and attachment is not None:
query = {
'send_customer_statement':send_customer_statement
}
file_list = []
for value in attachment:
attachments = {
'attachments': {
'filename': basename(value),
'content':open(value).read()
}
}
file_list.append(attachments)
elif send_customer_statement is not None:
query = {
'send_customer_statement':send_customer_statement
}
file_list = None
elif attachment is not None:
file_list = []
for value in attachment:
attachments = {
'attachments': {
'filename': basename(value),
'content':open(value).read()
}
}
file_list.append(attachments)
query = None
else:
query = None
file_list = None
response = zoho_http_client.post(url, self.details, data, query, file_list)
return parser.get_message(response)
def bulk_invoice_reminder(self, invoice_id):
"""Remind customers about unpaid invoices.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('Success! All reminders have been sent.').
"""
url = base_url + 'paymentreminder'
invoice_ids = {
'invoice_ids': invoice_id
}
response = zoho_http_client.post(url, self.details, '', invoice_ids)
return parser.get_message(response)
def get_payment_reminder_mail_content(self, invoice_id):
"""Get the mail content of the payment reminder.
Args:
invoice_id(str): Invoice id.
Returns:
instance: Email object.
"""
url = base_url + invoice_id + '/paymentreminder'
response = zoho_http_client.get(url, self.details)
return parser.payment_reminder_mail_content(response)
def bulk_export_invoices(self, invoice_id):
"""Export maximum of 25 invoices as pdf.
Args:
invoice_id(str): Comma separated invoice ids which are to be
exported as pdfs.
Returns:
file: Pdf file containing invoice details.
"""
url = base_url + 'pdf'
query = {
'invoice_ids': invoice_id
}
response = zoho_http_client.getfile(url, self.details, query)
return response
def bulk_print_invoices(self, invoice_id):
"""Export invoices as pdf and print them.
Args:
invoice_id(str): Invoice id.
Returns:
file: File that has to be printed.
"""
url = base_url + 'pdf'
invoice_ids = {
'invoice_ids': invoice_id
}
response = zoho_http_client.getfile(url, self.details, invoice_ids)
return response
def disable_payment_reminder(self, invoice_id):
"""Disable payment reminer.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('Reminders stopped.').
"""
url = base_url + invoice_id + '/paymentreminder/disable'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
def enable_payment_reminder(self, invoice_id):
"""Enable payment reminder.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('Reminders enabled.').
"""
url = base_url + invoice_id + '/paymentreminder/enable'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
def write_off_invoice(self, invoice_id):
"""Write off the invoice balance amount of an invoice.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('Invoice has been written off.').
"""
url = base_url + invoice_id + '/writeoff'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
def cancel_write_off(self, invoice_id):
"""Cancel the write off amount of an invoice.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('The writeoff done for this invoice
has been cancelled.').
"""
url = base_url + invoice_id + '/writeoff/cancel'
response = zoho_http_client.post(url, self.details, '')
return parser.get_message(response)
def update_billing_address(self, invoice_id, address, \
is_update_customer=None):
"""Update billing address for the invoice.
Args:
invoice_id(str): Invoice id.
address(instance): Address object.
is_update_customer(bool, optional): True to update the address
for all draft, unpaid invoice and future invoices.
Returns:
instance: Address object.
"""
url = base_url + invoice_id + '/address/billing'
json_object = dumps(address.to_json())
data = {
'JSONString': json_object
}
if is_update_customer is not None:
query = {
'is_update_customer': is_update_customer
}
else:
query = None
response = zoho_http_client.put(url, self.details, data, query)
return parser.get_billing_address(response)
def update_shipping_address(self, invoice_id, address, \
is_update_customer=None):
"""Update shipping address for the invoice.
Args:
invoice_id(str): Invoice id.
address(instance): Address object.
is_update_customer(bool, optional): True to update the address
for all draft, unpaid invoice and future invoices.
Returns:
instance: Address object.
"""
url = base_url + invoice_id + '/address/shipping'
json_object = dumps(address.to_json())
data = {
'JSONString': json_object
}
if is_update_customer is not None:
query = {
'is_update_customer': is_update_customer
}
else:
query = None
response = zoho_http_client.put(url, self.details, data, query)
return parser.get_shipping_address(response)
def list_invoice_templates(self):
"""Get all invoice pdf templates.
Returns:
instance: Invoice template list object.
"""
url = base_url + 'templates'
response = zoho_http_client.get(url, self.details)
return parser.invoice_template_list(response)
def update_invoice_template(self, invoice_id, template_id):
"""Update the pdf template associated with the invoice.
Args:
invoice_id(str): Invoice id.
template_id(str): Tempalte id.
Returns:
str: Success message('Invoice information has been updated.').
"""
url = base_url + invoice_id + '/templates/' + template_id
response = zoho_http_client.put(url, self.details, '')
return parser.get_message(response)
## Payments and Credits--------------------------------------------------------
def list_invoice_payments(self, invoice_id):
"""List the payments made for an invoice.
Args:
invoice_id(str): Invoice id.
Returns:
list of payments instance: List of Invoice payments list object.
"""
url = base_url + invoice_id + '/payments'
response = zoho_http_client.get(url, self.details)
return parser.payments_list(response)
def list_credits_applied(self, invoice_id):
"""Get the list of credits applied for an invoice.
Args:
invoice_id(str): Invoice id.
Returns:
list of credits instance: List of credits object.
"""
url = base_url + invoice_id + '/creditsapplied'
response = zoho_http_client.get(url, self.details)
return parser.credits_list(response)
def apply_credits(self, invoice_id, payments_and_credits):
"""Apply the customer credits either from credit notes or
excess customer payments to an invoice.
Args:
invoice_id(str): Invoice id.
payments_and_credits(instance): Payments object.
Returns:
instance: Payments and credits object.
"""
url = base_url + invoice_id + '/credits'
data = {}
invoice_payments = []
apply_credits = []
for value in payments_and_credits.invoice_payments:
payments = {}
payments['payment_id'] = value.get_payment_id()
payments['amount_applied'] = value.get_amount_applied()
invoice_payments.append(payments)
for value in payments_and_credits.apply_creditnotes:
credits = {}
credits['creditnote_id'] = value.get_creditnote_id()
credits['amount_applied'] = value.get_amount_applied()
apply_credits.append(credits)
data['invoice_payments'] = invoice_payments
data['apply_creditnotes'] = apply_credits
json_string = {
'JSONString': dumps(data)
}
response = zoho_http_client.post(url, self.details, json_string)
return parser.apply_credits(response)
def delete_payment(self, invoice_id, invoice_payment_id):
"""Delete a payment made to an invoice.
Args:
invoice_id(str): Invoice id.
invoice_payment_id(str): Invoice payment id.
Returns:
str: Success message('The payment has been deleted.').
"""
url = base_url + invoice_id + '/payments/' + invoice_payment_id
response = zoho_http_client.delete(url, self.details)
return parser.get_message(response)
def delete_applied_credit(self, invoice_id, creditnotes_invoice_id):
"""Delete a particular credit applied to an invoice.
Args:
invoice_id(str): Invoice id.
creditnotes_invoice_id(str): Creditnotes invoice id.
Returns:
str: Success message('Credits applied to an invoice have been
deleted.').
"""
url = base_url + invoice_id + '/creditsapplied/' + \
creditnotes_invoice_id
response = zoho_http_client.delete(url, self.details)
return parser.get_message(response)
## Attachment------------------------------------------------------------------
def get_an_invoice_attachment(self, invoice_id, preview = None):
"""Get an invoice attachment.
Args:
invoice_id(str): Invoice id
preview(bool): True to get the thumbnail of the attachment.
Returns:
file: File attached to the invoice.
"""
url = base_url + invoice_id + '/attachment'
query_string = {
'preview': str(preview)
} if preview is not None else None
response = zoho_http_client.getfile(url, self.details, query_string)
return response
def add_attachment_to_an_invoice(self, invoice_id, attachment, \
can_send_in_mail=None):
"""Add a file to an invoice.
Args:
invoice_id(str): Invoice id.
attachment(list of dict): List of dict containing details of the
files to be attached.
can_send_in_mail(bool, optional): True to send the attachment with
the invoice when emailed.
Returns:
str: Success message('Your file has been successfully attached to
the invoice.').
"""
url = base_url + invoice_id + '/attachment'
file_list = []
for value in attachment:
attachments = {
'attachment': {
'filename':basename(value),
'content':open(value).read()
}
}
file_list.append(attachments)
if can_send_in_mail is not None:
query = {
'can_send_in_mail':can_send_in_mail
}
else:
query = None
data = {
'JSONString': ''
}
response = zoho_http_client.post(url, self.details, data, query, \
file_list)
return parser.get_message(response)
def update_attachment_preference(self, invoice_id, can_send_in_mail):
"""Update whether to send attached file while emailing the invoice.
Args:
invoice_id(str): Invoice id.
can_send_in_mail(bool): Boolean to send attachment with the
invoice when emailed.
Returns:
str: Success message('Invoice information has been updated.').
"""
url = base_url + invoice_id + '/attachment'
query = {
'can_send_in_mail':can_send_in_mail,
}
data = {
'JSONString': ''
}
response = zoho_http_client.put(url, self.details, data, query)
return parser.get_message(response)
def delete_an_attachment(self, invoice_id):
"""Delete the file attached to the invoice.
Args:
invoice_id(str): Invoice id.
Returns:
str: Success message('Your file is no longer attached
to the invoice.').
"""
url = base_url + invoice_id + '/attachment'
response = zoho_http_client.delete(url, self.details)
return parser.get_message(response)
def delete_expense_receipt(self, expense_id):
"""Delete the expense receipts attached to an invoice which
is raised from an expense.
Args:
expense_id: Expense id.
Returns:
str: Success message('The attached expense receipt has
been deleted.').
"""
url = base_url + 'expenses/' + expense_id + '/receipt'
response = zoho_http_client.delete(url, self.details)
return parser.get_message(response)
#### Comments and History -----------------------------------------------------
def list_invoice_comments_history(self, invoice_id):
"""Get the complete history and comments of an invoice.
Args:
invoice_id(str): Invoice_id.
Returns:
instance: Comments list object.
"""
url = base_url + invoice_id + '/comments'
response = zoho_http_client.get(url, self.details)
return parser.comments_list(response)
def add_comment(self, invoice_id, comments):
"""Add comment for an invoice.
Args:
invoice_id(str): Invoice id.
comments(instance): Comments object.
Returns:
str: Success message('Comments added.').
"""
url = base_url + invoice_id + '/comments'
data = {}
data['payment_expected_date'] = comments.get_payment_expected_date()
data['description'] = comments.get_description()
data['show_comment_to_clients'] = \
comments.get_show_comment_to_clients()
json_string = {
'JSONString': dumps(data)
}
response = zoho_http_client.post(url, self.details, json_string)
return parser.get_comment(response)
def update_comment(self, invoice_id, comment_id, comments):
"""Update an existing comment of an invoice.
Args:
invoice_id(str): Invoice id.
comment_id(str): Comment id.
comments(instance): Comments object.
Returns:
instance: Comments object.
"""
url = base_url + invoice_id + '/comments/' + comment_id
data = {}
data['description'] = comments.get_description()
data['show_comment_to_clients'] = \
comments.get_show_comment_to_clients()
json_string = {
'JSONString': dumps(data)
}
response = zoho_http_client.put(url, self.details, json_string)
return parser.get_comment(response)
def delete_comment(self, invoice_id, comment_id):
"""Delete an invoice comment.
Args:
invoice_id(str): Invoice id.
comment_id(str): Comment id.
Returns:
str: Success message.('The comment has been deleted.').
"""
url = base_url + invoice_id + '/comments/' + comment_id
response = zoho_http_client.delete(url, self.details)
return parser.get_message(response)
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/api/InvoicesApi.py",
"copies": "1",
"size": "32575",
"license": "mit",
"hash": -6836507186813786000,
"line_mean": 33.0386624869,
"line_max": 83,
"alpha_frac": 0.5449884881,
"autogenerated": false,
"ratio": 4.480126530050887,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5525115018150887,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.api.PortalsApi import PortalsApi
from projects.api.ProjectsApi import ProjectsApi
from projects.api.DashboardApi import DashboardApi
from projects.api.MilestonesApi import MilestonesApi
from projects.api.TaskListApi import TaskListApi
from projects.api.TasksApi import TasksApi
from projects.api.TimesheetsApi import TimesheetsApi
from projects.api.BugsApi import BugsApi
from projects.api.EventsApi import EventsApi
from projects.api.DocumentsApi import DocumentsApi
from projects.api.FoldersApi import FoldersApi
from projects.api.ForumsApi import ForumsApi
from projects.api.CategoryApi import CategoryApi
from projects.api.UsersApi import UsersApi
class ZohoProjects:
"""This class is used to create an object for projects service and to provide instance for all APIs."""
    def __init__(self, authtoken, portal_id=None):
        """Initialize parameters for zoho projects.

        Args:
            authtoken(str): User's Authtoken.
            portal_id(str, optional): Id of the portal whose resources the
                api instances should operate on. Default to None.

        """
        self.authtoken = authtoken
        self.portal_id = portal_id
def get_portals_api(self):
"""Get instance for portals api.
Returns:
instance: Portals api instance.
"""
portals_api = PortalsApi(self.authtoken)
return portals_api
def get_projects_api(self):
"""Get instance for projects api.
Returns:
instance: Projects api instance.
"""
projects_api = ProjectsApi(self.authtoken, self.portal_id)
return projects_api
def get_dashboard_api(self):
"""Get instance for dashboard api.
Returns:
instance: Dashboard api instance.
"""
dashboard_api = DashboardApi(self.authtoken, self.portal_id)
return dashboard_api
def get_milestone_api(self):
"""Get instance for milestone api.
Returns:
instance: Milestone api instance.
"""
milestone_api = MilestonesApi(self.authtoken, self.portal_id)
return milestone_api
def get_tasklist_api(self):
"""Get instance for tasklist api.
Returns:
instance: Tasklist api instance.
"""
tasklist_api = TaskListApi(self.authtoken, self.portal_id)
return tasklist_api
def get_tasks_api(self):
"""Get instance for tasks api.
Returns:
instance: Tasks api instance.
"""
tasks_api = TasksApi(self.authtoken, self.portal_id)
return tasks_api
def get_timesheets_api(self):
"""Get instance for timesheets api.
Returns:
instance: Timesheets api.
"""
timesheets_api = TimesheetsApi(self.authtoken, self.portal_id)
return timesheets_api
def get_bugs_api(self):
"""Get instance for bugs api.
Returns:
instance: Bugs api.
"""
bugs_api = BugsApi(self.authtoken, self.portal_id)
return bugs_api
def get_events_api(self):
"""Get instance for events api.
Returns:
instance: Events api.
"""
events_api = EventsApi(self.authtoken, self.portal_id)
return events_api
def get_documents_api(self):
"""Get instance for Documents api.
Returns:
instance: Documents api.
"""
documents_api = DocumentsApi(self.authtoken, self.portal_id)
return documents_api
def get_folders_api(self):
"""Get instance for folders api.
Returns:
instance: Folders api.
"""
folders_api = FoldersApi(self.authtoken, self.portal_id)
return folders_api
def get_forums_api(self):
"""Get instance for forums api.
Returns:
instance: Forums api.
"""
forums_api = ForumsApi(self.authtoken, self.portal_id)
return forums_api
def get_category_api(self):
"""Get instance for category api.
Returns:
instance: Category api.
"""
category_api = CategoryApi(self.authtoken, self.portal_id)
return category_api
def get_users_api(self):
"""Get instance for users api.
Returns:
instance: Users api.
"""
users_api = UsersApi(self.authtoken, self.portal_id)
return users_api
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/service/ZohoProjects.py",
"copies": "1",
"size": "4420",
"license": "mit",
"hash": -6596545171650901000,
"line_mean": 24.549132948,
"line_max": 107,
"alpha_frac": 0.6158371041,
"autogenerated": false,
"ratio": 4.138576779026217,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5254413883126218,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.model.Activity import Activity
from projects.model.Status import Status
class DashboardParser:
    """Parses dashboard JSON responses into Activity and Status objects."""

    def get_activities(self, resp):
        """Parse the response into a list of Activity objects.

        Args:
            resp(dict): Dictionary containing json object for activities.

        Returns:
            list of instance: List of activity object.

        """
        return [self.get_activity(item) for item in resp['activities']]

    def get_activity(self, resp):
        """Build one Activity object from its JSON representation.

        Args:
            resp(dict): Dictionary containing json object for activity.

        Returns:
            instance: Activity object.

        """
        activity = Activity()
        # Copy each optional field through its setter; absent keys are skipped.
        field_setters = (
            ('id', activity.set_id),
            ('state', activity.set_state),
            ('activity_for', activity.set_activity_for),
            ('name', activity.set_name),
            ('activity_by', activity.set_activity_by),
            ('time_long', activity.set_time_long),
            ('display_time', activity.set_display_time),
            ('time', activity.set_time),
        )
        for key, setter in field_setters:
            if key in resp:
                setter(resp[key])
        return activity

    def get_statuses(self, resp):
        """Parse the response into a list of Status objects.

        Args:
            resp(dict): Response containing json object for status.

        Returns:
            list of instance: List of status object.

        """
        return [self.get_status(item) for item in resp['statuses']]

    def get_status(self, resp):
        """Build one Status object from its JSON representation.

        Args:
            resp(dict): Dictionary containing json response for status.

        Returns:
            instance: Status object.

        """
        status = Status()
        field_setters = (
            ('id', status.set_id),
            ('content', status.set_content),
            ('posted_by', status.set_posted_by),
            ('posted_person', status.set_posted_person),
            ('posted_time', status.set_posted_time),
            ('posted_time_long', status.set_posted_time_long),
        )
        for key, setter in field_setters:
            if key in resp:
                setter(resp[key])
        return status

    def to_json(self, status):
        """Serialize a Status object into a request dictionary.

        Args:
            status(instance): Status object.

        Returns:
            dict: Dictionary containing json object for status.

        """
        payload = {}
        content = status.get_content()
        if content != "":
            payload['content'] = content
        return payload
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/DashboardParser.py",
"copies": "1",
"size": "3287",
"license": "mit",
"hash": -483807985206549250,
"line_mean": 28.8818181818,
"line_max": 83,
"alpha_frac": 0.5701247338,
"autogenerated": false,
"ratio": 4.459972862957938,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.03032365459049176,
"num_lines": 110
} |
#$Id$
from projects.model.Bug import Bug
from projects.model.Project import Project
from projects.model.Defaultfield import Defaultfield
from projects.model.Customfield import Customfield
class BugsParser:
    """This class is used to parse the json response for Bugs.

    Cleanup notes: removed the C-style trailing semicolons that were
    inconsistent with the rest of the file, stopped shadowing the ``json``
    module name with loop variables, and replaced manual index loops and a
    hand-rolled dict copy with the idiomatic equivalents. Behavior is
    unchanged, including the API's own key spellings ('status_deatils',
    'default_Value').
    """

    def to_json(self, bug):
        """This method is used to create json object for bug object.

        Only attributes carrying a non-default value ("" for strings,
        0 for numeric ids) are included in the payload.

        Args:
            bug(instance): Bug object.

        Returns:
            dict: Json object for bugs object.

        """
        data = {}
        if bug.get_title() != "":
            data['title'] = bug.get_title()
        if bug.get_description() != "":
            data['description'] = bug.get_description()
        if bug.get_assignee_id() != 0:
            data['assignee'] = bug.get_assignee_id()
        if bug.get_flag() != "":
            data['flag'] = bug.get_flag()
        if bug.get_classification_id() != 0:
            data['classification_id'] = bug.get_classification_id()
        if bug.get_milestone_id() != 0:
            data['milestone_id'] = bug.get_milestone_id()
        if bug.get_due_date() != "":
            data['due_date'] = bug.get_due_date()
        if bug.get_module_id() != 0:
            data['module_id'] = bug.get_module_id()
        if bug.get_severity_id() != 0:
            data['severity_id'] = bug.get_severity_id()
        if bug.get_reproducible_id() != 0:
            data['reproducible_id'] = bug.get_reproducible_id()
        return data

    def get_bug(self, resp):
        """This method parses the given response and returns bug object.

        Args:
            resp(dict): Response containing json object for bug.

        Returns:
            instance: Bug object.

        """
        bug = Bug()
        if 'id' in resp:
            bug.set_id(resp['id'])
        if 'key' in resp:
            bug.set_key(resp['key'])
        if 'project' in resp and 'id' in resp['project']:
            project = Project()
            project.set_id(resp['project']['id'])
            bug.set_project(project)
        if 'flag' in resp:
            bug.set_flag(resp['flag'])
        if 'title' in resp:
            bug.set_title(resp['title'])
        if 'reporter_id' in resp:
            bug.set_reporter_id(resp['reporter_id'])
        if 'reported_person' in resp:
            bug.set_reported_person(resp['reported_person'])
        if 'created_time' in resp:
            bug.set_created_time(resp['created_time'])
        if 'created_time_format' in resp:
            bug.set_created_time_format(resp['created_time_format'])
        if 'created_time_long' in resp:
            bug.set_created_time_long(resp['created_time_long'])
        if 'assignee_name' in resp:
            bug.set_assignee_name(resp['assignee_name'])
        if 'classification' in resp:
            classification = resp['classification']
            if 'id' in classification:
                bug.set_classification_id(classification['id'])
            if 'type' in classification:
                bug.set_classification_type(classification['type'])
        if 'severity' in resp:
            severity = resp['severity']
            if 'id' in severity:
                bug.set_severity_id(severity['id'])
            if 'type' in severity:
                bug.set_severity_type(severity['type'])
        if 'status' in resp:
            status = resp['status']
            if 'id' in status:
                bug.set_status_id(status['id'])
            if 'type' in status:
                bug.set_status_type(status['type'])
        if 'closed' in resp:
            bug.set_closed(resp['closed'])
        if 'reproducible' in resp:
            reproducible = resp['reproducible']
            if 'id' in reproducible:
                bug.set_reproducible_id(reproducible['id'])
            if 'type' in reproducible:
                bug.set_reproducible_type(reproducible['type'])
        if 'module' in resp:
            module = resp['module']
            if 'id' in module:
                bug.set_module_id(module['id'])
            if 'name' in module:
                bug.set_module_name(module['name'])
        if 'link' in resp:
            link = resp['link']
            if 'self' in link and 'url' in link['self']:
                bug.set_url(link['self']['url'])
            if 'timesheet' in link and 'url' in link['timesheet']:
                bug.set_timesheet_url(link['timesheet']['url'])
        return bug

    def get_bugs(self, resp):
        """This method parses the given response and returns list of bugs object.

        Args:
            resp(dict): Dictionary containing json response for bugs.

        Returns:
            list of instance: List of bugs object.

        """
        return [self.get_bug(value) for value in resp['bugs']]

    def get_message(self, resp):
        """This method is used to parse the given response and returns string message.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.

        """
        return resp['response']

    def get_default_fields(self, resp):
        """Parse the JSON response and make it into the Default field object.

        Args:
            resp(dict): Response contains the details of the default fields.

        Returns:
            instance: Defaultfield object.

        """
        defaultfield = Defaultfield()
        if 'defaultfields' in resp:
            defaultfields = resp['defaultfields']
            # Each section is a list of plain dicts.  The key spellings
            # (including 'status_deatils') mirror the API payload and must
            # not be "corrected" here.
            sections = (
                ('severity_details', defaultfield.set_severity_details),
                ('status_deatils', defaultfield.set_status_deatils),
                ('module_details', defaultfield.set_module_details),
                ('priority_details', defaultfield.set_priority_details),
                ('classification_details',
                 defaultfield.set_classification_details),
            )
            for key, setter in sections:
                if key in defaultfields:
                    setter([self.json_to_dict(item)
                            for item in defaultfields[key]])
        return defaultfield

    def json_to_dict(self, json):
        """Parse the JSON response into dict object.

        Args:
            json(dict): Dictionary object.

        Returns:
            dict: Returns a shallow copy of the given dictionary.

        """
        # dict() performs the same shallow key/value copy the original
        # hand-written loop did.
        return dict(json)

    def get_custom_fields(self, resp):
        """Parse the JSON response and make it into the list of Customfield object.

        Args:
            resp(dict): Response contains the details of the custom fields.

        Returns:
            list of instance: Returns list of Customfield object.

        """
        if 'customfields' in resp:
            return [self.json_to_customfield(obj)
                    for obj in resp['customfields']]
        return []

    def json_to_customfield(self, json_obj):
        """Parse the JSON object into Customfield object.

        Args:
            json_obj(dict): JSON response contains the details of the custom field.

        Returns:
            instance: Returns the Customfield object.

        """
        customfield = Customfield()
        if 'label_name' in json_obj:
            customfield.set_label_name(json_obj['label_name'])
        if 'column_name' in json_obj:
            customfield.set_column_name(json_obj['column_name'])
        # 'default_Value' (capital V) is the key the API actually sends.
        if 'default_Value' in json_obj:
            customfield.set_default_value(json_obj['default_Value'])
        if 'picklist_values' in json_obj:
            customfield.set_picklist_values(list(json_obj['picklist_values']))
        return customfield
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/BugsParser.py",
"copies": "1",
"size": "9374",
"license": "mit",
"hash": -7938851856737323000,
"line_mean": 28.29375,
"line_max": 86,
"alpha_frac": 0.5456582035,
"autogenerated": false,
"ratio": 3.8386568386568385,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4884315042156839,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.model.Category import Category
class CategoryParser:
    """Parses category JSON payloads to and from Category model objects."""

    def get_categories(self, resp):
        """Parse the response into a list of Category objects.

        Args:
            resp(dict): Response containing json for categories list.

        Returns:
            list of instance: List of categories object.

        """
        return [self.to_category(item) for item in resp['categories']]

    def to_category(self, resp):
        """Build one Category object from its JSON representation.

        Args:
            resp(dict): Dictionary containing json object for category.

        Returns:
            instance: Category object.

        """
        category = Category()
        if 'id' in resp:
            category.set_id(resp['id'])
        if 'name' in resp:
            category.set_name(resp['name'])
        return category

    def to_json(self, category):
        """Serialize a Category object into a request dictionary.

        Args:
            category(instance): Category object.

        Returns:
            dict: Dictionary containing json object for category object.

        """
        payload = {}
        name = category.get_name()
        if name != "":
            payload['name'] = name
        return payload
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/CategoryParser.py",
"copies": "1",
"size": "1464",
"license": "mit",
"hash": 6974675219726831000,
"line_mean": 25.6181818182,
"line_max": 77,
"alpha_frac": 0.5710382514,
"autogenerated": false,
"ratio": 4.979591836734694,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6050630088134694,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.model.Document import Document
from projects.model.Folder import Folder
from projects.model.Version import Version
class DocumentsParser:
    """This class parses the given response for documents."""

    def get_documents(self, resp):
        """This method parses the given response for documents.

        Args:
            resp(dict): Response containing json for documents.

        Returns:
            list of instance: List of documents object.

        """
        documents = []
        for value in resp['documents']:
            documents.append(self.json_to_document(value))
        return documents

    def json_to_document(self, resp):
        """This method is used to parse the json response and to return document object.

        Args:
            resp(dict): Response containing json object for document.

        Returns:
            instance: Document object.

        """
        document = Document()
        if 'id' in resp:
            document.set_id(resp['id'])
        if 'file_name' in resp:
            document.set_file_name(resp['file_name'])
        if 'content_type' in resp:
            document.set_content_type(resp['content_type'])
        if 'versions' in resp:
            for value in resp['versions']:
                version = Version()
                if 'id' in value:
                    version.set_id(value['id'])
                if 'uploaded_by' in value:
                    version.set_uploaded_by(value['uploaded_by'])
                if 'description' in value:
                    version.set_description(value['description'])
                if 'file_size' in value:
                    version.set_file_size(value['file_size'])
                if 'version' in value:
                    version.set_version(value['version'])
                if 'uploaded_date' in value:
                    version.set_uploaded_date(value['uploaded_date'])
                # Bug fix: the original tested the string literal
                # 'uploaded_date_long in value' (always truthy), so the
                # lookup below raised KeyError whenever the key was absent.
                if 'uploaded_date_long' in value:
                    version.set_uploaded_date_long(value['uploaded_date_long'])
                document.set_versions(version)
        if 'folder' in resp:
            # Folder sub-object: these keys are accessed unconditionally,
            # matching the original behavior.
            folder = Folder()
            folder.set_id(resp['folder']['id'])
            folder.set_name(resp['folder']['name'])
            folder.set_is_discussion(resp['folder']['is_discussion'])
            document.set_folder(folder)
        if 'link' in resp:
            if 'self' in resp['link']:
                if 'url' in resp['link']['self']:
                    document.set_url(resp['link']['self']['url'])
        return document

    def to_json(self, document):
        """This method is used to convert document object to json format.

        Args:
            document(instance): Document object.

        Returns:
            dict: Dictionary containing json object for document.

        """
        data = {}
        if document.get_folder().get_id() != 0:
            data['folder_id'] = document.get_folder().get_id()
        if document.get_description() != "":
            data['description'] = document.get_description()
        if document.get_tags() != "":
            data['tags'] = document.get_tags()
        if document.get_notify() != 0:
            data['notify'] = document.get_notify()
        return data

    def get_message(self, resp):
        """This method is used to parse the given response and returns string message.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.

        """
        return resp['response']
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/DocumentsParser.py",
"copies": "1",
"size": "3618",
"license": "mit",
"hash": 4350892766672226300,
"line_mean": 33.7884615385,
"line_max": 88,
"alpha_frac": 0.54671089,
"autogenerated": false,
"ratio": 4.59720457433291,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011533975741579966,
"num_lines": 104
} |
#$Id$
from projects.model.Event import Event
from projects.model.Participant import Participant
class EventsParser:
    """This class is used to parse the json response for events."""

    def get_events(self, resp):
        """This method parses the given response for list of events.

        Args:
            resp(dict): Response containing json object for events.

        Returns:
            list of instance: List of event object.

        """
        events = []
        for value in resp['events']:
            events.append(self.json_to_event(value))
        return events

    def json_to_event(self, resp):
        """This method is used to convert json object to event object.

        Args:
            resp(dict): Response containing json object for event.

        Returns:
            instance: Event object.

        """
        event = Event()
        if 'id' in resp:
            event.set_id(resp['id'])
        if 'title' in resp:
            event.set_title(resp['title'])
        if 'location' in resp:
            event.set_location(resp['location'])
        if 'scheduled_on' in resp:
            event.set_scheduled_on(resp['scheduled_on'])
        # NOTE(review): this overwrites the value just set from
        # 'scheduled_on' with the long variant; it probably should call a
        # dedicated set_scheduled_on_long() setter -- confirm against the
        # Event model before changing.  Behavior preserved as-is.
        if 'scheduled_on_long' in resp:
            event.set_scheduled_on(resp['scheduled_on_long'])
        # The original checked and assigned 'repeat' twice; the redundant
        # second, identical assignment was removed.
        if 'repeat' in resp:
            event.set_repeat(resp['repeat'])
        if 'reminder' in resp:
            event.set_reminder(resp['reminder'])
        if 'occurence(s)' in resp:
            event.set_occurrences(resp['occurence(s)'])
        if 'occurred' in resp:
            event.set_occurred(resp['occurred'])
        if 'duration_hour' in resp:
            event.set_duration_hour(resp['duration_hour'])
        if 'duration_minutes' in resp:
            event.set_duration_minutes(resp['duration_minutes'])
        if 'is_open' in resp:
            event.set_is_open(resp['is_open'])
        if 'participants' in resp:
            for value in resp['participants']:
                participant = Participant()
                if 'participant_id' in value:
                    participant.set_participant_id(value['participant_id'])
                if 'participant_person' in value:
                    participant.set_participant_person(
                        value['participant_person'])
                event.set_participants(participant)
        return event

    def get_message(self, resp):
        """This method is used to parse the given response and returns string message.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.

        """
        return resp['response']

    def to_json(self, event):
        """This method is used to convert event object to json format.

        Args:
            event(instance): Event object.

        Returns:
            dict: Dictionary containing json object for event.

        """
        data = {}
        if event.get_title() != "":
            data['title'] = event.get_title()
        if event.get_scheduled_on() != "":
            data['date'] = event.get_scheduled_on()
        if event.get_hour() != "":
            data['hour'] = event.get_hour()
        if event.get_minutes() != "":
            data['minutes'] = event.get_minutes()
        if event.get_ampm() != "":
            data['ampm'] = event.get_ampm()
        if event.get_duration_hour() != "":
            data['duration_hour'] = event.get_duration_hour()
        if event.get_duration_minutes() != "":
            data['duration_mins'] = event.get_duration_minutes()
        if event.get_participants():
            # Comma-separated participant ids; as in the original code,
            # each id is expected to already be a string.
            data['participants'] = ','.join(
                value.get_participant_id()
                for value in event.get_participants())
        if event.get_reminder() != "":
            data['remind_before'] = event.get_reminder()
        if event.get_repeat() != "":
            data['repeat'] = event.get_repeat()
        if event.get_occurrences() != 0:
            data['nooftimes_repeat'] = event.get_occurrences()
        if event.get_location() != '':
            data['location'] = event.get_location()
        return data
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/EventsParser.py",
"copies": "1",
"size": "4477",
"license": "mit",
"hash": -4398786016526815700,
"line_mean": 34.2519685039,
"line_max": 86,
"alpha_frac": 0.54456109,
"autogenerated": false,
"ratio": 4.2760267430754535,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5320587833075453,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.model.Folder import Folder
class Document:
    """Model object describing a project document and its upload metadata."""

    def __init__(self):
        """Start every field at its empty default."""
        self.id = 0
        self.file_name = ""
        self.content_type = ""
        self.versions = []
        self.folder = Folder()
        self.url = ""
        self.upload_doc = []
        self.description = ""
        self.tags = ""
        self.notify = 0

    def set_id(self, id):
        """Assign the document id.

        Args:
            id(str): Document id.

        """
        self.id = id

    def get_id(self):
        """Return the document id.

        Returns:
            str: Document id.

        """
        return self.id

    def set_file_name(self, file_name):
        """Assign the file name.

        Args:
            file_name(str): File name.

        """
        self.file_name = file_name

    def get_file_name(self):
        """Return the file name.

        Returns:
            str: File name.

        """
        return self.file_name

    def set_content_type(self, content_type):
        """Assign the content type.

        Args:
            content_type(str): Content type.

        """
        self.content_type = content_type

    def get_content_type(self):
        """Return the content type.

        Returns:
            str: Content type.

        """
        return self.content_type

    def set_versions(self, version):
        """Append one version entry to this document.

        Args:
            version(instance): Version object.

        """
        self.versions.append(version)

    def get_versions(self):
        """Return every recorded version.

        Returns:
            list of instance: Version objects.

        """
        return self.versions

    def set_folder(self, folder):
        """Assign the containing folder.

        Args:
            folder(instance): Folder object.

        """
        self.folder = folder

    def get_folder(self):
        """Return the containing folder.

        Returns:
            instance: Folder object.

        """
        return self.folder

    def set_url(self, url):
        """Assign the document url.

        Args:
            url(str): Url.

        """
        self.url = url

    def get_url(self):
        """Return the document url.

        Returns:
            str: Url.

        """
        return self.url

    def set_upload_doc(self, upload_doc):
        """Assign the files to upload.

        Args:
            upload_doc(list of file): List of Upload doc.

        """
        self.upload_doc = upload_doc

    def get_upload_doc(self):
        """Return the files to upload.

        Returns:
            list of file: List of Upload doc.

        """
        return self.upload_doc

    def set_description(self, description):
        """Assign the description.

        Args:
            description(str): Description.

        """
        self.description = description

    def get_description(self):
        """Return the description.

        Returns:
            str: Description.

        """
        return self.description

    def set_tags(self, tags):
        """Assign the tags.

        Args:
            tags(str): Tags.

        """
        self.tags = tags

    def get_tags(self):
        """Return the tags.

        Returns:
            str: Tags.

        """
        return self.tags

    def set_notify(self, notify):
        """Assign the users to notify.

        Args:
            notify(long): User ids which must be notified.

        """
        self.notify = notify

    def get_notify(self):
        """Return the users to notify.

        Returns:
            long: User id which must be notified.

        """
        return self.notify
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/model/Document.py",
"copies": "1",
"size": "3567",
"license": "mit",
"hash": 676752534570063100,
"line_mean": 16.835,
"line_max": 59,
"alpha_frac": 0.4763106252,
"autogenerated": false,
"ratio": 4.41460396039604,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.008305764960261803,
"num_lines": 200
} |
#$Id$
from projects.model.Folder import Folder
class FoldersParser:
    """Parses folder JSON payloads to and from Folder model objects."""

    def get_folders(self, resp):
        """Parse the response into a list of Folder objects.

        Args:
            resp(dict): Dictionary containing json object for folders.

        Returns:
            list of instance: List of folders object.

        """
        return [self.json_to_folder(item) for item in resp['folders']]

    def json_to_folder(self, resp):
        """Build one Folder object from its JSON representation.

        Args:
            resp(dict): Dictionary containing json object for folders.

        Returns:
            instance: Folder object.

        """
        folder = Folder()
        if 'id' in resp:
            folder.set_id(resp['id'])
        if 'name' in resp:
            folder.set_name(resp['name'])
        if 'is_discussion' in resp:
            folder.set_is_discussion(resp['is_discussion'])
        link = resp.get('link', {})
        if 'self' in link and 'url' in link['self']:
            folder.set_url(link['self']['url'])
        return folder

    def to_json(self, folder):
        """Serialize a Folder object into a request dictionary.

        Args:
            folder(instance): Folder object.

        Returns:
            dict: Dictionary containing json object for folders.

        """
        payload = {}
        name = folder.get_name()
        if name != "":
            payload['name'] = name
        return payload

    def get_message(self, resp):
        """Extract the success message from a response.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.

        """
        return resp['response']
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/FoldersParser.py",
"copies": "1",
"size": "2003",
"license": "mit",
"hash": -8791936608599114000,
"line_mean": 26.8194444444,
"line_max": 86,
"alpha_frac": 0.5531702446,
"autogenerated": false,
"ratio": 4.604597701149426,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5657767945749426,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.model.Forum import Forum
from projects.model.Comment import Comment
class ForumsParser:
    """Parses forum and forum-comment JSON payloads."""

    def get_forums(self, resp):
        """Parse the response into a list of Forum objects.

        Args:
            resp(dict): Dictionary containing json object for list of forums.

        Returns:
            list of instance: List of forums object.

        """
        return [self.json_to_forum(item) for item in resp['forums']]

    def json_to_forum(self, resp):
        """Build one Forum object from its JSON representation.

        Args:
            resp(dict): Dictionary containing json object for forum.

        Returns:
            instance: Forum object.

        """
        forum = Forum()
        field_setters = (
            ('id', forum.set_id),
            ('name', forum.set_name),
            ('content', forum.set_content),
            ('is_sticky_post', forum.set_is_sticky_post),
            ('is_announcement_post', forum.set_is_announcement_post),
            ('posted_by', forum.set_posted_by),
            ('posted_person', forum.set_posted_person),
            ('post_date', forum.set_post_date),
            ('post_date_long', forum.set_post_date_long),
        )
        for key, setter in field_setters:
            if key in resp:
                setter(resp[key])
        link = resp.get('link', {})
        if 'self' in link and 'url' in link['self']:
            forum.set_url(link['self']['url'])
        return forum

    def to_json(self, forum):
        """Serialize a Forum object into a request dictionary.

        Args:
            forum(instance): Forum object.

        Returns:
            dict: Dictionary containing json object for forum.

        """
        payload = {}
        name = forum.get_name()
        if name != "":
            payload['name'] = name
        content = forum.get_content()
        if content != "":
            payload['content'] = content
        category_id = forum.get_category_id()
        if category_id != 0:
            payload['category_id'] = category_id
        notify = forum.get_notify()
        if notify != "":
            payload['notify'] = notify
        return payload

    def get_comments(self, resp):
        """Parse the response into a list of Comment objects.

        Args:
            resp(dict): Dictionary containing json object for comments.

        Returns:
            list of instance: List of comments object.

        """
        return [self.json_to_comment(item) for item in resp['comments']]

    def json_to_comment(self, resp):
        """Build one Comment object from its JSON representation.

        Args:
            resp(dict): Dictionary containing json object for comment.

        Returns:
            instance: Comment object.

        """
        comment = Comment()
        field_setters = (
            ('id', comment.set_id),
            ('content', comment.set_content),
            ('posted_by', comment.set_posted_by),
            ('posted_person', comment.set_posted_person),
            ('post_date', comment.set_post_date),
            ('post_date_long', comment.set_post_date_long),
        )
        for key, setter in field_setters:
            if key in resp:
                setter(resp[key])
        return comment

    def get_message(self, resp):
        """Extract the success message from a response.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.

        """
        return resp['response']

    def comment_to_json(self, comment):
        """Serialize a Comment object into a request dictionary.

        Args:
            comment(instance): Comment object.

        Returns:
            dict: json object for comment.

        """
        payload = {}
        content = comment.get_content()
        if content != "":
            payload['content'] = content
        return payload
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/ForumsParser.py",
"copies": "1",
"size": "4532",
"license": "mit",
"hash": 1613826693296957200,
"line_mean": 29.6216216216,
"line_max": 86,
"alpha_frac": 0.5545013239,
"autogenerated": false,
"ratio": 4.219739292364991,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011102079255647821,
"num_lines": 148
} |
#$Id$
from projects.model.Milestone import Milestone
class MilestonesParser:
    """Parses milestone JSON payloads to and from Milestone model objects."""

    def get_milestones(self, resp):
        """Parse the response into a list of Milestone objects.

        Args:
            resp(dict): Dictionary containing response for milestones.

        Returns:
            list of instance: List of milestones object.

        """
        return [self.get_milestone(item) for item in resp['milestones']]

    def get_milestone(self, resp):
        """Build one Milestone object from its JSON representation.

        Args:
            resp(dict): Response containing json object for milestone.

        Returns:
            instance: Milestone object.

        """
        milestone = Milestone()
        if 'id' in resp:
            milestone.set_id(resp['id'])
        # Nested link urls come first, mirroring the response layout.
        link = resp.get('link', {})
        if 'self' in link and 'url' in link['self']:
            milestone.set_url(link['self']['url'])
        if 'status' in link and 'url' in link['status']:
            milestone.set_status_url(link['status']['url'])
        field_setters = (
            ('name', milestone.set_name),
            ('owner_name', milestone.set_owner_name),
            ('owner_id', milestone.set_owner_id),
            ('flag', milestone.set_flag),
            ('start_date', milestone.set_start_date),
            ('start_date_long', milestone.set_start_date_long),
            ('end_date', milestone.set_end_date),
            ('end_date_long', milestone.set_end_date_long),
            ('status', milestone.set_status),
            ('completed_date', milestone.set_completed_date),
            ('completed_date_long', milestone.set_completed_date_long),
            ('start_date_format', milestone.set_start_date_format),
            ('end_date_format', milestone.set_end_date_format),
        )
        for key, setter in field_setters:
            if key in resp:
                setter(resp[key])
        return milestone

    def get_message(self, resp):
        """Extract the success message from a response.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.

        """
        return resp['response']

    def to_json(self, milestone):
        """Serialize a Milestone object into a request dictionary.

        Args:
            milestone(instance): Milestone object.

        Returns:
            dict: Dictionary containing json object for milestone.

        """
        payload = {}
        name = milestone.get_name()
        if name != "":
            payload['name'] = name
        start_date = milestone.get_start_date()
        if start_date != "":
            payload['start_date'] = start_date
        end_date = milestone.get_end_date()
        if end_date != "":
            payload['end_date'] = end_date
        owner_id = milestone.get_owner_id()
        if owner_id != 0:
            payload['owner'] = owner_id
        flag = milestone.get_flag()
        if flag != "":
            payload['flag'] = flag
        return payload
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/MilestonesParser.py",
"copies": "1",
"size": "3720",
"license": "mit",
"hash": 1687541552571029200,
"line_mean": 32.8181818182,
"line_max": 86,
"alpha_frac": 0.5553763441,
"autogenerated": false,
"ratio": 4.047878128400435,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5103254472500435,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.model.Milestone import Milestone
class TaskList:
    """Model object for a Zoho Projects task list."""

    def __init__(self):
        """Initialize default values for a task list."""
        self.id = ""
        self.id_string = ""
        self.name = ""
        self.milestone = Milestone()
        self.completed = None
        self.created_time = ""
        self.created_time_long = 0
        self.rolled = None
        self.sequence = 0
        self.view_type = ""
        self.url = ""
        self.task_url = ""

    def set_id(self, id):
        """Set id.

        Args:
            id(str): Id.
        """
        self.id = id

    def get_id(self):
        """Get id.

        Returns:
            str: Id.
        """
        return self.id

    def set_id_string(self, id_string):
        """Set the task list id string.

        Args:
            id_string(str): task list id string.
        """
        self.id_string = id_string

    def get_id_string(self):
        """Get the task list id string.

        Returns:
            str: Returns the task list id string.
        """
        return self.id_string

    def set_name(self, name):
        """Set name.

        Args:
            name(str): Name.
        """
        self.name = name

    def get_name(self):
        """Get name.

        Returns:
            str: Name.
        """
        return self.name

    def set_milestone(self, milestone):
        """Set milestone.

        Args:
            milestone(instance): Milestone object.
        """
        self.milestone = milestone

    def get_milestone(self):
        """Get milestone.

        Returns:
            instance: Milestone object.
        """
        return self.milestone

    def set_completed(self, completed):
        """Set completed.

        Args:
            completed(bool): Completed.
        """
        self.completed = completed

    def get_completed(self):
        """Get completed.

        Returns:
            bool: Completed.
        """
        return self.completed

    def set_created_time(self, created_time):
        """Set created time.

        Args:
            created_time(str): Created time.
        """
        self.created_time = created_time

    def get_created_time(self):
        """Get created time.

        Returns:
            str: Created time.
        """
        return self.created_time

    def set_created_time_long(self, created_time_long):
        """Set created time long.

        Args:
            created_time_long(long): Created time long.
        """
        self.created_time_long = created_time_long

    def get_created_time_long(self):
        """Get created time long.

        Returns:
            long: Created time long.
        """
        return self.created_time_long

    def set_rolled(self, rolled):
        """Set rolled.

        Args:
            rolled(bool): Rolled.
        """
        self.rolled = rolled

    def get_rolled(self):
        """Get rolled.

        Returns:
            bool: Rolled.
        """
        return self.rolled

    def set_sequence(self, sequence):
        """Set sequence.

        Args:
            sequence(int): Sequence.
        """
        self.sequence = sequence

    def get_sequence(self, sequence=None):
        """Get sequence.

        Args:
            sequence: Ignored. Kept (with a default) only for backward
                compatibility with the previous, accidental signature that
                required an argument.

        Returns:
            int: Sequence.
        """
        return self.sequence

    def set_view_type(self, view_type):
        """Set view type.

        Args:
            view_type(str): View type.
        """
        self.view_type = view_type

    def get_view_type(self):
        """Get view type.

        Returns:
            str: View type.
        """
        return self.view_type

    def set_url(self, url):
        """Set url.

        Args:
            url(str): Self url.
        """
        self.url = url

    def get_url(self):
        """Get url.

        Returns:
            str: Self url.
        """
        return self.url

    def set_task_url(self, url):
        """Set task url.

        Args:
            url(str): Task url.
        """
        self.task_url = url

    def get_task_url(self):
        """Get task url.

        Returns:
            str: Task url.
        """
        return self.task_url
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/model/TaskList.py",
"copies": "1",
"size": "4340",
"license": "mit",
"hash": -7868159375934776000,
"line_mean": 16.7142857143,
"line_max": 60,
"alpha_frac": 0.4737327189,
"autogenerated": false,
"ratio": 4.177093358999038,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5150826077899038,
"avg_score": null,
"num_lines": null
} |
#$ID$
from projects.model.Project import Project
class Bug:
    """Model object for a Zoho Projects bug."""

    def __init__(self):
        """Initialize default values for a Bug object."""
        self.id = 0
        self.key = ""
        self.project = Project()
        self.flag = ""
        self.title = ""
        self.reporter_id = ""
        self.reported_person = ""
        self.created_time = ""
        self.created_time_long = 0
        self.created_time_format = ""
        self.assignee_name = ""
        self.classification_id = 0
        self.classification_type = ""
        self.severity_id = 0
        self.severity_type = ""
        self.status_id = 0
        self.status_type = ""
        self.module_id = 0
        self.module_name = ""
        self.closed = None
        self.reproducible_id = 0
        self.reproducible_type = ""
        self.url = ""
        self.timesheet_url = ""
        self.milestone_id = 0
        self.due_date = ""
        self.description = ""
        self.assignee_id = 0

    def set_id(self, id):
        """Set id.

        Args:
            id(long): Id.
        """
        self.id = id

    def get_id(self):
        """Get id.

        Returns:
            long: Id.
        """
        return self.id

    def set_key(self, key):
        """Set key.

        Args:
            key(str): Key.
        """
        self.key = key

    def get_key(self):
        """Get key.

        Returns:
            str: Key.
        """
        return self.key

    def set_project(self, project):
        """Set project.

        Args:
            project(instance): Project object.
        """
        self.project = project

    def get_project(self):
        """Get project.

        Returns:
            instance: Project object.
        """
        return self.project

    def set_flag(self, flag):
        """Set flag.

        Args:
            flag(str): Flag.
        """
        self.flag = flag

    def get_flag(self):
        """Get flag.

        Returns:
            str: Flag.
        """
        return self.flag

    def set_title(self, title):
        """Set title.

        Args:
            title(str): Title.
        """
        self.title = title

    def get_title(self):
        """Get title.

        Returns:
            str: Title.
        """
        return self.title

    def set_reporter_id(self, reporter_id):
        """Set reporter id.

        Args:
            reporter_id(str): Reporter id.
        """
        self.reporter_id = reporter_id

    def get_reporter_id(self):
        """Get reporter id.

        Returns:
            str: Reporter id.
        """
        return self.reporter_id

    def set_reported_person(self, reported_person):
        """Set reported person.

        Args:
            reported_person(str): Reported person.
        """
        self.reported_person = reported_person

    def get_reported_person(self):
        """Get reported person.

        Returns:
            str: Reported person.
        """
        return self.reported_person

    def set_created_time(self, created_time):
        """Set created time.

        Args:
            created_time(str): Created time.
        """
        self.created_time = created_time

    def get_created_time(self):
        """Get created time.

        Returns:
            str: Created time.
        """
        return self.created_time

    def set_created_time_long(self, created_time_long):
        """Set created time long.

        Args:
            created_time_long(long): Created time long.
        """
        self.created_time_long = created_time_long

    def get_created_time_long(self):
        """Get created time long.

        Returns:
            long: Created time long.
        """
        return self.created_time_long

    def set_created_time_format(self, created_time_format):
        """Set created time format.

        Args:
            created_time_format(str): Created time format.
        """
        self.created_time_format = created_time_format

    def get_created_time_format(self):
        """Get created time format.

        Returns:
            str: Created time format.
        """
        return self.created_time_format

    def set_assignee_name(self, assignee_name):
        """Set assignee name.

        Args:
            assignee_name(str): Assignee name.
        """
        self.assignee_name = assignee_name

    def get_assignee_name(self):
        """Get assignee name.

        Returns:
            str: Assignee name.
        """
        return self.assignee_name

    def set_classification_id(self, classification_id):
        """Set classification id.

        Args:
            classification_id(long): Classification id.
        """
        # Fixed: previously assigned to self.classification, so the value
        # written here was never visible through get_classification_id().
        self.classification_id = classification_id

    def get_classification_id(self):
        """Get classification id.

        Returns:
            long: Classification id.
        """
        return self.classification_id

    def set_classification_type(self, classification_type):
        """Set classification type.

        Args:
            classification_type(str): Classification type.
        """
        self.classification_type = classification_type

    def get_classification_type(self):
        """Get classification type.

        Returns:
            str: Classification type.
        """
        return self.classification_type

    def set_severity_id(self, severity_id):
        """Set severity id.

        Args:
            severity_id(long): Severity id.
        """
        # Fixed: previously assigned to self.severity, so the value written
        # here was never visible through get_severity_id().
        self.severity_id = severity_id

    def get_severity_id(self):
        """Get severity id.

        Returns:
            long: Severity id.
        """
        return self.severity_id

    def set_severity_type(self, severity_type):
        """Set severity type.

        Args:
            severity_type(str): Severity type.
        """
        self.severity_type = severity_type

    def get_severity_type(self):
        """Get severity type.

        Returns:
            str: Severity type.
        """
        return self.severity_type

    def set_status_id(self, status_id):
        """Set status id.

        Args:
            status_id(long): Status id.
        """
        self.status_id = status_id

    def get_status_id(self):
        """Get status id.

        Returns:
            long: Status id.
        """
        return self.status_id

    def set_status_type(self, status_type):
        """Set status type.

        Args:
            status_type(str): Status type.
        """
        self.status_type = status_type

    def get_status_type(self):
        """Get status type.

        Returns:
            str: Status type.
        """
        return self.status_type

    def set_closed(self, closed):
        """Set closed.

        Args:
            closed(bool): True if closed else False.
        """
        self.closed = closed

    def get_closed(self):
        """Get closed.

        Returns:
            bool: True if closed else False.
        """
        return self.closed

    def set_reproducible_id(self, reproducible_id):
        """Set reproducible id.

        Args:
            reproducible_id(long): Reproducible id.
        """
        self.reproducible_id = reproducible_id

    def get_reproducible_id(self):
        """Get reproducible id.

        Returns:
            long: Reproducible id.
        """
        return self.reproducible_id

    def set_reproducible_type(self, reproducible_type):
        """Set reproducible type.

        Args:
            reproducible_type(str): Reproducible type.
        """
        self.reproducible_type = reproducible_type

    def get_reproducible_type(self):
        """Get reproducible type.

        Returns:
            str: Reproducible type.
        """
        return self.reproducible_type

    def set_module_id(self, module_id):
        """Set module id.

        Args:
            module_id(long): Module id.
        """
        self.module_id = module_id

    def get_module_id(self):
        """Get module id.

        Returns:
            long: Module id.
        """
        return self.module_id

    def set_module_name(self, module_name):
        """Set module name.

        Args:
            module_name(str): Module name.
        """
        self.module_name = module_name

    def get_module_name(self):
        """Get module name.

        Returns:
            str: Module name.
        """
        return self.module_name

    def set_url(self, url):
        """Set url.

        Args:
            url(str): Url.
        """
        self.url = url

    def get_url(self):
        """Get url.

        Returns:
            str: Url.
        """
        return self.url

    def set_timesheet_url(self, timesheet_url):
        """Set timesheet url.

        Args:
            timesheet_url(str): Timesheet url.
        """
        self.timesheet_url = timesheet_url

    def get_timesheet_url(self):
        """Get timesheet url.

        Returns:
            str: Timesheet url.
        """
        return self.timesheet_url

    def set_milestone_id(self, milestone_id):
        """Set milestone id.

        Args:
            milestone_id(long): Milestone id.
        """
        self.milestone_id = milestone_id

    def get_milestone_id(self):
        """Get milestone id.

        Returns:
            long: Milestone id.
        """
        return self.milestone_id

    def set_due_date(self, due_date):
        """Set due date.

        Args:
            due_date(str): Due date.
        """
        self.due_date = due_date

    def get_due_date(self):
        """Get due date.

        Returns:
            str: Due date.
        """
        return self.due_date

    def set_description(self, description):
        """Set description.

        Args:
            description(str): Description.
        """
        self.description = description

    def get_description(self):
        """Get description.

        Returns:
            str: Description.
        """
        return self.description

    def set_assignee_id(self, assignee_id):
        """Set assignee id.

        Args:
            assignee_id(long): Assignee id.
        """
        # Fixed: previously the body was the bare expression
        # `self.assignee_id`, so the argument was silently discarded.
        self.assignee_id = assignee_id

    def get_assignee_id(self):
        """Get assignee id.

        Returns:
            long: Assignee id.
        """
        return self.assignee_id
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/model/Bug.py",
"copies": "1",
"size": "10484",
"license": "mit",
"hash": -6161747157567498000,
"line_mean": 18.2720588235,
"line_max": 60,
"alpha_frac": 0.5040061045,
"autogenerated": false,
"ratio": 4.1422362702489135,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5146242374748914,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.model.Project import Project
class ProjectsParser:
    """This class is used to parse the json response for projects."""

    def get_projects(self, resp):
        """This method parses the given response and returns list of projects.

        Args:
            resp(dict): Dictionary containing json response for projects.

        Returns:
            list of instance: List of projects object.
        """
        projects = []
        for value in resp['projects']:
            project = self.get_project(value)
            projects.append(project)
        return projects

    def get_message(self, resp):
        """This method is used to parse the given response and returns string message.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.
        """
        return resp['response']

    def to_json(self, project):
        """This method is used to parse the projects object to json format.

        Args:
            project(instance): Project object.

        Returns:
            dict: Dictionary containing json object for project.
        """
        data = {}
        if project.get_name() != "":
            data['name'] = project.get_name()
        if project.get_description() != "":
            data['description'] = project.get_description()
        if project.get_status() != "":
            data['status'] = project.get_status()
        return data

    def get_project(self, resp):
        """This method is used to parse the given response and returns project object.

        Args:
            resp(dict): Dictionary containing json object for project.

        Returns:
            instance: Project object.
        """
        project = Project()
        if 'created_date' in resp:
            project.set_created_date(resp['created_date'])
        if 'id' in resp:
            project.set_id(resp['id'])
        if 'id_string' in resp:
            project.set_id_string(resp['id_string'])
        if 'bug_count' in resp:
            if 'open' in resp['bug_count']:
                project.set_open_bug_count(resp['bug_count']['open'])
            if 'closed' in resp['bug_count']:
                project.set_closed_bug_count(resp['bug_count']['closed'])
        if 'owner_name' in resp:
            project.set_owner_name(resp['owner_name'])
        if 'task_count' in resp:
            if 'open' in resp['task_count']:
                project.set_open_task_count(resp['task_count']['open'])
            # Fixed: the membership check previously looked at
            # resp['bug_count'] (copy-paste), raising KeyError when a
            # response carried task_count but no bug_count.
            if 'closed' in resp['task_count']:
                project.set_closed_task_count(resp['task_count']['closed'])
        if 'status' in resp:
            project.set_status(resp['status'])
        if 'created_date_format' in resp:
            project.set_created_date_format(resp['created_date_format'])
        if 'name' in resp:
            project.set_name(resp['name'])
        if 'owner_id' in resp:
            project.set_owner_id(resp['owner_id'])
        if 'created_date_long' in resp:
            project.set_created_date_long(resp['created_date_long'])
        if 'milestone_count' in resp:
            if 'open' in resp['milestone_count']:
                project.set_open_milestone_count(resp['milestone_count']['open'])
            if 'closed' in resp['milestone_count']:
                project.set_closed_milestone_count(resp['milestone_count']['closed'])
        if 'link' in resp:
            link = resp['link']
            if 'forum' in link:
                forum = link['forum']
                project.set_forum_url(forum['url'])
            if 'status' in link:
                status = link['status']
                project.set_status_url(status['url'])
            if 'task' in link:
                task = link['task']
                project.set_task_url(task['url'])
            if 'self' in link:
                self_url = link['self']
                project.set_url(self_url['url'])
            # NOTE(review): the four branches below all funnel into
            # set_url(), so each overwrites the 'self' url set above.
            # This looks like a copy-paste slip (dedicated setters such as
            # set_tasklist_url may be intended), but Project's API is not
            # visible here — confirm before changing the target setters.
            if 'tasklist' in link:
                tasklist = link['tasklist']
                project.set_url(tasklist['url'])
            if 'milestone' in link:
                milestone = link['milestone']
                project.set_url(milestone['url'])
            if 'folder' in link:
                folder = link['folder']
                project.set_url(folder['url'])
            if 'document' in link:
                document = link['document']
                project.set_url(document['url'])
            if 'event' in link:
                event = link['event']
                project.set_event_url(event['url'])
            if 'bug' in link:
                bug = link['bug']
                project.set_bug_url(bug['url'])
            if 'timesheet' in link:
                timesheet = link['timesheet']
                project.set_timesheet_url(timesheet['url'])
            if 'user' in link:
                user = link['user']
                project.set_user_url(user['url'])
            if 'activity' in link:
                activity = link['activity']
                project.set_activity_url(activity['url'])
        return project
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/ProjectsParser.py",
"copies": "1",
"size": "5135",
"license": "mit",
"hash": -2856149939606493000,
"line_mean": 35.161971831,
"line_max": 86,
"alpha_frac": 0.5252190847,
"autogenerated": false,
"ratio": 4.34065934065934,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5365878425359341,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.model.Task import Task
from projects.model.Owner import Owner
from projects.model.TaskList import TaskList
from projects.model.Comment import Comment
class TasksParser:
    """This class is used to parse the json response for Tasks."""

    def get_tasks(self, resp):
        """This method parses the given response and returns list of tasks object.

        Args:
            resp(dict): Dictionary containing json object for tasks object.

        Returns:
            list of instance: List of tasks object.
        """
        tasks = []
        for value in resp['tasks']:
            task = self.get_task(value)
            tasks.append(task)
        return tasks

    def get_task(self, resp):
        """This method parses the given response and returns task object.

        Args:
            resp(dict): Response containing json object for task.

        Returns:
            instance: Task object.
        """
        task = Task()
        if 'id' in resp:
            task.set_id(resp['id'])
        if 'id_string' in resp:
            task.set_id_string(resp['id_string'])
        if 'name' in resp:
            task.set_name(resp['name'])
        if 'completed' in resp:
            task.set_completed(resp['completed'])
        if 'created_by' in resp:
            task.set_created_by(resp['created_by'])
        if 'created_person' in resp:
            task.set_created_person(resp['created_person'])
        if 'priority' in resp:
            task.set_priority(resp['priority'])
        if 'percent_complete' in resp:
            task.set_percent_complete(resp['percent_complete'])
        if 'start_date' in resp:
            task.set_start_date(resp['start_date'])
        if 'start_date_long' in resp:
            task.set_start_date_long(resp['start_date_long'])
        if 'end_date' in resp:
            task.set_end_date(resp['end_date'])
        if 'end_date_long' in resp:
            task.set_end_date_long(resp['end_date_long'])
        if 'duration' in resp:
            task.set_duration(resp['duration'])
        if 'details' in resp:
            if 'owners' in resp['details']:
                for owner in resp['details']['owners']:
                    owner_obj = Owner()
                    if 'name' in owner:
                        owner_obj.set_name(owner['name'])
                    if 'id' in owner:
                        owner_obj.set_id(owner['id'])
                    task.set_details(owner_obj)
        if 'link' in resp:
            link = resp['link']
            # Fixed: 'self' and 'timesheet' were indexed without checking
            # they exist, raising KeyError on partial link objects; now all
            # three sub-links are guarded like 'subtask' already was.
            if 'self' in link and 'url' in link['self']:
                task.set_url(link['self']['url'])
            if 'subtask' in link and 'url' in link['subtask']:
                task.set_subtask_url(link['subtask']['url'])
            if 'timesheet' in link and 'url' in link['timesheet']:
                task.set_timesheet_url(link['timesheet']['url'])
        if 'tasklist' in resp:
            tasklist = resp['tasklist']
            tasklist_obj = TaskList()
            if 'id' in tasklist:
                tasklist_obj.set_id(tasklist['id'])
            if 'name' in tasklist:
                tasklist_obj.set_name(tasklist['name'])
            task.set_tasklist(tasklist_obj)
        if 'subtasks' in resp:
            task.set_subtasks(resp['subtasks'])
        return task

    def get_comments(self, resp):
        """Parse the JSON response and make it into list of Comment object.

        Args:
            resp(dict): Response contains the details of the task comments.

        Returns:
            list of instance: Returns list of Comment object.
        """
        comments = []
        for json_obj in resp['comments']:
            comments.append(self.json_to_comment(json_obj))
        return comments

    def get_comment(self, resp):
        """Parse the JSON response and make it into Comment object.

        Args:
            resp(dict): Response contains the details of the task comment.

        Returns:
            instance: Returns the Comment object.
        """
        comment = Comment()
        if 'comments' in resp:
            comments = resp['comments']
            comment = self.json_to_comment(comments[0])
        return comment

    def json_to_comment(self, json_obj):
        """Parse the JSON object into Comment object.

        Args:
            json_obj(dict): JSON object contains the details of task comment.

        Returns:
            instance: Returns the Comment object.
        """
        comment = Comment()
        if 'content' in json_obj:
            comment.set_content(json_obj['content'])
        if 'id' in json_obj:
            comment.set_id(json_obj['id'])
        if 'created_time_long' in json_obj:
            comment.set_created_time_long(json_obj['created_time_long'])
        if 'added_by' in json_obj:
            comment.set_added_by(json_obj['added_by'])
        if 'added_person' in json_obj:
            comment.set_added_person(json_obj['added_person'])
        if 'created_time_format' in json_obj:
            comment.set_created_time_format(json_obj['created_time_format'])
        if 'created_time' in json_obj:
            comment.set_created_time(json_obj['created_time'])
        return comment

    def get_message(self, resp):
        """This method is used to parse the given response and returns string message.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.
        """
        return resp['response']

    def to_json(self, task):
        """This method is used to parse the Task object to json format.

        Args:
            task(instance): Task object.

        Returns:
            dict: Dictionary containing json object for task.
        """
        data = {}
        if task.get_details()['owners']:
            # Build a comma-separated list of owner ids.
            data['person_responsible'] = ''
            length = len(task.get_details()['owners'])
            for value in task.get_details()['owners']:
                data['person_responsible'] = data['person_responsible'] + value.get_id()
                if length != 1:
                    data['person_responsible'] = data['person_responsible'] + ','
                length = length - 1
        if task.get_name() != "":
            data['name'] = task.get_name()
        if task.get_start_date() != "":
            data['start_date'] = task.get_start_date()
        if task.get_end_date() != "":
            data['end_date'] = task.get_end_date()
        if task.get_percent_complete() != 0:
            data['percent_complete'] = task.get_percent_complete()
        if task.get_duration() != 0:
            data['duration'] = task.get_duration()
        if task.get_priority() != "":
            data['priority'] = task.get_priority()
        return data
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/TasksParser.py",
"copies": "1",
"size": "7029",
"license": "mit",
"hash": -13638833921067692,
"line_mean": 28.1659751037,
"line_max": 89,
"alpha_frac": 0.5235453123,
"autogenerated": false,
"ratio": 3.9801812004530013,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.941947138349785,
"avg_score": 0.11685102585103023,
"num_lines": 241
} |
#$Id$
from projects.model.TaskList import TaskList
from projects.model.Milestone import Milestone
class TaskListParser:
    """This class parses the json response for Tasklists."""

    def get_tasklists(self, resp):
        """This method parses the given response and returns list of task list object.

        Args:
            resp(dict): Response containing json object for taskslist.

        Returns:
            list of instance: List of taskslist object.
        """
        tasklists = []
        for value in resp['tasklists']:
            tasklist = self.get_tasklist(value)
            tasklists.append(tasklist)
        return tasklists

    def get_tasklist(self, resp):
        """This method parses the given response and returns task list object.

        Args:
            resp(dict): Response containing json object for taskslist.

        Returns:
            instance: Taskslist object.
        """
        tasklist = TaskList()
        if 'id' in resp:
            tasklist.set_id(resp['id'])
        if 'id_string' in resp:
            tasklist.set_id_string(resp['id_string'])
        if 'name' in resp:
            tasklist.set_name(resp['name'])
        if 'milestone' in resp:
            milestone = resp['milestone']
            milestone_obj = Milestone()
            if 'id' in milestone:
                milestone_obj.set_id(milestone['id'])
            if 'link' in milestone:
                link = milestone['link']
                if 'self' in link:
                    if 'url' in link['self']:
                        milestone_obj.set_url(link['self']['url'])
                if 'status' in link:
                    if 'url' in link['status']:
                        milestone_obj.set_status_url(link['status']['url'])
            if 'name' in milestone:
                milestone_obj.set_name(milestone['name'])
            if 'owner_name' in milestone:
                milestone_obj.set_owner_name(milestone['owner_name'])
            # Fixed: the key check was misspelled 'owener_id', so the owner
            # id was never copied onto the milestone object.
            if 'owner_id' in milestone:
                milestone_obj.set_owner_id(milestone['owner_id'])
            if 'flag' in milestone:
                milestone_obj.set_flag(milestone['flag'])
            if 'start_date' in milestone:
                milestone_obj.set_start_date(milestone['start_date'])
            if 'start_date_long' in milestone:
                milestone_obj.set_start_date_long(milestone['start_date_long'])
            if 'end_date' in milestone:
                milestone_obj.set_end_date(milestone['end_date'])
            if 'end_date_long' in milestone:
                milestone_obj.set_end_date_long(milestone['end_date_long'])
            if 'status' in milestone:
                milestone_obj.set_status(milestone['status'])
            tasklist.set_milestone(milestone_obj)
        if 'completed' in resp:
            tasklist.set_completed(resp['completed'])
        if 'created_time' in resp:
            tasklist.set_created_time(resp['created_time'])
        if 'created_time_long' in resp:
            tasklist.set_created_time_long(resp['created_time_long'])
        if 'rolled' in resp:
            tasklist.set_rolled(resp['rolled'])
        if 'sequence' in resp:
            tasklist.set_sequence(resp['sequence'])
        if 'view_type' in resp:
            tasklist.set_view_type(resp['view_type'])
        if 'link' in resp:
            link = resp['link']
            if 'self' in link:
                if 'url' in link['self']:
                    tasklist.set_url(link['self']['url'])
            if 'task' in link:
                if 'url' in link['task']:
                    tasklist.set_task_url(link['task']['url'])
        return tasklist

    def get_message(self, resp):
        """This method is used to parse the given response and returns string message.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.
        """
        return resp['response']

    def to_json(self, tasklist):
        """This method is used to create json object for tasklist.

        Args:
            tasklist(instance): Tasklist object.

        Returns:
            dict: Dictionary containing json object for task list.
        """
        data = {}
        if tasklist.get_milestone().get_id() != 0:
            data['milestone_id'] = tasklist.get_milestone().get_id()
        if tasklist.get_name() != "":
            data['name'] = tasklist.get_name()
        if tasklist.get_milestone().get_flag() != "":
            data['flag'] = tasklist.get_milestone().get_flag()
        if tasklist.get_milestone().get_status() != "":
            data['status'] = tasklist.get_milestone().get_status()
        return data
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/TaskListParser.py",
"copies": "1",
"size": "4769",
"license": "mit",
"hash": 4896295610100063000,
"line_mean": 35.9689922481,
"line_max": 86,
"alpha_frac": 0.5414132942,
"autogenerated": false,
"ratio": 4.143353605560383,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.018386667246319686,
"num_lines": 129
} |
#$Id$
from projects.model.TaskList import TaskList
class Task:
    """Model object for a Zoho Projects task."""

    def __init__(self):
        """Initialize default values for a task object."""
        self.id = ""
        self.id_string = ""
        self.name = ""
        self.completed = ""
        self.created_by = ""
        self.priority = ""
        self.percent_complete = 0
        self.start_date = ""
        self.start_date_long = 0
        self.end_date = ""
        self.end_date_long = 0
        self.details = {
            'owners': []
        }
        self.url = ""
        self.subtask_url = ""
        self.timesheet_url = ""
        self.tasklist = TaskList()
        self.end_date_format = ""
        self.start_date_format = ""
        self.created_person = ""
        self.duration = 0
        self.subtasks = None

    def set_id(self, id):
        """Set id.

        Args:
            id(long): Id.
        """
        self.id = id

    def get_id(self):
        """Get id.

        Returns:
            long: Id.
        """
        return self.id

    def set_id_string(self, id_string):
        """Set the task id string.

        Args:
            id_string(str): Task id string.
        """
        self.id_string = id_string

    def get_id_string(self):
        """Get the task id string.

        Returns:
            str: Returns the task id string.
        """
        return self.id_string

    def set_name(self, name):
        """Set name.

        Args:
            name(str): Name.
        """
        self.name = name

    def get_name(self):
        """Get name.

        Returns:
            str: Name.
        """
        return self.name

    def set_completed(self, completed):
        """Set completed.

        Args:
            completed(bool): Completed.
        """
        self.completed = completed

    def get_completed(self):
        """Get completed.

        Returns:
            bool: Completed.
        """
        return self.completed

    def set_created_by(self, created_by):
        """Set created by.

        Args:
            created_by(str): Created by.
        """
        self.created_by = created_by

    def get_created_by(self):
        """Get created by.

        Returns:
            str: Created by.
        """
        return self.created_by

    def set_created_person(self, created_person):
        """Set created person.

        Args:
            created_person(str): Created person.
        """
        self.created_person = created_person

    def get_created_person(self):
        """Get created person.

        Returns:
            str: Created person.
        """
        return self.created_person

    def set_priority(self, priority):
        """Set priority.

        Args:
            priority(str): Priority.
        """
        self.priority = priority

    def get_priority(self):
        """Get priority.

        Returns:
            str: Priority.
        """
        return self.priority

    def set_percent_complete(self, percent_complete):
        """Set percent complete.

        Args:
            percent_complete(int): Percent complete.
        """
        self.percent_complete = percent_complete

    def get_percent_complete(self):
        """Get percent complete.

        Returns:
            int: Percent complete.
        """
        return self.percent_complete

    def set_start_date(self, start_date):
        """Set start date.

        Args:
            start_date(str): Start date.
        """
        self.start_date = start_date

    def get_start_date(self):
        """Get start date.

        Returns:
            str: Start date.
        """
        return self.start_date

    def set_start_date_long(self, start_date_long):
        """Set start date long.

        Args:
            start_date_long(long): Start date long.
        """
        self.start_date_long = start_date_long

    def get_start_date_long(self):
        """Get start date long.

        Returns:
            long: Start date long.
        """
        return self.start_date_long

    def set_end_date(self, end_date):
        """Set end date.

        Args:
            end_date(str): End date.
        """
        self.end_date = end_date

    def get_end_date(self):
        """Get end date.

        Returns:
            str: End date.
        """
        return self.end_date

    def set_end_date_long(self, end_date_long):
        """Set end date long.

        Args:
            end_date_long(long): End date long.
        """
        # Fixed: previously assigned to the misspelled attribute
        # self.end_Date_long, so get_end_date_long() never saw the value.
        self.end_date_long = end_date_long

    def get_end_date_long(self):
        """Get end date long.

        Returns:
            long: End date long.
        """
        return self.end_date_long

    def set_duration(self, duration):
        """Set duration.

        Args:
            duration(str): Duration.
        """
        self.duration = duration

    def get_duration(self):
        """Get duration.

        Returns:
            str: Duration.
        """
        return self.duration

    def set_details(self, owner):
        """Append an owner to the task's details.

        Args:
            owner(instance): Owner details.
        """
        self.details['owners'].append(owner)

    def get_details(self):
        """Get details.

        Returns:
            dict: Details (maps 'owners' to a list of Owner objects).
        """
        return self.details

    def set_tasklist(self, tasklist):
        """Set tasklist.

        Args:
            tasklist(instance): Tasklist object.
        """
        self.tasklist = tasklist

    def get_tasklist(self):
        """Get task list.

        Returns:
            instance: Tasklist object.
        """
        return self.tasklist

    def set_url(self, url):
        """Set url.

        Args:
            url(str): Url.
        """
        self.url = url

    def get_url(self):
        """Get url.

        Returns:
            str: Url.
        """
        return self.url

    def set_subtask_url(self, subtask_url):
        """Set the subtask url.

        Args:
            subtask_url(str): Subtask Url.
        """
        # Fixed: the original def line was missing its trailing colon
        # (a syntax error).
        self.subtask_url = subtask_url

    def get_subtask_url(self):
        """Get the subtask url.

        Returns:
            str: Returns the subtask url.
        """
        # Fixed: the original was missing `self`, the trailing colon, and
        # the return statement.
        return self.subtask_url

    def set_timesheet_url(self, timesheet_url):
        """Set timesheet url.

        Args:
            timesheet_url(str): Timesheet url.
        """
        self.timesheet_url = timesheet_url

    def get_timesheet_url(self):
        """Get timesheet url.

        Returns:
            str: Timesheet url.
        """
        return self.timesheet_url

    def set_start_date_format(self, start_date_format):
        """Set start date format.

        Args:
            start_date_format(str): Start date format.
        """
        self.start_date_format = start_date_format

    def get_start_date_format(self):
        """Get start date format.

        Returns:
            str: Start date format.
        """
        return self.start_date_format

    def set_end_date_format(self, end_date_format):
        """Set end date format.

        Args:
            end_date_format(str): End date format.
        """
        self.end_date_format = end_date_format

    def get_end_date_format(self):
        """Get end date format.

        Returns:
            str: End date format.
        """
        return self.end_date_format

    def set_subtasks(self, subtasks):
        """Set whether the task has subtasks.

        Args:
            subtasks(bool): True or false.
        """
        self.subtasks = subtasks

    def get_subtasks(self):
        """Get whether the task has subtasks.

        Returns:
            bool: Returns true if the task has subtasks else returns false.
        """
        return self.subtasks
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/model/Task.py",
"copies": "1",
"size": "7932",
"license": "mit",
"hash": -95959538744144750,
"line_mean": 17.5761124122,
"line_max": 69,
"alpha_frac": 0.4895360565,
"autogenerated": false,
"ratio": 4.118380062305296,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9870637169132201,
"avg_score": 0.04745578993461893,
"num_lines": 427
} |
#$Id$
from projects.model.Tasklog import Tasklog
from projects.model.Timelog import Timelog
from projects.model.Date import Date
from projects.model.Buglog import Buglog
from projects.model.Generallog import Generallog
class TimesheetsParser:
    """Parses JSON responses of the Time sheets API into model objects."""

    def get_task_logs(self, resp):
        """Parse the response and return a task log object.

        Args:
            resp(dict): Response containing json object for task logs.

        Returns:
            instance: Task log object built from the last entry of
                'task_logs'.
        """
        # NOTE(review): only the last list entry is returned, and an empty
        # 'task_logs' list raises NameError — pre-existing behavior kept.
        for value in resp['timelogs']['task_logs']:
            tasklog = self.json_to_tasklog(value)
        return tasklog

    def get_message(self, resp):
        """Extract the plain success message from a response.

        Args:
            resp(dict): Response containing json object for message.

        Returns:
            str: Success message.
        """
        return resp['response']

    def get_time_logs(self, resp):
        """Parse the response and return a time log object.

        Args:
            resp(dict): Response containing json object for time logs.

        Returns:
            instance: Time log object with one Date entry per day, each
                holding its task/bug/general logs.
        """
        timelog = Timelog()
        resp = resp['timelogs']
        if 'grandtotal' in resp:
            timelog.set_grandtotal(resp['grandtotal'])
        if 'role' in resp:
            timelog.set_role(resp['role'])
        if 'date' in resp:
            for value in resp['date']:
                date = Date()
                if 'total_hours' in value:
                    date.set_total_hours(value['total_hours'])
                if 'display_format' in value:
                    date.set_display_format(value['display_format'])
                if 'date_long' in value:
                    date.set_date_long(value['date_long'])
                if 'task_logs' in value:
                    for task_log in value['task_logs']:
                        date.set_task_logs(self.json_to_tasklog(task_log))
                if 'bug_logs' in value:
                    for bug_log in value['bug_logs']:
                        date.set_bug_logs(self.json_to_buglog(bug_log))
                if 'general_logs' in value:
                    for general_log in value['general_logs']:
                        date.set_general_logs(
                            self.json_to_generallog(general_log))
                timelog.set_date(date)
        return timelog

    def get_bug_log(self, resp):
        """Parse the response and return a bug log object.

        Args:
            resp(dict): Response containing json object for bug logs.

        Returns:
            instance: Bug log object built from the last entry of
                'bug_logs'.
        """
        for value in resp['timelogs']['bug_logs']:
            buglog = self.json_to_buglog(value)
        return buglog

    def get_general_log(self, resp):
        """Parse the response and return a general log object.

        Args:
            resp(dict): Response containing json object for general logs.

        Returns:
            instance: General log object built from the last entry of
                'general_logs'.
        """
        for value in resp['timelogs']['general_logs']:
            general_log = self.json_to_generallog(value)
        return general_log

    def _fill_common_log_fields(self, log, value):
        """Copy the fields shared by task, bug and general logs from the
        raw json dict onto the given log model object.

        Args:
            log(instance): Tasklog, Buglog or Generallog object (they all
                expose the same setters for these fields).
            value(dict): Raw json dict for one log entry.
        """
        if 'owner_name' in value:
            log.set_owner_name(value['owner_name'])
        if 'hours' in value:
            log.set_hours(value['hours'])
        if 'total_minutes' in value:
            log.set_total_minutes(value['total_minutes'])
        if 'bill_status' in value:
            log.set_bill_status(value['bill_status'])
        if 'link' in value:
            if 'self' in value['link']:
                # Bug fix: test for the 'url' key in the 'self' dict — the
                # original tested membership inside the url string itself
                # (and raised KeyError when the key was missing).
                if 'url' in value['link']['self']:
                    log.set_url(value['link']['self']['url'])
        if 'hours_display' in value:
            log.set_hours_display(value['hours_display'])
        if 'id' in value:
            log.set_id(value['id'])
        if 'log_date_format' in value:
            log.set_log_date_format(value['log_date_format'])
        if 'log_date' in value:
            log.set_log_date(value['log_date'])
        if 'notes' in value:
            log.set_notes(value['notes'])
        if 'log_date_long' in value:
            log.set_log_date_long(value['log_date_long'])
        if 'minutes' in value:
            log.set_minutes(value['minutes'])
        if 'owner_id' in value:
            log.set_owner_id(value['owner_id'])

    def json_to_tasklog(self, value):
        """Convert one raw json dict to a Tasklog object.

        Args:
            value(dict): Response containing json object for task log.

        Returns:
            instance: Task log object.
        """
        tasklog = Tasklog()
        self._fill_common_log_fields(tasklog, value)
        # Task-specific nested fields.
        if 'task' in value:
            if 'id' in value['task']:
                tasklog.set_task_id(value['task']['id'])
            if 'name' in value['task']:
                tasklog.set_task_name(value['task']['name'])
        return tasklog

    def json_to_buglog(self, value):
        """Convert one raw json dict to a Buglog object.

        Args:
            value(dict): Response containing json object for bug log.

        Returns:
            instance: Bug log object.
        """
        buglog = Buglog()
        self._fill_common_log_fields(buglog, value)
        # Bug-specific nested fields.
        if 'bug' in value:
            if 'id' in value['bug']:
                buglog.set_bug_id(value['bug']['id'])
            if 'title' in value['bug']:
                buglog.set_bug_title(value['bug']['title'])
        return buglog

    def json_to_generallog(self, value):
        """Convert one raw json dict to a Generallog object.

        (Docstring fix: the original claimed this returned a bug log.)

        Args:
            value(dict): Response containing json object for general log.

        Returns:
            instance: General log object.
        """
        generallog = Generallog()
        self._fill_common_log_fields(generallog, value)
        # General-log-specific field.
        if 'name' in value:
            generallog.set_name(value['name'])
        return generallog

    def to_json(self, log):
        """Convert a time log object to its json (dict) payload.

        Args:
            log(instance): Time log object (task, bug or general).

        Returns:
            dict: Payload with only the non-empty fields.
        """
        data = {}
        # Fields equal to "" are deliberately omitted from the payload.
        if log.get_log_date() != "":
            data['date'] = log.get_log_date()
        if log.get_bill_status() != "":
            data['bill_status'] = log.get_bill_status()
        if log.get_hours() != "":
            data['hours'] = log.get_hours()
        if log.get_notes() != "":
            data['notes'] = log.get_notes()
        return data
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/parser/TimesheetsParser.py",
"copies": "1",
"size": "9294",
"license": "mit",
"hash": -1816609797878351000,
"line_mean": 34.4732824427,
"line_max": 86,
"alpha_frac": 0.5420701528,
"autogenerated": false,
"ratio": 4.025119099177133,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5067189251977133,
"avg_score": null,
"num_lines": null
} |
#$Id$
# Smoke-test script for the Events API: lists, adds, updates and deletes
# an event against a live Zoho Projects portal (requires a valid token).
from projects.service.ZohoProjects import ZohoProjects
from projects.model.Event import Event
from projects.model.Participant import Participant
from projects.exception.ProjectsException import ProjectsException
authtoken = '{auth_token}'  # replace with a real auth token before running
zoho_portals = ZohoProjects(authtoken)
try:
    # Resolve the first portal/project visible to this token.
    portal_id = zoho_portals.get_portals_api().get_portals()[0].get_id()
    zoho_projects = ZohoProjects(authtoken, portal_id)
    projects_api = zoho_projects.get_projects_api()
    project_id = projects_api.get_projects()[0].get_id()
    events_api = zoho_projects.get_events_api()
    # Get all closed events.
    param = {
        'status': 'closed'
    }
    print events_api.get_events(project_id, param)
    # Add an event with one participant.
    event = Event()
    event.set_title("Event 1")
    event.set_scheduled_on("06-16-2014")
    event.set_hour("10")
    event.set_minutes("30")
    event.set_ampm("am")
    event.set_duration_hour("2")
    event.set_duration_minutes("30")
    participant = Participant()
    participant.set_participant_id("2213211")
    event.set_participants(participant)
    print events_api.add(project_id, event)
    # Re-fetch closed events to pick an event id for update/delete.
    param = {
        'status': 'closed'
    }
    event_id = events_api.get_events(project_id, param)[0].get_id()
    # Update the event (same fields plus a location).
    event = Event()
    event.set_id(event_id)
    event.set_title("Event 1")
    event.set_scheduled_on("06-16-2014")
    event.set_hour("10")
    event.set_minutes("30")
    event.set_ampm("am")
    event.set_duration_hour("2")
    event.set_duration_minutes("30")
    participant = Participant()
    participant.set_participant_id("2213211")
    event.set_participants(participant)
    event.set_location("Chennai")
    print events_api.update(project_id, event)
    # Delete the event.
    print events_api.delete(project_id, event_id)
except ProjectsException as pe:
    # Print the API error code and message on any failure.
    print "Error code:" + pe.get_code() + "\nError Message: " + pe.get_message()
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "test/EventsTest.py",
"copies": "1",
"size": "1932",
"license": "mit",
"hash": 4294765870046923000,
"line_mean": 25.1081081081,
"line_max": 81,
"alpha_frac": 0.6630434783,
"autogenerated": false,
"ratio": 3.2635135135135136,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.44265569918135134,
"avg_score": null,
"num_lines": null
} |
#$Id$
# Smoke-test script for the Milestone API: list, read, create, update,
# change status of and delete a milestone on a live portal.
from projects.service.ZohoProjects import ZohoProjects
from projects.model.Milestone import Milestone
from projects.exception.ProjectsException import ProjectsException
authtoken = '{auth_token}'  # replace with a real auth token before running
zoho_portals = ZohoProjects(authtoken)
try:
    # Resolve the first portal/project and its owner for milestone fields.
    portal_id = zoho_portals.get_portals_api().get_portals()[0].get_id()
    zoho_projects = ZohoProjects(authtoken, portal_id)
    projects_api = zoho_projects.get_projects_api()
    project_id = projects_api.get_projects()[0].get_id()
    owner_id = projects_api.get_projects()[0].get_owner_id()
    milestone_api = zoho_projects.get_milestone_api()
    milestone_id = milestone_api.get_milestones(project_id)[0].get_id()
    # All milestones of the project.
    print milestone_api.get_milestones(project_id)
    # Get details of a milestone.
    print milestone_api.get(project_id, milestone_id)
    # Create milestone.
    milestone = Milestone()
    milestone.set_name("Mile 1")
    milestone.set_start_date("06-01-2014")
    milestone.set_end_date("06-10-2014")
    milestone.set_owner_id(owner_id)
    milestone.set_flag("internal")
    print milestone_api.create(project_id, milestone)
    # Update milestone.
    milestone = Milestone()
    milestone.set_id(milestone_id)
    milestone.set_name("Mile 1")
    milestone.set_start_date("06-01-2014")
    milestone.set_end_date("06-10-2014")
    milestone.set_owner_id(owner_id)
    milestone.set_flag("internal")
    print milestone_api.update(project_id, milestone)
    # Update milestone status (status code semantics defined by the API;
    # confirm against the Zoho Projects docs).
    status = 1
    print milestone_api.update_status(project_id, milestone_id, status)
    # Delete milestone.
    print milestone_api.delete(project_id, milestone_id)
except ProjectsException as pe:
    # Print the API error code and message on any failure.
    print "Error code:" + pe.get_code() + "\nError Message: " + pe.get_message()
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "test/MilestoneTest.py",
"copies": "1",
"size": "1798",
"license": "mit",
"hash": -2309764436953712600,
"line_mean": 27.5396825397,
"line_max": 81,
"alpha_frac": 0.6996662959,
"autogenerated": false,
"ratio": 3.1710758377425043,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.4370742133642504,
"avg_score": null,
"num_lines": null
} |
#$Id$
# Smoke-test script for the Dashboard API: lists project activities,
# reads project statuses and posts a new status (needs a live portal).
from projects.service.ZohoProjects import ZohoProjects
from projects.model.Status import Status
from projects.exception.ProjectsException import ProjectsException
authtoken = '{auth_token}'  # replace with a real auth token before running
zoho_portals = ZohoProjects(authtoken)
try:
    # Resolve the first portal and project visible to this token.
    portal_id = zoho_portals.get_portals_api().get_portals()[0].get_id()
    zoho_projects = ZohoProjects(authtoken, portal_id)
    projects_api = zoho_projects.get_projects_api()
    dashboard_api = zoho_projects.get_dashboard_api()
    project_id = projects_api.get_projects()[0].get_id()
    # Get project activities (all, then a filtered page).
    print dashboard_api.get_project_activities(project_id)
    param = {
        'index': 1,
        'range': 1
    }
    print dashboard_api.get_project_activities(project_id, param)
    # Get status of given project.
    print dashboard_api.get_statuses(project_id)
    # Add status for given project.
    status = Status()
    status.set_content("Idle")
    print dashboard_api.add_status(project_id, status)
except ProjectsException as pe:
    # Print the API error code and message on any failure.
    print "Error code:" + pe.get_code() + "\nError Message: " + pe.get_message()
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "test/DashboardTest.py",
"copies": "1",
"size": "1113",
"license": "mit",
"hash": 7812049854448345000,
"line_mean": 27.5384615385,
"line_max": 81,
"alpha_frac": 0.6900269542,
"autogenerated": false,
"ratio": 3.435185185185185,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4625212139385185,
"avg_score": null,
"num_lines": null
} |
#$Id$
# Smoke-test script for the Tasks API: list tasks, read one, create,
# update, manage comments and delete a task on a live portal.
from projects.service.ZohoProjects import ZohoProjects
from projects.model.Task import Task
from projects.model.Owner import Owner
from projects.exception.ProjectsException import ProjectsException
authtoken = '{auth_token}'  # replace with a real auth token before running
zoho_portals = ZohoProjects(authtoken)
try:
    # Resolve the first portal/project, its owner, a tasklist and a task.
    portal_id = zoho_portals.get_portals_api().get_portals()[0].get_id()
    zoho_projects = ZohoProjects(authtoken, portal_id)
    projects_api = zoho_projects.get_projects_api()
    project_id = projects_api.get_projects()[0].get_id()
    owner_id = projects_api.get_projects()[0].get_owner_id()
    tasks_api = zoho_projects.get_tasks_api()
    tasklist_id = tasks_api.get_tasks(project_id)[0].get_tasklist().get_id()
    task_id = tasks_api.get_tasks(project_id)[0].get_id()
    # Get all tasks (unfiltered, then with an explicit status filter).
    print tasks_api.get_tasks(project_id)
    param = {'status': 'all'}
    print tasks_api.get_tasks(project_id, param)
    # Tasks for a tasklist.
    print tasks_api.get_tasklist_tasks(project_id, tasklist_id)
    # Get all details of the task.
    print tasks_api.get(project_id, task_id)
    # Create task.
    task = Task()
    task.set_name('Task 1')
    owner = Owner()
    owner.set_name("abi")
    task.set_details(owner)
    task.set_start_date("06-11-2014")
    task.set_end_date("06-21-2014")
    task.set_percent_complete(1)
    task.set_duration(1)
    task.set_priority("High")
    print tasks_api.create(project_id, task)
    # Update task (same fields, new completion percentage).
    task = Task()
    task.set_id(task_id)
    task.set_name('Task 1')
    owner = Owner()
    owner.set_id(owner_id)
    owner.set_name("abi")
    task.set_details(owner)
    task.set_start_date("06-11-2014")
    task.set_end_date("06-21-2014")
    task.set_percent_complete(10)
    task.set_duration(1)
    task.set_priority("High")
    print tasks_api.update(project_id, task)
    print tasks_api.get_subtasks(project_id, task_id)
    # Task comments: list, add, then delete the first existing one.
    print tasks_api.get_comments(project_id, task_id, None)
    comments = tasks_api.get_comments(project_id, task_id, None)
    comment_id = comments[0].get_id()
    print tasks_api.add_comment(project_id, task_id, "Test Comment")
    print tasks_api.delete_comment(project_id, task_id, comment_id)
    # Delete task.
    print tasks_api.delete(project_id, task_id)
except ProjectsException as pe:
    # Print the API error code and message on any failure.
    print "Error code:" + pe.get_code() + "\nError Message: " + pe.get_message()
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "test/TasksTest.py",
"copies": "1",
"size": "2430",
"license": "mit",
"hash": 9047379263948943000,
"line_mean": 25.1290322581,
"line_max": 81,
"alpha_frac": 0.6588477366,
"autogenerated": false,
"ratio": 3.0994897959183674,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42583375325183676,
"avg_score": null,
"num_lines": null
} |
#$Id$
# Smoke-test script for the Timesheets API: exercises add/update/delete of
# task, bug and general time logs against a live Zoho Projects portal.
from projects.service.ZohoProjects import ZohoProjects
from projects.model.Tasklog import Tasklog
from projects.model.Buglog import Buglog
from projects.model.Generallog import Generallog
from projects.exception.ProjectsException import ProjectsException
authtoken = '{auth_token}'  # replace with a real auth token before running
zoho_portals = ZohoProjects(authtoken)
try:
    # Resolve the first portal/project visible to this token, plus one
    # task id and one bug id to attach time logs to.
    portal_id = zoho_portals.get_portals_api().get_portals()[0].get_id()
    zoho_projects = ZohoProjects(authtoken, portal_id)
    projects_api = zoho_projects.get_projects_api()
    project_id = projects_api.get_projects()[0].get_id()
    timesheets_api = zoho_projects.get_timesheets_api()
    tasks_api = zoho_projects.get_tasks_api()
    task_id = tasks_api.get_tasks(project_id)[0].get_id()
    bugs_api = zoho_projects.get_bugs_api()
    bug_id = bugs_api.get_bugs(project_id)[0].get_id()
    # Get all time logs in the given project (weekly view of task logs).
    param = {
        'index': 0,
        'range': 3,
        'users_list': 'all',
        'view_type': 'week',
        'date': '10-11-2014',
        'bill_status': 'All',
        'component_type': 'task'
    }
    print timesheets_api.get_time_logs(project_id, param)
    task_log_id = timesheets_api.get_time_logs(project_id, param).get_date()[0].get_task_logs()[0].get_id()
    # Add time log to task.
    tasklog = Tasklog()
    tasklog.set_task_id(task_id)
    tasklog.set_log_date("10-11-2014")
    tasklog.set_bill_status("Billable")
    tasklog.set_hours("12:30")
    tasklog.set_notes("time_log")
    print timesheets_api.add_task_log(project_id, tasklog)
    # Update time log to task.
    tasklog = Tasklog()
    tasklog.set_task_id(task_id)
    tasklog.set_id(task_log_id)
    tasklog.set_log_date("06-11-2014")
    tasklog.set_bill_status("Billable")
    tasklog.set_hours("12:30")
    tasklog.set_notes("time_log")
    print timesheets_api.update_task_log(project_id, tasklog)
    # Delete time log to task.
    print timesheets_api.delete_task_log(project_id, task_id, task_log_id)
    # Add time log for bug.
    buglog = Buglog()
    buglog.set_bug_id(bug_id)
    buglog.set_log_date("06-11-2014")
    buglog.set_bill_status("Billable")
    buglog.set_hours("12:30")
    buglog.set_notes("time_log")
    print timesheets_api.add_bug_log(project_id, buglog).get_bug_title()
    # Daily view of bug logs to pick the new log's id.
    param = {
        'index': 0,
        'range': 3,
        'users_list': 'all',
        'view_type': 'day',
        'date': '06-11-2014',
        'bill_status': 'All',
        'component_type': 'bug'
    }
    bug_log_id = timesheets_api.get_time_logs(project_id, param).get_date()[0].get_bug_logs()[0].get_id()
    # Update time log for bug.
    buglog = Buglog()
    buglog.set_id(bug_log_id)
    buglog.set_bug_id(bug_id)
    buglog.set_log_date("06-11-2014")
    buglog.set_bill_status("Billable")
    buglog.set_hours("12:30")
    buglog.set_notes("time_log")
    print timesheets_api.update_bug_log(project_id, buglog)
    # Delete time log for bug.
    print timesheets_api.delete_bug_log(project_id, bug_id, bug_log_id)
    # Add general log.
    general_log = Generallog()
    general_log.set_name("General log2")
    general_log.set_log_date("06-16-2014")
    general_log.set_bill_status("Non Billable")
    general_log.set_hours("10:33")
    print timesheets_api.add_general_log(project_id, general_log)
    # Daily view of general logs to pick the new log's id.
    param = {
        'index': 0,
        'range': 3,
        'users_list': 'all',
        'view_type': 'day',
        'date': '06-16-2014',
        'bill_status': 'All',
        'component_type': 'general'
    }
    general_log_id = timesheets_api.get_time_logs(project_id, param).get_date()[0].get_general_logs()[0].get_id()
    # Update time for general log.
    # NOTE(review): this calls add_general_log again rather than an
    # update method — looks like a copy/paste slip; confirm intent.
    general_log = Generallog()
    general_log.set_id(general_log_id)
    general_log.set_name("General log")
    general_log.set_log_date("06-16-2014")
    general_log.set_bill_status("Billable")
    general_log.set_hours("10:30")
    print timesheets_api.add_general_log(project_id, general_log)
    # Delete general time log.
    print timesheets_api.delete_general_log(project_id, general_log_id)
except ProjectsException as pe:
    # Print the API error code and message on any failure.
    print "Error code:" + pe.get_code() + "\nError Message: " + pe.get_message()
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "test/TimesheetsTest.py",
"copies": "1",
"size": "4202",
"license": "mit",
"hash": -278451715487801900,
"line_mean": 27.3918918919,
"line_max": 113,
"alpha_frac": 0.6361256545,
"autogenerated": false,
"ratio": 3.005722460658083,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9095516289634207,
"avg_score": 0.009266365104775041,
"num_lines": 148
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.BugsParser import BugsParser
# Module-level singletons shared by every BugsApi call below: the service
# root url, the HTTP transport and the response parser.
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = BugsParser()
class BugsApi:
    """Bugs Api class is used to
    1.Get all bugs in given project.
    2.Get the details of the bug.
    3.Create a bug.
    4.Update a bug.
    5.Delete a bug.
    6.Get the default and custom bug fields.
    """

    def __init__(self, authtoken, portal_id):
        """Initialize Bugs api using user's authtoken and portal id.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        # Query-string credentials sent with every request.
        self.details = {
            'authtoken': authtoken
        }
        self.portal_id = portal_id

    def get_bugs(self, project_id, param=None):
        """Get all the bugs in the given project.

        Args:
            project_id(long): Project id.
            param(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            list of instance: List of bugs object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/bugs/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_bugs(response)

    def get(self, project_id, bug_id):
        """Get the details of a single bug.

        Args:
            project_id(long): Project id.
            bug_id(long): Bug id.

        Returns:
            instance: Bug object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/bugs/' + str(bug_id) + '/'
        response = zoho_http_client.get(url, self.details)
        return parser.get_bugs(response)[0]

    def create(self, project_id, bug):
        """Create a bug.

        Args:
            project_id(long): Project id.
            bug(instance): Bug object.

        Returns:
            instance: The newly created bug object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/bugs/'
        data = parser.to_json(bug)
        response = zoho_http_client.post(url, self.details, data)
        return parser.get_bugs(response)[0]

    def update(self, project_id, bug):
        """Update the bug.

        Args:
            project_id(long): Project id.
            bug(instance): Bug object carrying the id of the bug to update.

        Returns:
            instance: Bug object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/bugs/' + str(bug.get_id()) + '/'
        data = parser.to_json(bug)
        response = zoho_http_client.post(url, self.details, data)
        return parser.get_bugs(response)[0]

    def delete(self, project_id, bug_id):
        """Delete the bug.

        Args:
            project_id(long): Project id.
            bug_id(long): Bug id.

        Returns:
            str: Success message('Bug deleted successfully.').
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/bugs/' + str(bug_id) + '/'
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)

    def get_default_fields(self, project_id, query=None):
        """Get all the default bug fields in the given project.

        Args:
            project_id(long): Project id.
            query(dict, optional): Extra query parameters.

        Returns:
            list of instance: List of default field object.
        """
        # Fix: dropped the non-idiomatic trailing semicolons.
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/bugs/defaultfields/'
        response = zoho_http_client.get(url, self.details, query)
        return parser.get_default_fields(response)

    def get_custom_fields(self, project_id, query=None):
        """Get all the custom bug fields in the given project.

        Args:
            project_id(long): Project id.
            query(dict, optional): Extra query parameters.

        Returns:
            list of instance: List of Customfield object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/bugs/customfields/'
        response = zoho_http_client.get(url, self.details, query)
        return parser.get_custom_fields(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/BugsApi.py",
"copies": "1",
"size": "4269",
"license": "mit",
"hash": 6696853024219391000,
"line_mean": 28.8531468531,
"line_max": 126,
"alpha_frac": 0.5654720075,
"autogenerated": false,
"ratio": 3.6738382099827884,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9557581102657652,
"avg_score": 0.03634582296502732,
"num_lines": 143
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.CategoryParser import CategoryParser
# Module-level singletons shared by every CategoryApi call below: the
# service root url, the HTTP transport and the response parser.
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = CategoryParser()
class CategoryApi:
    """Wrapper around the forum-category endpoints: lists the forum
    categories of a project and adds a new category to it.
    """

    def __init__(self, authtoken, portal_id):
        """Create a CategoryApi bound to one portal.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        self.details = {
            'authtoken': authtoken
        }
        self.portal_id = portal_id

    def _categories_url(self, project_id):
        """Return the categories endpoint url for the given project."""
        return '{0}portal/{1}/projects/{2}/categories/'.format(
            base_url, self.portal_id, project_id)

    def get_categories(self, project_id):
        """Fetch every forum category of the project.

        Args:
            project_id(long): Project id.

        Returns:
            list of instance: List of category object.
        """
        response = zoho_http_client.get(self._categories_url(project_id),
                                        self.details)
        return parser.get_categories(response)

    def add(self, project_id, category):
        """Create a forum category inside the project.

        Args:
            project_id(long): Project id.
            category(instance): Category.

        Returns:
            instance: Category object.
        """
        payload = parser.to_json(category)
        response = zoho_http_client.post(self._categories_url(project_id),
                                         self.details, payload)
        return parser.get_categories(response)[0]
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/CategoryApi.py",
"copies": "1",
"size": "1612",
"license": "mit",
"hash": 2605993121431860700,
"line_mean": 27.7857142857,
"line_max": 107,
"alpha_frac": 0.5986352357,
"autogenerated": false,
"ratio": 3.8564593301435406,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9775003579831503,
"avg_score": 0.03601819720240773,
"num_lines": 56
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.DashboardParser import DashboardParser
# Module-level singletons shared by every DashboardApi call below: the
# service root url, the HTTP transport and the response parser.
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = DashboardParser()
class DashboardApi:
    """Wrapper around the project dashboard endpoints: lists recent
    project activities, reads project statuses and posts a new status.
    """

    def __init__(self, authtoken, portal_id):
        """Initialize dashboard api.

        Args:
            authtoken(str): User's authtoken.
            portal_id(int): User's portal id.
        """
        self.details = {
            'authtoken': authtoken
        }
        self.portal_id = portal_id

    def _project_url(self, project_id, leaf):
        """Return '<base>portal/<portal>/projects/<project>/<leaf>/'."""
        return '{0}portal/{1}/projects/{2}/{3}/'.format(
            base_url, self.portal_id, project_id, leaf)

    def get_project_activities(self, project_id, param=None):
        """List all recent activities of the project.

        Args:
            project_id(long): Project id.
            param(dict, optional): Filter with which the result has to be
                displayed.

        Returns:
            list of instance: List of activities object.
        """
        endpoint = self._project_url(project_id, 'activities')
        response = zoho_http_client.get(endpoint, self.details, param)
        return parser.get_activities(response)

    def get_statuses(self, project_id, param=None):
        """Get the statuses of the given project.

        Args:
            project_id(str): Project id.
            param(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            list of instance: List of status object.
        """
        endpoint = self._project_url(project_id, 'statuses')
        response = zoho_http_client.get(endpoint, self.details, param)
        return parser.get_statuses(response)

    def add_status(self, project_id, status):
        """Add a new status for a project.

        Args:
            project_id(long): Project id.
            status(instance): Status object.

        Returns:
            instance: The created status object.
        """
        endpoint = self._project_url(project_id, 'statuses')
        payload = parser.to_json(status)
        response = zoho_http_client.post(endpoint, self.details, payload)
        return parser.get_statuses(response)[0]
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/DashboardApi.py",
"copies": "1",
"size": "2396",
"license": "mit",
"hash": 5551330516750602000,
"line_mean": 30.1168831169,
"line_max": 106,
"alpha_frac": 0.5984974958,
"autogenerated": false,
"ratio": 3.9472817133443163,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5045779209144317,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.DocumentsParser import DocumentsParser
from os.path import basename
# Module-level singletons shared by every DocumentsApi call below: the
# service root url, the HTTP transport and the response parser.
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = DocumentsParser()
class DocumentsApi:
    """Documents api class is used to
    1.Get all the documents in the given project.
    2.Get the version details of the document.
    3.Upload the document.
    4.Upload the latest version of the document.
    5.Delete document.
    """

    def __init__(self, authtoken, portal_id):
        """Initialize Documents api using user's authtoken and portal id.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        # Query-string credentials sent with every request.
        self.details = {
            'authtoken': authtoken
        }
        self.portal_id = portal_id

    def _build_file_list(self, document):
        """Build the multipart attachment list for a document upload.

        Shared by add() and upload_latest() (the logic was duplicated).
        Files are read eagerly and closed immediately — the original code
        left the file handles open.

        Args:
            document(instance): Document object.

        Returns:
            list of dict: Attachment payloads for the http client.
        """
        file_list = []
        if document.get_upload_doc():
            for value in document.get_upload_doc():
                with open(value) as doc_file:
                    content = doc_file.read()
                file_list.append({
                    'uploaddoc': {
                        'filename': basename(value),
                        'content': content
                    }
                })
        return file_list

    def get_documents(self, project_id, param=None):
        """Get all documents in the given project.

        Args:
            project_id(long): Project id.
            param(dict, optional): Filter with which the list has to be
                displayed.

        Returns:
            list of instance: List of document object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/documents/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_documents(response)

    def get_version_details(self, project_id, document_id, version=None):
        """Get the version details of the document.

        Args:
            project_id(long): Project id.
            document_id(long): Document id.
            version(str, optional): Version number of the document.

        Returns:
            instance: documents object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/documents/' + str(document_id) + '/'
        # Only pass a query parameter when a specific version is wanted.
        if version is not None:
            param = {
                'version': version
            }
        else:
            param = None
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_documents(response)[0]

    def add(self, project_id, document):
        """Upload a document.

        Args:
            project_id(long): Project id.
            document(instance): Document object.

        Returns:
            instance: Document object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/documents/'
        data = parser.to_json(document)
        file_list = self._build_file_list(document)
        response = zoho_http_client.post(url, self.details, data, None, file_list)
        return parser.get_documents(response)[0]

    def upload_latest(self, project_id, document):
        """Upload the latest version of the document.

        Args:
            project_id(long): Project id.
            document(instance): Document object carrying the document id.

        Returns:
            instance: Document object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/documents/' + str(document.get_id()) + '/'
        data = parser.to_json(document)
        file_list = self._build_file_list(document)
        response = zoho_http_client.post(url, self.details, data, None, file_list)
        return parser.get_documents(response)[0]

    def delete(self, project_id, document_id):
        """Delete the document.

        Args:
            project_id(long): Project id.
            document_id(long): Document id.

        Returns:
            str: Success message.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/documents/' + str(document_id) + '/'
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/DocumentsApi.py",
"copies": "1",
"size": "4748",
"license": "mit",
"hash": 7770220785722540000,
"line_mean": 32.4366197183,
"line_max": 137,
"alpha_frac": 0.5385425442,
"autogenerated": false,
"ratio": 4.239285714285714,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5277828258485714,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.EventsParser import EventsParser
# Module-level singletons shared by every EventsApi call below: the
# service root url, the HTTP transport and the response parser.
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = EventsParser()
class EventsApi:
    """Client for the event endpoints of a Zoho Projects portal.

    Supports listing, creating, updating and deleting the events
    that belong to a single project.
    """
    def __init__(self, authtoken, portal_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        self.details = {'authtoken': authtoken}
        self.portal_id = portal_id

    def _events_url(self, project_id):
        """Return the events collection URL for the given project."""
        return base_url + 'portal/%s/projects/%s/events/' % (
            self.portal_id, project_id)

    def get_events(self, project_id, param=None):
        """List all the events in the given project.

        Args:
            project_id(long): Project id.
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: List of events object.
        """
        response = zoho_http_client.get(self._events_url(project_id),
                                        self.details, param)
        return parser.get_events(response)

    def add(self, project_id, event):
        """Create an event in the given project.

        Args:
            project_id(long): Project id.
            event(instance): Event object.

        Returns:
            instance: Event object.
        """
        payload = parser.to_json(event)
        response = zoho_http_client.post(self._events_url(project_id),
                                         self.details, payload)
        return parser.get_events(response)[0]

    def update(self, project_id, event):
        """Update an existing event.

        Args:
            project_id(long): Project id.
            event(instance): Event object (its id selects the event).

        Returns:
            instance: Event object.
        """
        url = self._events_url(project_id) + '%s/' % event.get_id()
        payload = parser.to_json(event)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_events(response)[0]

    def delete(self, project_id, event_id):
        """Delete the given event.

        Args:
            project_id(long): Project id.
            event_id(long): Event id.

        Returns:
            str: Success message('Event Deleted Successfully').
        """
        url = self._events_url(project_id) + '%s/' % event_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/EventsApi.py",
"copies": "1",
"size": "2828",
"license": "mit",
"hash": -8870038037070942000,
"line_mean": 29.4086021505,
"line_max": 131,
"alpha_frac": 0.5774398868,
"autogenerated": false,
"ratio": 3.7959731543624162,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48734130411624166,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.FoldersParser import FoldersParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = FoldersParser()
class FoldersApi:
    """Client for the folder endpoints of a Zoho Projects portal.

    Supports listing, adding, updating and deleting the folders of a
    single project.
    """
    def __init__(self, authtoken, portal_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        self.details = {'authtoken': authtoken}
        self.portal_id = portal_id

    def _folders_url(self, project_id):
        """Return the folders collection URL for the given project."""
        return base_url + 'portal/%s/projects/%s/folders/' % (
            self.portal_id, project_id)

    def get_folders(self, project_id):
        """List all the folders in the given project.

        Args:
            project_id(long): Project id.

        Returns:
            list of instance: List of Folders object.
        """
        response = zoho_http_client.get(self._folders_url(project_id),
                                        self.details)
        return parser.get_folders(response)

    def add(self, project_id, folder):
        """Create the given folder.

        Args:
            project_id(long): Project id.
            folder(instance): Folder object.

        Returns:
            instance: Folder object.
        """
        payload = parser.to_json(folder)
        response = zoho_http_client.post(self._folders_url(project_id),
                                         self.details, payload)
        return parser.get_folders(response)[0]

    def update(self, project_id, folder):
        """Update an existing folder.

        Args:
            project_id(long): Project id.
            folder(instance): Folder object (its id selects the folder).

        Returns:
            instance: Folder object.
        """
        url = self._folders_url(project_id) + '%s/' % folder.get_id()
        payload = parser.to_json(folder)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_folders(response)[0]

    def delete(self, project_id, folder_id):
        """Delete the given folder.

        Args:
            project_id(long): Project id.
            folder_id(long): Folder id.

        Returns:
            str: Success message returned by the server.
        """
        url = self._folders_url(project_id) + '%s/' % folder_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/FoldersApi.py",
"copies": "1",
"size": "2737",
"license": "mit",
"hash": -5804473790714655000,
"line_mean": 28.4301075269,
"line_max": 132,
"alpha_frac": 0.5791012057,
"autogenerated": false,
"ratio": 3.7803867403314917,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9762966045888748,
"avg_score": 0.019304380028548843,
"num_lines": 93
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.ForumsParser import ForumsParser
from os.path import basename
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = ForumsParser()
class ForumsApi:
    """Forums Api class is used to
    1.Get all the forums in the given project.
    2.Adds the forum post.
    3.Updates the forum post.
    4.Deletes the forum post.
    5.Gets all the forum comments.
    6.Adds the forum comment.
    """
    def __init__(self, authtoken, portal_id):
        """Initialize Forums api using user's authtoken and portal id.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        self.details = {
            'authtoken': authtoken
        }
        self.portal_id = portal_id

    def _build_file_list(self, forum):
        """Build the multipart attachment list for the forum's upload files.

        Closes every file immediately after reading; the original code
        duplicated this loop in add()/update() and leaked the handles.

        Args:
            forum(instance): Forum object.

        Returns:
            list of dict: One 'uploadfile' entry per attached file.
        """
        file_list = []
        for value in forum.get_upload_file() or []:
            with open(value) as upload_file:
                content = upload_file.read()
            file_list.append({
                'uploadfile': {
                    'filename': basename(value),
                    'content': content
                }
            })
        return file_list

    def get_forums(self, project_id, param=None):
        """Get all forums in the given project.

        Args:
            project_id(long): Project id.
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: List of forums object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/forums/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_forums(response)

    def add(self, project_id, forum):
        """Adds the forum post.

        Args:
            project_id(long): Project id.
            forum(instance): Forum object.

        Returns:
            instance: Forum object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/forums/'
        data = parser.to_json(forum)
        file_list = self._build_file_list(forum)
        response = zoho_http_client.post(url, self.details, data, None, file_list)
        return parser.get_forums(response)[0]

    def update(self, project_id, forum):
        """Update the forum post.

        Args:
            project_id(long): Project id.
            forum(instance): Forum object (its id selects the post).

        Returns:
            instance: Forum object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/forums/' + str(forum.get_id()) + '/'
        data = parser.to_json(forum)
        file_list = self._build_file_list(forum)
        response = zoho_http_client.post(url, self.details, data, None, file_list)
        return parser.get_forums(response)[0]

    def delete(self, project_id, forum_id):
        """Delete the forum post.

        Args:
            project_id(long): Project id.
            forum_id(long): Forum id.

        Returns:
            str: Success message('Forum Deleted Successfully')
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/forums/' + str(forum_id) + '/'
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)

    def get_comments(self, project_id, forum_id, param=None):
        """Get all the forum comments.

        Args:
            project_id(long): Project id.
            forum_id(long): Forum id.
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: List of comment object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/forums/' + str(forum_id) + '/comments/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_comments(response)

    def add_comment(self, project_id, forum_id, comment):
        """Adds the forum comment.

        Args:
            project_id(long): Project id.
            forum_id(long): Forum id.
            comment(instance): Comment object.

        Returns:
            instance: Forum object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/forums/' + str(forum_id) + '/comments/'
        data = parser.comment_to_json(comment)
        response = zoho_http_client.post(url, self.details, data)
        return parser.get_comments(response)[0]
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/ForumsApi.py",
"copies": "1",
"size": "5033",
"license": "mit",
"hash": -5254343311106340000,
"line_mean": 31.6818181818,
"line_max": 134,
"alpha_frac": 0.5320882178,
"autogenerated": false,
"ratio": 3.9505494505494507,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9885664997109525,
"avg_score": 0.019394534247985436,
"num_lines": 154
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.MilestonesParser import MilestonesParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = MilestonesParser()
class MilestonesApi:
    """Client for the milestone endpoints of a Zoho Projects portal.

    Supports listing milestones, reading one milestone, creating,
    updating, changing the status of, and deleting milestones.
    """
    def __init__(self, authtoken, portal_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        self.details = {"authtoken": authtoken}
        self.portal_id = portal_id

    def _milestones_url(self, project_id):
        """Return the milestones collection URL for the given project."""
        return base_url + 'portal/%s/projects/%s/milestones/' % (
            self.portal_id, project_id)

    def get_milestones(self, project_id, param=None):
        """List all the milestones in a given project.

        Args:
            project_id(long): Project id.
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: List of milestones object.
        """
        response = zoho_http_client.get(self._milestones_url(project_id),
                                        self.details, param)
        return parser.get_milestones(response)

    def get(self, project_id, milestone_id):
        """Get details of one milestone.

        Args:
            project_id(long): Project id.
            milestone_id(long): Milestone id.

        Returns:
            instance: Milestone object.
        """
        url = self._milestones_url(project_id) + '%s/' % milestone_id
        response = zoho_http_client.get(url, self.details)
        return parser.get_milestones(response)[0]

    def create(self, project_id, milestone):
        """Create a milestone.

        Args:
            project_id(long): Project id.
            milestone(instance): Milestone object.

        Returns:
            instance: Milestone object.
        """
        payload = parser.to_json(milestone)
        response = zoho_http_client.post(self._milestones_url(project_id),
                                         self.details, payload)
        # NOTE(review): create/update/update_status call the singular
        # parser.get_milestone while get/get_milestones use the plural
        # form -- confirm both methods exist on MilestonesParser.
        return parser.get_milestone(response)[0]

    def update(self, project_id, milestone):
        """Update an existing milestone.

        Args:
            project_id(long): Project id.
            milestone(instance): Milestone object (its id selects the milestone).

        Returns:
            instance: Milestone object.
        """
        url = self._milestones_url(project_id) + '%s/' % milestone.get_id()
        payload = parser.to_json(milestone)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_milestone(response)[0]

    def update_status(self, project_id, milestone_id, status):
        """Update the milestone status.

        Args:
            project_id(long): Project id.
            milestone_id(long): Milestone id.
            status(int): Status. Allowed values 1, 2. 1 - Not completed, 2 - Completed.

        Returns:
            instance: Milestone object.
        """
        url = self._milestones_url(project_id) + '%s/status/' % milestone_id
        response = zoho_http_client.post(url, self.details,
                                         {'status': status})
        return parser.get_milestone(response)[0]

    def delete(self, project_id, milestone_id):
        """Delete the given milestone.

        Args:
            project_id(long): Project id.
            milestone_id(long): Milestone id.

        Returns:
            str: Success message('Milestone Deleted Successfully.')
        """
        url = self._milestones_url(project_id) + '%s/' % milestone_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/MilestonesApi.py",
"copies": "1",
"size": "4098",
"license": "mit",
"hash": 8836999108228384000,
"line_mean": 30.7674418605,
"line_max": 139,
"alpha_frac": 0.5846754514,
"autogenerated": false,
"ratio": 3.770009199632015,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4854684651032015,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.ProjectsParser import ProjectsParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = ProjectsParser()
class ProjectsApi:
    """Client for the project endpoints of a Zoho Projects portal.

    Supports listing projects, reading one project, creating,
    updating and deleting projects.
    """
    def __init__(self, authtoken, portal_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): authtoken.
            portal_id(int): Portal id.
        """
        self.details = {'authtoken': authtoken}
        self.portal_id = portal_id

    def _projects_url(self):
        """Return the projects collection URL for this portal."""
        return base_url + 'portal/%s/projects/' % self.portal_id

    def get_projects(self, param=None):
        """List all the projects in the portal for the logged in user.

        Args:
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: List of project object.
        """
        response = zoho_http_client.get(self._projects_url(),
                                        self.details, param)
        return parser.get_projects(response)

    def get(self, project_id):
        """Get the details of one project.

        Args:
            project_id(int): Project id.

        Returns:
            instance: Project object.
        """
        url = self._projects_url() + '%s/' % project_id
        response = zoho_http_client.get(url, self.details)
        return parser.get_projects(response)[0]

    def create(self, project):
        """Create a new project.

        Args:
            project(instance): Project object.

        Returns:
            instance: Project object.
        """
        payload = parser.to_json(project)
        response = zoho_http_client.post(self._projects_url(),
                                         self.details, payload)
        return parser.get_projects(response)[0]

    def update(self, project_id, project):
        """Update an existing project.

        Args:
            project_id(long): Project id.
            project(instance): Project object.

        Returns:
            instance: Project object.
        """
        url = self._projects_url() + '%s/' % project_id
        payload = parser.to_json(project)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_projects(response)[0]

    def delete(self, project_id):
        """Delete a project.

        Args:
            project_id(str): Project id.

        Returns:
            str: Success message("Project deleted Successfully.")
        """
        url = self._projects_url() + '%s/' % project_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/ProjectsApi.py",
"copies": "1",
"size": "3042",
"license": "mit",
"hash": 2391388076502064600,
"line_mean": 27.4299065421,
"line_max": 95,
"alpha_frac": 0.5798816568,
"autogenerated": false,
"ratio": 3.9353169469598965,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9893037562112024,
"avg_score": 0.024432208329574587,
"num_lines": 107
} |
#$ID$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.TaskListParser import TaskListParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = TaskListParser()
class TaskListApi:
    """Client for the task-list endpoints of a Zoho Projects portal.

    Supports listing, creating, updating and deleting the task lists
    of a single project.
    """
    def __init__(self, authtoken, portal_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        self.details = {'authtoken': authtoken}
        self.portal_id = portal_id

    def _tasklists_url(self, project_id):
        """Return the task lists collection URL for the given project."""
        return base_url + 'portal/%s/projects/%s/tasklists/' % (
            self.portal_id, project_id)

    def get_tasklists(self, project_id, param):
        """List all the task lists in the given project.

        Args:
            project_id(long): Project id.
            param(dict): Filter with which the list has to be displayed.

        Returns:
            list of instance: List of tasklist object.
        """
        response = zoho_http_client.get(self._tasklists_url(project_id),
                                        self.details, param)
        return parser.get_tasklists(response)

    def create(self, project_id, tasklist):
        """Create a task list.

        Args:
            project_id(long): Project id.
            tasklist(instance): Task list object.

        Returns:
            instance: Task list object.
        """
        payload = parser.to_json(tasklist)
        response = zoho_http_client.post(self._tasklists_url(project_id),
                                         self.details, payload)
        return parser.get_tasklists(response)[0]

    def update(self, project_id, tasklist):
        """Update an existing task list.

        Args:
            project_id(long): Project id.
            tasklist(instance): Task list object (its id selects the list).

        Returns:
            instance: Task list object.
        """
        url = self._tasklists_url(project_id) + '%s/' % tasklist.get_id()
        payload = parser.to_json(tasklist)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_tasklists(response)[0]

    def delete(self, project_id, tasklist_id):
        """Delete the given task list.

        Args:
            project_id(long): Project id.
            tasklist_id(long): Tasklist id.

        Returns:
            str: Success message("Tasklist Deleted Successfully")
        """
        url = self._tasklists_url(project_id) + '%s/' % tasklist_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/TaskListApi.py",
"copies": "1",
"size": "2912",
"license": "mit",
"hash": -617137824789093200,
"line_mean": 29.9787234043,
"line_max": 136,
"alpha_frac": 0.5824175824,
"autogenerated": false,
"ratio": 3.772020725388601,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9751702286311814,
"avg_score": 0.02054720429535733,
"num_lines": 94
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.TasksParser import TasksParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = TasksParser()
class TasksApi:
    """TasksApi class is used to
    1.Get all tasks in the given project.
    2.Get all the tasks in the given task list.
    3.Get all the details for the task.
    4.Create a task.
    5.Update the task in the given project.
    6.Delete the task in the given project.
    7.List subtasks and manage task comments.
    """
    def __init__(self, authtoken, portal_id):
        """Initialize Tasks api using user's authtoken and portal id.

        Args:
            authtoken(str): User's authtoken.
            portal_id(int): User's portal id.
        """
        self.details = {
            'authtoken': authtoken
        }
        self.portal_id = portal_id

    def get_tasks(self, project_id, param=None):
        """Get all the tasks in the given project.

        Args:
            project_id(long): Project id.
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: List of task object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_tasks(response)

    def get_tasklist_tasks(self, project_id, tasklist_id, param=None):
        """Get all the tasks in the given tasklist.

        Args:
            project_id(long): Project id.
            tasklist_id(long): Tasklist id.
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: List of tasks object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasklists/' + str(tasklist_id) + '/tasks/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_tasks(response)

    def get(self, project_id, task_id):
        """Get all details for the task.

        Args:
            project_id(long): Project id.
            task_id(long): Task id.

        Returns:
            instance: Task object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/' + str(task_id) + '/'
        response = zoho_http_client.get(url, self.details)
        return parser.get_tasks(response)[0]

    def create(self, project_id, task):
        """Create a task.

        Args:
            project_id(long): Project id.
            task(instance): Task object.

        Returns:
            instance: Task object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/'
        data = parser.to_json(task)
        response = zoho_http_client.post(url, self.details, data)
        return parser.get_tasks(response)[0]

    def update(self, project_id, task):
        """Update task.

        Args:
            project_id(long): Project id.
            task(instance): Task object (its id selects the task).

        Returns:
            instance: Task object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/' + str(task.get_id()) + '/'
        data = parser.to_json(task)
        response = zoho_http_client.post(url, self.details, data)
        return parser.get_tasks(response)[0]

    def delete(self, project_id, task_id):
        """Delete task.

        Args:
            project_id(long): Project id.
            task_id(long): Task id.

        Returns:
            str: Success message("Task Deleted Successfully.").
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/' + str(task_id) + '/'
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)

    def get_subtasks(self, project_id, task_id, param=None):
        """Get all the subtasks of the given task.

        Args:
            project_id(long): Project id.
            task_id(long): Task id.
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: Returns list of Task object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/' + str(task_id) + '/subtasks/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_tasks(response)

    def get_comments(self, project_id, task_id, param=None):
        """Get all the comments of the given task.

        Args:
            project_id(long): Project id.
            task_id(long): Task id.
            param(dict, optional): Filter with which the list has to be displayed.

        Returns:
            list of instance: Returns list of Comment object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/' + str(task_id) + '/comments/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_comments(response)

    def add_comment(self, project_id, task_id, content):
        """Add the task comment.

        Args:
            project_id(long): Project id.
            task_id(long): Task id.
            content(str): Comment of the task.

        Returns:
            instance: Returns the Comment object.
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/' + str(task_id) + '/comments/'
        data = {'content': content}
        # Bug fix: the original called the client object itself
        # (zoho_http_client(url, ...)), which raises a TypeError;
        # the comment must be POSTed like every other create call.
        response = zoho_http_client.post(url, self.details, data)
        return parser.get_comment(response)

    def delete_comment(self, project_id, task_id, comment_id):
        """Delete an existing comment.

        Args:
            project_id(long): Project id.
            task_id(long): Task id.
            comment_id(long): Comment id.

        Returns:
            str: Returns the success message(Comment Deleted Successfully).
        """
        url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/tasks/' + str(task_id) + '/comments/' + str(comment_id) + '/'
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/TasksApi.py",
"copies": "1",
"size": "6525",
"license": "mit",
"hash": 4048119786277313500,
"line_mean": 28.7945205479,
"line_max": 136,
"alpha_frac": 0.5785440613,
"autogenerated": false,
"ratio": 3.5812294182217346,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4659773479521735,
"avg_score": null,
"num_lines": null
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.TimesheetsParser import TimesheetsParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = TimesheetsParser()
class TimesheetsApi:
    """Client for the time-log endpoints of a Zoho Projects portal.

    Covers listing time logs and adding/updating/deleting logs that
    are attached to tasks, to bugs, or to other (general) work items.
    """
    def __init__(self, authtoken, portal_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's authtoken.
            portal_id(int): User's portal id.
        """
        self.details = {'authtoken': authtoken}
        self.portal_id = portal_id

    def _project_url(self, project_id):
        """Return the base URL for the given project."""
        return base_url + 'portal/%s/projects/%s/' % (
            self.portal_id, project_id)

    def get_time_logs(self, project_id, param):
        """List all the time logs in the given project.

        Args:
            project_id(long): Project id.
            param(dict): Filter with which list has to be displayed.

        Returns:
            instance: Time log object.
        """
        url = self._project_url(project_id) + 'logs/'
        response = zoho_http_client.get(url, self.details, param)
        return parser.get_time_logs(response)

    def add_task_log(self, project_id, tasklog):
        """Add a time log to a task.

        Args:
            project_id(long): Project id.
            tasklog(instance): Task log (its task id selects the task).

        Returns:
            instance: Task log object.
        """
        url = self._project_url(project_id) + 'tasks/%s/logs/' % \
            tasklog.get_task_id()
        payload = parser.to_json(tasklog)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_task_logs(response)

    def update_task_log(self, project_id, tasklog):
        """Update an existing task time log.

        Args:
            project_id(long): Project id.
            tasklog(instance): Task log (its ids select task and log).

        Returns:
            instance: Task log object.
        """
        url = self._project_url(project_id) + 'tasks/%s/logs/%s/' % (
            tasklog.get_task_id(), tasklog.get_id())
        payload = parser.to_json(tasklog)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_task_logs(response)

    def delete_task_log(self, project_id, task_id, log_id):
        """Delete the time log of a task.

        Args:
            project_id(long): Project id.
            task_id(long): Task id.
            log_id(long): Log id.

        Returns:
            str: Success message('Timesheet log Deleted Successfully.').
        """
        url = self._project_url(project_id) + 'tasks/%s/logs/%s/' % (
            task_id, log_id)
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)

    def add_bug_log(self, project_id, bug_log):
        """Add a time log to a bug.

        Args:
            project_id(long): Project id.
            bug_log(instance): Bug log (its bug id selects the bug).

        Returns:
            instance: Bug log object.
        """
        url = self._project_url(project_id) + 'bugs/%s/logs/' % \
            bug_log.get_bug_id()
        payload = parser.to_json(bug_log)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_bug_log(response)

    def update_bug_log(self, project_id, bug_log):
        """Update an existing bug time log.

        Args:
            project_id(long): Project id.
            bug_log(instance): Bug log (its ids select bug and log).

        Returns:
            instance: Bug log object.
        """
        url = self._project_url(project_id) + 'bugs/%s/logs/%s/' % (
            bug_log.get_bug_id(), bug_log.get_id())
        payload = parser.to_json(bug_log)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_bug_log(response)

    def delete_bug_log(self, project_id, bug_id, log_id):
        """Delete the time log of a bug.

        Args:
            project_id(long): Project id.
            bug_id(long): Bug id.
            log_id(long): Bug log id.

        Returns:
            str: Success message('Timesheet log Deleted Successfully.').
        """
        url = self._project_url(project_id) + 'bugs/%s/logs/%s/' % (
            bug_id, log_id)
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)

    def add_general_log(self, project_id, general_log):
        """Add a time log to other (general) tasks.

        Args:
            project_id(long): Project id.
            general_log(instance): General log object.

        Returns:
            instance: General log object.
        """
        url = self._project_url(project_id) + 'logs/'
        payload = parser.to_json(general_log)
        # The general-log endpoint also needs the item name in the body.
        payload.update({"name": general_log.get_name()})
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_general_log(response)

    def update_general_log(self, project_id, general_log):
        """Update an existing general time log.

        Args:
            project_id(long): Project id.
            general_log(instance): General log object (its id selects the log).

        Returns:
            instance: General log object.
        """
        url = self._project_url(project_id) + 'logs/%s/' % \
            general_log.get_id()
        payload = parser.to_json(general_log)
        response = zoho_http_client.post(url, self.details, payload)
        return parser.get_general_log(response)

    def delete_general_log(self, project_id, log_id):
        """Delete a general time log.

        Args:
            project_id(long): Project id.
            log_id(long): General log id.

        Returns:
            str: Success message('Timesheet log Deleted Successfully.').
        """
        url = self._project_url(project_id) + 'logs/%s/' % log_id
        response = zoho_http_client.delete(url, self.details)
        return parser.get_message(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/TimesheetsApi.py",
"copies": "1",
"size": "6733",
"license": "mit",
"hash": -2671298150471519000,
"line_mean": 32.8341708543,
"line_max": 171,
"alpha_frac": 0.5694341304,
"autogenerated": false,
"ratio": 3.594767752269087,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9563563992613253,
"avg_score": 0.020127578011166773,
"num_lines": 199
} |
#$Id$
from projects.util.ZohoHttpClient import ZohoHttpClient
from projects.api.Api import Api
from projects.parser.UsersParser import UsersParser
base_url = Api().base_url
zoho_http_client = ZohoHttpClient()
parser = UsersParser()
class UsersApi:
    """Client for the user endpoints of a Zoho Projects portal.

    Currently only lists the users of a given project.
    """
    def __init__(self, authtoken, portal_id):
        """Remember the credentials sent with every request.

        Args:
            authtoken(str): User's authtoken.
            portal_id(str): User's portal id.
        """
        self.details = {'authtoken': authtoken}
        self.portal_id = portal_id

    def get_users(self, project_id):
        """List every user in the given project.

        Args:
            project_id(long): Project id.

        Returns:
            list of instance: List of users object.
        """
        url = base_url + 'portal/%s/projects/%s/users/' % (
            self.portal_id, project_id)
        response = zoho_http_client.get(url, self.details)
        return parser.get_users(response)
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/api/UsersApi.py",
"copies": "1",
"size": "1126",
"license": "mit",
"hash": -4447471433279565300,
"line_mean": 25.1860465116,
"line_max": 101,
"alpha_frac": 0.5985790409,
"autogenerated": false,
"ratio": 3.778523489932886,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48771025308328864,
"avg_score": null,
"num_lines": null
} |
# $Id:$
from pyglet.media import Source, AudioFormat, AudioData
import ctypes
import os
import math
class ProceduralSource(Source):
    """Base class for audio sources whose samples are computed on demand.

    Subclasses implement _generate_data(); this class tracks the read
    offset, clamps reads to the configured duration, and handles seeking.
    """
    def __init__(self, duration, sample_rate=44800, sample_size=16):
        # NOTE(review): 44800 is an unusual default rate (44100 is the CD
        # rate) but is preserved as-is.
        self._duration = float(duration)
        self.audio_format = AudioFormat(channels=1,
                                        sample_size=sample_size,
                                        sample_rate=sample_rate)

        self._offset = 0
        self._bytes_per_sample = sample_size >> 3   # sample_size is bits
        self._bytes_per_second = self._bytes_per_sample * sample_rate
        self._max_offset = int(self._bytes_per_second * self._duration)

        if self._bytes_per_sample == 2:
            # Keep the end offset on a whole 16-bit sample boundary.
            self._max_offset &= 0xfffffffe

    def _get_audio_data(self, bytes):
        # Never read past the configured duration.
        bytes = min(bytes, self._max_offset - self._offset)
        if bytes <= 0:
            return None

        start = self._offset
        data = self._generate_data(bytes, start)
        self._offset = start + bytes
        return AudioData(data,
                         bytes,
                         float(start) / self._bytes_per_second,
                         float(bytes) / self._bytes_per_second)

    def _generate_data(self, bytes, offset):
        '''Generate `bytes` bytes of data.

        Return data as ctypes array or string.
        '''
        raise NotImplementedError('abstract')

    def _seek(self, timestamp):
        offset = int(timestamp * self._bytes_per_second)
        # Clamp within [0, duration] ...
        offset = min(max(offset, 0), self._max_offset)
        # ... and align to a whole sample for 16-bit data.
        if self._bytes_per_sample == 2:
            offset &= 0xfffffffe
        self._offset = offset
class Silence(ProceduralSource):
    """Generates silence: a constant signal at the waveform midpoint."""

    def _generate_data(self, bytes, offset):
        if self._bytes_per_sample == 1:
            # 8-bit samples are unsigned, so silence is the midpoint bias
            # 127 (the same bias Sine uses). The original wrote the octal
            # escape '\127', which is chr(87) ('W') -- a constant DC offset,
            # not silence. '\x7f' is the intended value 127.
            return '\x7f' * bytes
        else:
            return '\0' * bytes
class WhiteNoise(ProceduralSource):
    """Uniform random noise source."""

    def _generate_data(self, bytes, offset):
        # os.urandom yields uniformly distributed bytes, which serves as
        # white noise for both 8- and 16-bit sample sizes.
        return os.urandom(bytes)
class Sine(ProceduralSource):
    """Pure sine-wave tone at a fixed frequency."""

    def __init__(self, duration, frequency=440, **kwargs):
        """Create a tone of `duration` seconds at `frequency` Hz."""
        super(Sine, self).__init__(duration, **kwargs)
        self.frequency = frequency

    def _generate_data(self, bytes, offset):
        eight_bit = self._bytes_per_sample == 1
        if eight_bit:
            start, samples = offset, bytes
            bias, amplitude = 127, 127
            data = (ctypes.c_ubyte * samples)()
        else:
            # 16-bit: two bytes per sample, signed range.
            start, samples = offset >> 1, bytes >> 1
            bias, amplitude = 0, 32767
            data = (ctypes.c_short * samples)()
        # Phase increment per sample, in radians.
        step = self.frequency * (math.pi * 2) / self.audio_format.sample_rate
        for n in range(samples):
            data[n] = int(math.sin(step * (n + start)) * amplitude + bias)
        return data
class Saw(ProceduralSource):
    """Triangle-style oscillator: the sample value ramps linearly and
    reflects off the top/bottom of the sample range, reversing direction
    at each peak.
    """
    def __init__(self, duration, frequency=440, **kwargs):
        """Create a wave of `duration` seconds at `frequency` Hz."""
        super(Saw, self).__init__(duration, **kwargs)
        self.frequency = frequency
    def _generate_data(self, bytes, offset):
        # XXX TODO consider offset
        # NOTE(review): generation always restarts the ramp from the fixed
        # initial `value` regardless of `offset`, so seeking does not
        # preserve waveform phase -- confirm whether that matters here.
        if self._bytes_per_sample == 1:
            samples = bytes
            value = 127
            max = 255    # NB: shadows the builtin within this method
            min = 0
            data = (ctypes.c_ubyte * samples)()
        else:
            samples = bytes >> 1
            value = 0
            max = 32767
            min = -32768
            data = (ctypes.c_short * samples)()
        # Amplitude change per sample. Under Python 2 this `/` is integer
        # division, so `step` is truncated to an int.
        step = (max - min) * 2 * self.frequency / self.audio_format.sample_rate
        for i in range(samples):
            value += step
            if value > max:
                # Reflect off the ceiling and start descending.
                value = max - (value - max)
                step = -step
            if value < min:
                # Reflect off the floor and start ascending.
                value = min - (value - min)
                step = -step
            data[i] = value
        return data
class Square(ProceduralSource):
    """Square-wave oscillator: the output toggles between the two sample
    extremes once every half period.
    """
    def __init__(self, duration, frequency=440, **kwargs):
        """Create a wave of `duration` seconds at `frequency` Hz."""
        super(Square, self).__init__(duration, **kwargs)
        self.frequency = frequency
    def _generate_data(self, bytes, offset):
        # XXX TODO consider offset
        if self._bytes_per_sample == 1:
            samples = bytes
            value = 0
            amplitude = 255
            data = (ctypes.c_ubyte * samples)()
        else:
            samples = bytes >> 1
            value = -32768
            amplitude = 65535
            data = (ctypes.c_short * samples)()
        # Samples per half cycle; under Python 2 this is integer division.
        period = self.audio_format.sample_rate / self.frequency / 2
        count = 0
        for i in range(samples):
            count += 1
            if count == period:
                # Flip to the opposite rail: `amplitude - value` alternates
                # between 0/255 (8-bit) or -32768/32767 (16-bit).
                value = amplitude - value
                count = 0
            data[i] = value
        return data
| {
"repo_name": "KevinGoodsell/sympy",
"path": "sympy/thirdparty/pyglet/pyglet/media/procedural.py",
"copies": "4",
"size": "4750",
"license": "bsd-3-clause",
"hash": 3018739532857419000,
"line_mean": 31.0945945946,
"line_max": 79,
"alpha_frac": 0.5273684211,
"autogenerated": false,
"ratio": 4.181338028169014,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.001272952027262372,
"num_lines": 148
} |
# $Id$
#
from rdkit import Chem
from rdkit.Chem import rdMolDescriptors as rdMD
from rdkit import DataStructs
from rdkit import RDConfig
import unittest
def feq(v1, v2, tol=1.e-4):
    """Return True when v1 and v2 differ by less than tol (fuzzy equality)."""
    delta = abs(v1 - v2)
    return delta < tol
class TestCase(unittest.TestCase) :
  """Exercises the fingerprint/descriptor wrappers exposed by
  rdkit.Chem.rdMolDescriptors (atom pairs, torsions, Morgan fingerprints,
  Crippen contributions, molecular weight and formula).
  """
  def setUp(self):
    pass
  def testAtomPairTypes(self):
    # Atom codes pack element, branch count and pi count into bit fields;
    # the expected values below rebuild the packing by hand.
    params = rdMD.AtomPairsParameters
    mol = Chem.MolFromSmiles("C=C");
    self.failUnless(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(0))==\
                    rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1)))
    self.failUnless(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(0))==\
                    1 | (1 | 1<<params.numPiBits)<<params.numBranchBits)
    mol = Chem.MolFromSmiles("C#CO");
    self.failUnless(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(0))!=\
                    rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1)))
    self.failUnless(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(0))==\
                    1 | (2 | 1<<params.numPiBits)<<params.numBranchBits)
    self.failUnless(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1))==\
                    2 | (2 | 1<<params.numPiBits)<<params.numBranchBits)
    self.failUnless(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(2))==\
                    1 | (0 | 3<<params.numPiBits)<<params.numBranchBits)
    # The optional second argument subtracts from the branch count.
    self.failUnless(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1),1)==\
                    1 | (2 | 1<<params.numPiBits)<<params.numBranchBits)
    self.failUnless(rdMD.GetAtomPairAtomCode(mol.GetAtomWithIdx(1),2)==\
                    0 | (2 | 1<<params.numPiBits)<<params.numBranchBits)
  def testAtomPairs(self):
    m = Chem.MolFromSmiles('CCC')
    fp1 = rdMD.GetAtomPairFingerprint(m)
    fp2 = rdMD.GetAtomPairFingerprint(m,minLength=1,maxLength=2)
    nz1 = fp1.GetNonzeroElements()
    self.failUnlessEqual(len(nz1),2)
    nz2 = fp2.GetNonzeroElements()
    self.failUnlessEqual(len(nz2),2)
    # Restricting the path length to 1 drops the 1-3 pair.
    fp2 = rdMD.GetAtomPairFingerprint(m,minLength=1,maxLength=1)
    nz2 = fp2.GetNonzeroElements()
    self.failUnlessEqual(len(nz2),1)
  def testHashedAtomPairs(self):
    m = Chem.MolFromSmiles('c1ccccc1')
    fp1 = rdMD.GetHashedAtomPairFingerprint(m,2048)
    fp2 = rdMD.GetHashedAtomPairFingerprint(m,2048,1,3)
    self.failUnless(fp1==fp2)
    fp2 = rdMD.GetHashedAtomPairFingerprint(m,2048,1,2)
    sim= DataStructs.DiceSimilarity(fp1,fp2)
    self.failUnless(sim>0.0 and sim<1.0)
    m = Chem.MolFromSmiles('c1ccccn1')
    fp2 = rdMD.GetHashedAtomPairFingerprint(m,2048)
    sim= DataStructs.DiceSimilarity(fp1,fp2)
    self.failUnless(sim>0.0 and sim<1.0)
    m = Chem.MolFromSmiles('c1ccccc1')
    fp1 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m,2048)
    m = Chem.MolFromSmiles('c1ccccn1')
    fp2 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m,2048)
    sim= DataStructs.DiceSimilarity(fp1,fp2)
    self.failUnless(sim>0.0 and sim<1.0)
  def testRootedAtomPairs(self):
    # Rooting at one atom yields a subset of the full fingerprint's counts.
    m = Chem.MolFromSmiles('Oc1ccccc1')
    fp1 = rdMD.GetAtomPairFingerprint(m)
    fp2 = rdMD.GetAtomPairFingerprint(m,fromAtoms=(0,))
    nz1 = fp1.GetNonzeroElements()
    nz2 = fp2.GetNonzeroElements()
    for k,v in nz2.iteritems():
      self.failUnless(v<=nz1[k])
  def testTopologicalTorsions(self):
    mol = Chem.MolFromSmiles("CC");
    fp = rdMD.GetTopologicalTorsionFingerprint(mol)
    self.failUnless(fp.GetTotalVal()==0)
    mol = Chem.MolFromSmiles("CCCC");
    fp = rdMD.GetTopologicalTorsionFingerprint(mol)
    self.failUnless(fp.GetTotalVal()==1)
    fp = rdMD.GetTopologicalTorsionFingerprint(mol,3)
    self.failUnless(fp.GetTotalVal()==2)
    mol = Chem.MolFromSmiles("CCCO");
    fp = rdMD.GetTopologicalTorsionFingerprint(mol)
    self.failUnless(fp.GetTotalVal()==1)
    fp = rdMD.GetTopologicalTorsionFingerprint(mol,3)
    self.failUnless(fp.GetTotalVal()==2)
    # Torsion size is capped at 7; 8 must raise.
    mol = Chem.MolFromSmiles("CCCCCCCCCCC");
    fp = rdMD.GetTopologicalTorsionFingerprint(mol,7)
    self.failUnlessRaises(ValueError,lambda : rdMD.GetTopologicalTorsionFingerprint(mol,8))
  def testHashedTopologicalTorsions(self):
    # Two SMILES spellings of pyridine must hash identically.
    mol = Chem.MolFromSmiles("c1ncccc1");
    fp1 = rdMD.GetHashedTopologicalTorsionFingerprint(mol)
    mol = Chem.MolFromSmiles("n1ccccc1");
    fp2 = rdMD.GetHashedTopologicalTorsionFingerprint(mol)
    self.failUnlessEqual(DataStructs.DiceSimilarity(fp1,fp2),1.0)
  def testRootedTorsions(self):
    m = Chem.MolFromSmiles('Oc1ccccc1')
    fp1 = rdMD.GetTopologicalTorsionFingerprint(m)
    fp2 = rdMD.GetTopologicalTorsionFingerprint(m,fromAtoms=(0,))
    nz1 = fp1.GetNonzeroElements()
    nz2 = fp2.GetNonzeroElements()
    for k,v in nz2.iteritems():
      self.failUnless(v<=nz1[k])
    m = Chem.MolFromSmiles('COCC')
    fp1 = rdMD.GetTopologicalTorsionFingerprint(m)
    self.failUnlessEqual(len(fp1.GetNonzeroElements()),1)
    fp1 = rdMD.GetTopologicalTorsionFingerprint(m,fromAtoms=(0,))
    self.failUnlessEqual(len(fp1.GetNonzeroElements()),1)
    # Atom 1 is interior, so no torsion starts there.
    fp1 = rdMD.GetTopologicalTorsionFingerprint(m,fromAtoms=(1,))
    self.failUnlessEqual(len(fp1.GetNonzeroElements()),0)
  def testMorganFingerprints(self):
    mol = Chem.MolFromSmiles('CC(F)(Cl)C(F)(Cl)C')
    fp = rdMD.GetMorganFingerprint(mol,0)
    self.failUnless(len(fp.GetNonzeroElements())==4)
    mol = Chem.MolFromSmiles('CC(F)(Cl)C(F)(Cl)C')
    fp = rdMD.GetHashedMorganFingerprint(mol,0)
    self.failUnless(len(fp.GetNonzeroElements())==4)
    fp = rdMD.GetMorganFingerprint(mol,1)
    self.failUnless(len(fp.GetNonzeroElements())==8)
    fp = rdMD.GetHashedMorganFingerprint(mol,1)
    self.failUnless(len(fp.GetNonzeroElements())==8)
    fp = rdMD.GetMorganFingerprint(mol,2)
    self.failUnless(len(fp.GetNonzeroElements())==9)
    # Chiral variant: identical without useChirality, distinct with it.
    mol = Chem.MolFromSmiles('CC(F)(Cl)[C@](F)(Cl)C')
    fp = rdMD.GetMorganFingerprint(mol,0)
    self.failUnless(len(fp.GetNonzeroElements())==4)
    fp = rdMD.GetMorganFingerprint(mol,1)
    self.failUnless(len(fp.GetNonzeroElements())==8)
    fp = rdMD.GetMorganFingerprint(mol,2)
    self.failUnless(len(fp.GetNonzeroElements())==9)
    fp = rdMD.GetMorganFingerprint(mol,0,useChirality=True)
    self.failUnless(len(fp.GetNonzeroElements())==4)
    fp = rdMD.GetMorganFingerprint(mol,1,useChirality=True)
    self.failUnless(len(fp.GetNonzeroElements())==9)
    fp = rdMD.GetMorganFingerprint(mol,2,useChirality=True)
    self.failUnless(len(fp.GetNonzeroElements())==10)
    mol = Chem.MolFromSmiles('CCCCC')
    fp = rdMD.GetMorganFingerprint(mol,0,fromAtoms=(0,))
    self.failUnless(len(fp.GetNonzeroElements())==1)
    # User-supplied connectivity invariants must reproduce the defaults.
    mol = Chem.MolFromSmiles('CC1CC1')
    vs1 = rdMD.GetConnectivityInvariants(mol)
    self.failUnlessEqual(len(vs1),mol.GetNumAtoms())
    fp1 = rdMD.GetMorganFingerprint(mol,2,invariants=vs1)
    fp2 = rdMD.GetMorganFingerprint(mol,2)
    self.failUnlessEqual(fp1,fp2)
    vs2 = rdMD.GetConnectivityInvariants(mol,False)
    self.failUnlessEqual(len(vs2),mol.GetNumAtoms())
    self.failIfEqual(vs1,vs2)
    fp1 = rdMD.GetMorganFingerprint(mol,2,invariants=vs2)
    self.failIfEqual(fp1,fp2)
    mol = Chem.MolFromSmiles('Cc1ccccc1')
    vs1 = rdMD.GetFeatureInvariants(mol)
    self.failUnlessEqual(len(vs1),mol.GetNumAtoms())
    self.failUnlessEqual(vs1[0],0)
    self.failIfEqual(vs1[1],0)
    self.failUnlessEqual(vs1[1],vs1[2])
    self.failUnlessEqual(vs1[1],vs1[3])
    self.failUnlessEqual(vs1[1],vs1[4])
    mol = Chem.MolFromSmiles('FCCCl')
    vs1 = rdMD.GetFeatureInvariants(mol)
    self.failUnlessEqual(len(vs1),mol.GetNumAtoms())
    self.failUnlessEqual(vs1[1],0)
    self.failUnlessEqual(vs1[2],0)
    self.failIfEqual(vs1[0],0)
    self.failUnlessEqual(vs1[0],vs1[3])
    fp1 = rdMD.GetMorganFingerprint(mol,0,invariants=vs1)
    fp2 = rdMD.GetMorganFingerprint(mol,0,useFeatures=True)
    self.failUnlessEqual(fp1,fp2)
  def testCrippen(self):
    mol = Chem.MolFromSmiles("n1ccccc1CO");
    contribs = rdMD._CalcCrippenContribs(mol)
    self.failUnlessEqual(len(contribs),mol.GetNumAtoms());
    # atomTypes/atomTypeLabels are filled in-place by the call.
    ts = [0]*mol.GetNumAtoms()
    contribs = rdMD._CalcCrippenContribs(mol,force=True,atomTypes=ts)
    self.failUnlessEqual(ts,[59, 25, 25, 25, 25, 28, 17, 69])
    ls = ['']*mol.GetNumAtoms()
    contribs = rdMD._CalcCrippenContribs(mol,force=True,atomTypeLabels=ls)
    self.failUnlessEqual(ls,['N11', 'C18', 'C18', 'C18', 'C18', 'C21', 'C10', 'O2'])
  def testMolWt(self):
    mol = Chem.MolFromSmiles("C");
    amw = rdMD._CalcMolWt(mol);
    self.failUnless(feq(amw,16.043,.001));
    amw = rdMD._CalcMolWt(mol,True);
    self.failUnless(feq(amw,12.011,.001));
    # Explicit Hs must not change the computed weight.
    mol2 = Chem.AddHs(mol);
    amw = rdMD._CalcMolWt(mol2);
    self.failUnless(feq(amw,16.043,.001));
    amw = rdMD._CalcMolWt(mol2,True);
    self.failUnless(feq(amw,12.011,.001));
    mol = Chem.MolFromSmiles("C");
    amw = rdMD.CalcExactMolWt(mol);
    self.failUnless(feq(amw,16.031,.001));
  def testPairValues(self):
    # Round-trip: fingerprints must match reference pickles (base64).
    import base64
    testD=(('CCCO','AQAAAAQAAAAAAIAABgAAACGECAABAAAAIoQIAAEAAABBhAgAAQAAACNEGAABAAAAQUQYAAEAAABC\nRBgAAQAAAA==\n'),
           ('CNc1ccco1','AQAAAAQAAAAAAIAAEAAAACOECgABAAAAJIQKAAIAAABBhQoAAgAAAEKFCgABAAAAIsQKAAEAAABB\nxQoAAQAAAELFCgACAAAAIYQQAAEAAABChRAAAQAAAEOFEAACAAAAYYUQAAEAAAAjhBoAAQAAAEGF\nGgABAAAAQoUaAAIAAABhhRoAAQAAAEKIGgABAAAA\n'),
           )
    for smi,txt in testD:
      pkl = base64.decodestring(txt)
      fp = rdMD.GetAtomPairFingerprint(Chem.MolFromSmiles(smi))
      fp2 = DataStructs.IntSparseIntVect(pkl)
      self.failUnlessEqual(DataStructs.DiceSimilarity(fp,fp2),1.0)
      self.failUnlessEqual(fp,fp2)
  def testTorsionValues(self):
    # Same round-trip check for torsion fingerprints.
    import base64
    testD=(('CCCO','AQAAAAgAAAD/////DwAAAAEAAAAAAAAAIECAAAMAAAABAAAA\n'),
           ('CNc1ccco1','AQAAAAgAAAD/////DwAAAAkAAAAAAAAAIICkSAEAAAABAAAAKVKgSQEAAAABAAAAKVCgUAEAAAAB\nAAAAKVCgUQEAAAABAAAAKVCkCAIAAAABAAAAKdCkCAIAAAABAAAAKVCgSAMAAAABAAAAKVCkSAMA\nAAABAAAAIICkSAMAAAABAAAA\n'),
           )
    for smi,txt in testD:
      pkl = base64.decodestring(txt)
      fp = rdMD.GetTopologicalTorsionFingerprint(Chem.MolFromSmiles(smi))
      fp2 = DataStructs.LongSparseIntVect(pkl)
      self.failUnlessEqual(DataStructs.DiceSimilarity(fp,fp2),1.0)
      self.failUnlessEqual(fp,fp2)
  def testAtomPairOptions(self):
    # Custom atomInvariants make benzene and pyridine collide (same
    # invariants) or differ (different invariants), across all variants.
    m1 = Chem.MolFromSmiles('c1ccccc1')
    m2 = Chem.MolFromSmiles('c1ccccn1')
    fp1 = rdMD.GetAtomPairFingerprint(m1)
    fp2 = rdMD.GetAtomPairFingerprint(m2)
    self.failIfEqual(fp1,fp2)
    fp1 = rdMD.GetAtomPairFingerprint(m1,atomInvariants=[1]*6)
    fp2 = rdMD.GetAtomPairFingerprint(m2,atomInvariants=[1]*6)
    self.failUnlessEqual(fp1,fp2)
    fp1 = rdMD.GetAtomPairFingerprint(m1,atomInvariants=[1]*6)
    fp2 = rdMD.GetAtomPairFingerprint(m2,atomInvariants=[2]*6)
    self.failIfEqual(fp1,fp2)
    fp1 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m1)
    fp2 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m2)
    self.failIfEqual(fp1,fp2)
    fp1 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m1,atomInvariants=[1]*6)
    fp2 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m2,atomInvariants=[1]*6)
    self.failUnlessEqual(fp1,fp2)
    fp1 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m1,atomInvariants=[1]*6)
    fp2 = rdMD.GetHashedAtomPairFingerprintAsBitVect(m2,atomInvariants=[2]*6)
    self.failIfEqual(fp1,fp2)
    fp1 = rdMD.GetTopologicalTorsionFingerprint(m1)
    fp2 = rdMD.GetTopologicalTorsionFingerprint(m2)
    self.failIfEqual(fp1,fp2)
    fp1 = rdMD.GetTopologicalTorsionFingerprint(m1,atomInvariants=[1]*6)
    fp2 = rdMD.GetTopologicalTorsionFingerprint(m2,atomInvariants=[1]*6)
    self.failUnlessEqual(fp1,fp2)
    fp1 = rdMD.GetTopologicalTorsionFingerprint(m1,atomInvariants=[1]*6)
    fp2 = rdMD.GetTopologicalTorsionFingerprint(m2,atomInvariants=[2]*6)
    self.failIfEqual(fp1,fp2)
    fp1 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m1)
    fp2 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m2)
    self.failIfEqual(fp1,fp2)
    fp1 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m1,atomInvariants=[1]*6)
    fp2 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m2,atomInvariants=[1]*6)
    self.failUnlessEqual(fp1,fp2)
    fp1 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m1,atomInvariants=[1]*6)
    fp2 = rdMD.GetHashedTopologicalTorsionFingerprintAsBitVect(m2,atomInvariants=[2]*6)
    self.failIfEqual(fp1,fp2)
  def testMolFormula(self):
    m = Chem.MolFromSmiles("[2H]C([3H])O")
    formula = rdMD.CalcMolFormula(m)
    self.failUnlessEqual(formula,'CH4O')
    formula = rdMD.CalcMolFormula(m,separateIsotopes=True)
    self.failUnlessEqual(formula,'CH2DTO')
    formula = rdMD.CalcMolFormula(m,separateIsotopes=True,abbreviateHIsotopes=False)
    self.failUnlessEqual(formula,'CH2[2H][3H]O')
    m = Chem.MolFromSmiles("[2H][13CH2]CO")
    formula = rdMD.CalcMolFormula(m)
    self.failUnlessEqual(formula,'C2H6O')
    formula = rdMD.CalcMolFormula(m,separateIsotopes=True)
    self.failUnlessEqual(formula,'C[13C]H5DO')
# Run the suite when this module is executed directly.
if __name__ == '__main__':
  unittest.main()
| {
"repo_name": "rdkit/rdkit-orig",
"path": "Code/GraphMol/Descriptors/Wrap/testMolDescriptors.py",
"copies": "1",
"size": "12931",
"license": "bsd-3-clause",
"hash": -4842987770882704000,
"line_mean": 39.1583850932,
"line_max": 226,
"alpha_frac": 0.7157219086,
"autogenerated": false,
"ratio": 2.8025574338968355,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40182793424968355,
"avg_score": null,
"num_lines": null
} |
# $Id$
from unittest import TestCase
from canary.context import Context
from canary.resolver import find_resolver, Resolver
class ResolverTests (TestCase):
    """Checks that find_resolver maps client IPs to the expected OpenURL
    resolver, falling back to the OCLC gateway for unknown addresses."""

    context = Context()
    OCLC_GATEWAY_URL = 'http://worldcatlibraries.org/registry/gateway'

    def test_default (self):
        # An unrecognised IP falls back to the OCLC gateway.
        resolver = find_resolver(self.context, '999.9.9.9')
        self.assertEqual(self.OCLC_GATEWAY_URL, resolver.base_url)

    def test_default_text (self):
        resolver = find_resolver(self.context, '999.9.9.9')
        self.assertEqual('Find in a library', resolver.link_text)

    def test_simple (self):
        # curtis.med.yale.edu
        resolver = find_resolver(self.context, '128.36.123.51')
        self.failIfEqual(self.OCLC_GATEWAY_URL, resolver.base_url)

    def test_yale (self):
        # NOTE: this will fail if YUL's resolver location changes
        resolver = find_resolver(self.context, '128.36.123.51')
        self.assertEqual('http://sfx.library.yale.edu/sfx_local', resolver.base_url)

    def test_umich (self):
        # NOTE: this will fail if UMich resolver location changes
        resolver = find_resolver(self.context, '141.211.2.202')
        self.assertEqual('http://sfx.lib.umich.edu:9003/sfx_locater', resolver.base_url)
| {
"repo_name": "dchud/sentinel",
"path": "test/resolver.py",
"copies": "1",
"size": "1204",
"license": "mit",
"hash": -1088029680344466800,
"line_mean": 32.4444444444,
"line_max": 81,
"alpha_frac": 0.6470099668,
"autogenerated": false,
"ratio": 3.236559139784946,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4383569106584946,
"avg_score": null,
"num_lines": null
} |
# $Id$
from unittest import TestCase
from canary.context import Context
from canary.stats import *
from canary.search import RecordSearcher
class StatsTests (TestCase):
    """Smoke-tests every stats handler against a shared set of search
    results: each handler must produce at least one stat entry."""

    context = Context()

    def setUp (self):
        # each test gets a new collector
        self.collector = StatCollector(self.context)
        # get some records for statistics generation once
        searcher = RecordSearcher(self.context)
        self.records = searcher.search('environment')

    def _assert_handler_collects (self, handler):
        """Run `handler` over the shared records and assert it produced stats."""
        self.collector.add_handler(handler)
        self.collector.process(self.records)
        self.assertEquals(len(handler.stats.keys()) > 0, True)

    def test_curators (self):
        # NOTE(review): this uses ArticleTypeHandler, identical to
        # test_article_types below -- looks like a copy-paste slip; confirm
        # whether a curator-specific handler was intended.
        self._assert_handler_collects(ArticleTypeHandler())

    def test_article_types (self):
        self._assert_handler_collects(ArticleTypeHandler())

    def test_methodology_samplings (self):
        self._assert_handler_collects(MethodologySamplingHandler())

    def test_methology_types (self):
        self._assert_handler_collects(MethodologyTypeHandler())

    def test_methology_timings (self):
        self._assert_handler_collects(MethodologyTimingHandler())

    def test_methology_controls (self):
        self._assert_handler_collects(MethodologyControlHandler())

    def test_exposure_routes (self):
        self._assert_handler_collects(ExposureRouteHandler())

    def test_exposures (self):
        self._assert_handler_collects(ExposureHandler())

    def test_risk_factors (self):
        self._assert_handler_collects(RiskFactorHandler())

    def test_outcomes (self):
        self._assert_handler_collects(OutcomeHandler())

    def test_species (self):
        self._assert_handler_collects(SpeciesHandler())

    def test_locations (self):
        self._assert_handler_collects(LocationHandler())
| {
"repo_name": "dchud/sentinel",
"path": "test/stats.py",
"copies": "1",
"size": "3210",
"license": "mit",
"hash": -5261885974166921000,
"line_mean": 34.6666666667,
"line_max": 62,
"alpha_frac": 0.6657320872,
"autogenerated": false,
"ratio": 4.0125,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.51782320872,
"avg_score": null,
"num_lines": null
} |
#$Id$#
from urllib import urlencode,urlretrieve
from httplib2 import Http
from json import dumps,loads
from re import search
from mimetypes import guess_type
from string import digits,ascii_letters
from random import choice
from books.exception.BooksException import BooksException
_BOUNDARY_CHARS = digits + ascii_letters
class ZohoHttpClient:
    """Thin HTTP helper that issues GET, POST, PUT and DELETE requests
    against the Zoho Books API and converts JSON responses to dicts."""

    def get(self, url, details, query=None):
        """Make a GET request for the given url and query string.

        Args:
            url(str): Url passed by the user.
            details(dict): Dictionary containing authtoken and organization id.
            query(dict, optional): Additional parameters. Default to None.

        Returns:
            dict: Dictionary containing response content.

        Raises:
            BooksException: If the response status is not 200 or 201.
        """
        http, headers = get_http_connection()
        url = url + '?' + form_query_string(details)
        if query is not None:
            url += '&' + form_query_string(query)
        resp, content = http.request(url, 'GET', headers=headers)
        response = get_response(resp['status'], content)
        return response

    def getfile(self, url, details, query=None):
        """Download an attachment via GET and save it to local disk.

        Args:
            url(str): Url passed by the user.
            details(dict): Dictionary containing authtoken and organization id.
            query(dict, optional): Additional parameters. Default to None.

        Returns:
            file: The closed file object the attachment was written to.

        Raises:
            BooksException: If the response status is not 200 or 201.
        """
        http = Http(timeout=60*1000)
        url = url + '?' + form_query_string(details)
        if query is not None:
            url += '&' + form_query_string(query)
        # This request accepts HTML/XML (file payloads) rather than JSON.
        headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Content-type': 'application/x-www-form-urlencoded',
            'User-Agent': 'ZohoBooks-Python-Wrappers/1.0',
            'Accept-Charset': 'UTF-8'
        }
        resp, content = http.request(url, 'GET', headers=headers)
        if resp['status'] == '200' or resp['status'] == '201':
            # Pull the server-suggested filename out of Content-Disposition;
            # fall back to 'attachment.<format>' when it is absent.
            attachment = resp['content-disposition']
            regex = search(r'".*"',attachment)
            filename = regex.group().strip('"') if regex is not None else \
                'attachment.' + query['accept']
            # NOTE(review): download directory is hard-coded to a developer's
            # machine -- confirm this should be configurable.
            file_location = "/home/likewise-open/ZOHOCORP/chitra-2327/Downloads/"
            file_name = open(file_location + filename, "w")
            file_name.write(content)
            file_name.close()
            return file_name
        else:
            raise BooksException(convert(loads(content)))

    def post(self, url, details, field, query_string=None, attachment=None):
        """Make a POST request for the given url and query string.

        Args:
            url(str): Url passed by the user.
            details(dict): Dictionary containing authtoken and organization id.
            field(dict): Dictionary containing required parameters.
            query_string(dict, optional): Additional parameters. Default to None.
            attachment(list, optional): Files to be attached. Default to None.

        Returns:
            dict: Dictionary containing response content.

        Raises:
            BooksException: If the response status is not 200 or 201.
        """
        http, headers = get_http_connection()
        url = url + '?' + form_query_string(details)
        if query_string is not None:
            url = url + '&' + form_query_string(query_string)
        # Plain form-encoding unless files are attached, in which case the
        # body (and headers) switch to multipart/form-data.
        if attachment is None:
            body = urlencode(field)
        else:
            body, headers = encode_multipart(field, attachment)
        resp, content = http.request(url, 'POST', headers=headers,
                                     body=body)
        response = get_response(resp['status'], content)
        return response

    def put(self, url, details, field, query=None, attachment=None):
        """Make a PUT request for the given url and query string.

        Args:
            url(str): Url passed by the user.
            details(dict): Dictionary containing authtoken and organization id.
            field(dict): Dictionary containing required parameters.
            query(dict, optional): Additional parameters. Default to None.
            attachment(list, optional): Files to be attached. Default to None.

        Returns:
            dict: Dictionary containing response content.

        Raises:
            BooksException: If the response status is not 200 or 201.
        """
        http, headers = get_http_connection()
        url = url + '?' + form_query_string(details)
        if query is not None:
            url = url + '&' + form_query_string(query)
        if attachment is None:
            body = urlencode(field)
        else:
            body, headers = encode_multipart(field, attachment)
        resp, content = http.request(url, 'PUT', headers=headers,
                                     body=body)
        response = get_response(resp['status'], content)
        return response

    def delete(self, url, details, param=None):
        """Make a DELETE request for the given url and query string.

        Args:
            url(str): Url passed by the user.
            details(dict): Dictionary containing authtoken and organization id.
            param(dict, optional): Parameters to be passed in query string.

        Returns:
            dict: Dictionary containing response content.

        Raises:
            BooksException: If the response status is not 200 or 201.
        """
        http, headers = get_http_connection()
        url = url + '?' + form_query_string(details)
        if param is not None:
            url = url + '&' + form_query_string(param)
        response, content = http.request(url, 'DELETE', headers=headers)
        response = get_response(response['status'], content)
        return response
def form_query_string(parameter):
    """Serialise a dict into a raw key=value query string.

    Args:
        parameter(dict): Parameters to be converted to query string.

    Returns:
        str: '&'-joined 'key=value' pairs. Note that values are NOT
        URL-encoded; callers pass pre-safe values.
    """
    pairs = [str(key) + '=' + str(value) for key, value in parameter.items()]
    return '&'.join(pairs)
def encode_multipart(fields, file_list, boundary=None):
    """Encode form fields plus file attachments as multipart/form-data.

    Args:
        fields(dict): Parameters in key value pair (must contain 'JSONString').
        file_list(list): Files to be attached; each element maps a form name
            to a dict with 'filename', 'content' and optional 'mimetype'.
        boundary(str, optional): Multipart boundary. A random 30-character
            boundary is generated when None.

    Returns:
        tuple: Tuple containing body(str) and headers(dict).
    """
    def escape_quote(text):
        return text.replace('"', '\\"')

    if boundary is None:
        boundary = ''.join(choice(_BOUNDARY_CHARS) for _ in range(30))
    delimiter = '--{0}'.format(boundary)
    lines = []
    # Skip the plain fields entirely when JSONString is the empty JSON string.
    if fields['JSONString'] != '""':
        for name, value in fields.items():
            lines.append(delimiter)
            lines.append('Content-Disposition: form-data; name="{0}"'.format(
                escape_quote(name)))
            lines.append('')
            lines.append(str(value))
    for files in file_list:
        for name, value in files.items():
            filename = value['filename']
            if 'mimetype' in value:
                mimetype = value['mimetype']
            else:
                mimetype = guess_type(filename)[0] or 'application/octet-stream'
            lines.append(delimiter)
            lines.append(
                'Content-Disposition: form-data; name="{0}";filename="{1}"'.format(
                    escape_quote(name), escape_quote(filename)))
            lines.append('Content-Type: {0}'.format(mimetype))
            lines.append('')
            lines.append(value['content'])
    lines.append('--{0}--'.format(boundary))
    lines.append('')
    body = '\r\n'.join(lines)
    headers = {
        'Content-Type': 'multipart/form-data; boundary={0}'.format(boundary),
        'Content-Length': str(len(body)),
    }
    return (body, headers)
def convert(input):
    """Recursively replace unicode objects with UTF-8 byte strings.

    Args:
        input: A dict, list, unicode string, or any other object.

    Returns:
        The same structure with every unicode value encoded as UTF-8;
        non-container, non-unicode values are returned unchanged.
    """
    # Python 2 only: relies on the `unicode` type and dict.iteritems().
    if isinstance(input, dict):
        return dict((convert(key), convert(value))
                    for key, value in input.iteritems())
    elif isinstance(input, list):
        return [convert(item) for item in input]
    elif isinstance(input, unicode):
        return input.encode('utf-8')
    else:
        return input
def get_response(status, content):
    """Parse a JSON response body and return it, or raise on failure.

    Args:
        status(str): Response status code.
        content(str): JSON body containing code and message.

    Returns:
        dict: Response message.

    Raises:
        BooksException: If status is not '200' or '201'.
    """
    response = convert(loads(content))
    if status in ('200', '201'):
        return response
    raise BooksException(response)
def get_http_connection():
    """Return an httplib2 connection and the default JSON request headers."""
    connection = Http(timeout=60*1000)
    default_headers = {
        'Accept': 'application/json',
        'Content-type': 'application/x-www-form-urlencoded',
        'User-Agent': 'ZohoBooks-Python-Wrappers/1.0',
        'Accept-Charset': 'UTF-8',
    }
    return connection, default_headers
| {
"repo_name": "zoho/books-python-wrappers",
"path": "books/util/ZohoHttpClient.py",
"copies": "1",
"size": "9878",
"license": "mit",
"hash": 7889225427765470000,
"line_mean": 34.153024911,
"line_max": 81,
"alpha_frac": 0.5649929135,
"autogenerated": false,
"ratio": 4.498178506375227,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5563171419875227,
"avg_score": null,
"num_lines": null
} |
#$Id$
from urllib import urlencode,urlretrieve
from httplib2 import Http
from json import dumps,loads
from re import search
from mimetypes import guess_type
from string import digits,ascii_letters
from random import choice
from projects.exception.ProjectsException import ProjectsException
_BOUNDARY_CHARS = digits + ascii_letters
class ZohoHttpClient:
"""This class is used to create get, post, put and delete connections
for the request."""
def get(self, url, details, query=None):
"""This method is used to make get request for the given url and
query string.
Args:
url(str): Url passed by the user.
details(dict): Dictionary containing authtoken and organization id.
query(dict, optional): Additional parameters. Default to None.
Returns:
dict: Dictionary containing response content.
"""
http, headers = get_http_connection()
url = url + '?' + form_query_string(details)
if query is not None:
url += '&' + form_query_string(query)
#print url
resp, content = http.request(url, 'GET', headers=headers)
#print resp
#print content
response = get_response(resp['status'], content)
return response
def getfile(self, url, details, query=None):
"""This method is used to make get request for the given url and
query string.
Args:
url(str): Url passed by the user.
details(dict): Dictionary containing authtoken and organization id.
query(dict, optional): Additional parameters. Default to None.
Returns:
dict: Dictionary containing response content.
"""
http = Http(timeout=60*1000)
url = url + '?' + form_query_string(details)
if query is not None:
url += '&' + form_query_string(query)
headers = {
'Accept': \
'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Content-type': 'application/x-www-form-urlencoded',
'User-Agent': 'ZohoProjects-Python-Wrappers/1.0',
'Accept-Charset': 'UTF-8'
}
resp, content = http.request(url, 'GET', headers=headers)
if resp['status'] == '200' or resp['status'] == '201':
attachment = resp['content-disposition']
regex = search(r'".*"',attachment)
filename = regex.group().strip('"') if regex is not None else \
'attachment.' + query['accept']
file_location = "/home/likewise-open/ZOHOCORP/chitra-2327/Downloads/"
file_name = open(file_location + filename, "w")
file_name.write(content)
file_name.close()
return file_name
else:
raise ProjectsException(convert(loads(content)))
def post(self, url, details, field, query_string=None, attachment=None):
"""This method is used to make post request for the given url
and query string.
Args:
url(str): Url passed by the user.
details(dict): Dictionary containing authtoken and organization id.
data(dict): Dictionary containing required parameters.
query(dict, optional): Additional parameters. Default to None.
attachment(dict, None): Files to be attached. Default to None.
Returns:
tuple: Tuple containing response status(str) and content(dict).
"""
http, headers = get_http_connection()
url = url + '?' + form_query_string(details)
if query_string is not None:
url = url + '&' + form_query_string(query_string)
if attachment is None:
body = urlencode(field)
else:
body, headers = encode_multipart(field, attachment)
resp, content = http.request(url, 'POST', headers=headers,
body=body)
#print url
#print content
response = get_response(resp['status'], content)
return response
def put(self, url, details, field, query=None, attachment=None):
    """Make a PUT request for the given url and query string.

    Args:
        url(str): Url passed by the user.
        details(dict): Dictionary containing authtoken and organization id.
        field(dict): Dictionary containing required parameters.
        query(dict, optional): Additional parameters. Default to None.
        attachment(dict, optional): Files to be attached. Default to None.

    Returns:
        tuple: Tuple containing response status(str) and content(dict).
    """
    http, headers = get_http_connection()
    url = url + '?' + form_query_string(details)
    if query is not None:
        url = url + '&' + form_query_string(query)
    if attachment is None:
        body = urlencode(field)
    else:
        # Multipart encoding supplies its own Content-Type header.
        body, headers = encode_multipart(field, attachment)
    resp, content = http.request(url, 'PUT', headers=headers,
        body=body)
    return get_response(resp['status'], content)
def delete(self, url, details, param=None):
    """Make a DELETE request for the given url and query string.

    Args:
        url(str): Url passed by the user.
        details(dict): Dictionary containing authtoken and organization id.
        param(dict, optional): Parameters to be passed in query string.

    Returns:
        tuple: Tuple containing response status(str) and content(dict).
    """
    http, headers = get_http_connection()
    url = url + '?' + form_query_string(details)
    if param is not None:
        url = url + '&' + form_query_string(param)
    # Distinct names for the raw response and the parsed result; the
    # old code reused one name for both.
    resp, content = http.request(url, 'DELETE', headers=headers)
    return get_response(resp['status'], content)
def form_query_string(parameter):
    """Build a query string from a parameter dict.

    Args:
        parameter(dict): Parameters to be converted to query string.

    Returns:
        str: Query string of the form ``key1=value1&key2=value2``
        (values are not URL-escaped).
    """
    return '&'.join('%s=%s' % (key, value)
                    for key, value in parameter.items())
def encode_multipart(fields, file_list, boundary=None):
    """This method is used to encode multipart data.

    Args:
        fields(dict): Parameters in key value pair.
        file_list(list): Dicts describing files to be attached; each value
            is a dict with 'filename', 'content' and optional 'mimetype'.
        boundary(str, optional): Boundary. Default to None.

    Returns:
        tuple: Tuple containing body(str) and headers(dict).
    """
    def escape_quote(s):
        # Double quotes must be escaped inside header parameter values.
        return s.replace('"', '\\"')
    if boundary is None:
        # Random 30-char boundary; _BOUNDARY_CHARS is defined elsewhere
        # in this module.
        boundary = ''.join(choice(_BOUNDARY_CHARS) for i in range(30))
    lines = []
    # NOTE(review): '""' looks like a sentinel some caller passes for
    # "no form fields" -- an ordinary empty dict does not equal '""'.
    # Confirm against callers.
    if fields != '""':
        for name, value in fields.items():
            lines.extend(('--{0}'.format(boundary),
                'Content-Disposition: form-data; name="{0}"'\
                .format(escape_quote(name)),
                '',
                str(value),
                ))
    for files in file_list:
        for name, value in files.items():
            filename = value['filename']
            if 'mimetype' in value:
                mimetype = value['mimetype']
            else:
                # Guess from the filename extension, else a safe default.
                mimetype = guess_type(filename)[0] or \
                    'application/octet-stream'
            lines.extend(('--{0}'.format(boundary),\
                'Content-Disposition: form-data; name="{0}";filename="{1}"'\
                .format(escape_quote(name),
                escape_quote(filename)),
                'Content-Type: {0}'.format(mimetype),
                '',
                value['content'],
                ))
    lines.extend(('--{0}--'.format(boundary),'',))
    # Multipart bodies use CRLF line endings.
    body = '\r\n'.join(lines)
    headers = {
        'Content-Type': 'multipart/form-data; boundary={0}'\
            .format(boundary),
        'Content-Length': str(len(body)),
    }
    return (body, headers)
def convert(input):
    """Recursively convert unicode objects inside *input* to utf-8 strings.

    Args:
        input: dict, list, unicode object, or anything else.

    Returns:
        The same structure with every unicode object utf-8 encoded.
    """
    if isinstance(input, dict):
        converted = {}
        for key, value in input.iteritems():
            converted[convert(key)] = convert(value)
        return converted
    if isinstance(input, list):
        return map(convert, input)
    if isinstance(input, unicode):
        return input.encode('utf-8')
    return input
def get_response(status, content):
    """Check the status code and return the parsed response body.

    Args:
        status(str): Response status code.
        content(str): JSON response body containing code and message.

    Returns:
        dict: Response message.

    Raises:
        ProjectsException: If status is not '200' or '201'.
    """
    if status in ('204', '404'):
        raise ProjectsException({
            'code': status,
            'message': "No content for the requested resource."
        })
    response = convert(loads(content))
    if status == '200' or status == '201':
        return response
    raise ProjectsException(response['error'])
def get_http_connection():
    """Return an Http client (60000-second timeout) plus the default
    request headers used by this wrapper."""
    default_headers = {
        'Accept': 'application/json',
        'Content-type': 'application/x-www-form-urlencoded',
        'User-Agent': 'ZohoProjects-Python-Wrappers/1.0',
        'Accept-Charset': 'UTF-8',
    }
    return Http(timeout=60 * 1000), default_headers
| {
"repo_name": "zoho/projects-python-wrappers",
"path": "projects/util/ZohoHttpClient.py",
"copies": "1",
"size": "10159",
"license": "mit",
"hash": 4843706148375089000,
"line_mean": 33.910652921,
"line_max": 81,
"alpha_frac": 0.5638350231,
"autogenerated": false,
"ratio": 4.519128113879003,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.015629389461277263,
"num_lines": 291
} |
# $Id$
"""Generic Routing Encapsulation."""
import struct
import dpkt
# Flag bits in the GRE 'flags' header word.
GRE_CP = 0x8000  # Checksum Present
GRE_RP = 0x4000  # Routing Present
GRE_KP = 0x2000  # Key Present
GRE_SP = 0x1000  # Sequence Present
GRE_SS = 0x0800  # Strict Source Route
GRE_AP = 0x0080  # Acknowledgment Present

# (flag bit, field name, struct format) for each optional header field,
# listed in wire order; consumed by GRE.opt_fields_fmts().
GRE_opt_fields = (
    (GRE_CP|GRE_RP, 'sum', 'H'), (GRE_CP|GRE_RP, 'off', 'H'),
    (GRE_KP, 'key', 'I'), (GRE_SP, 'seq', 'I'), (GRE_AP, 'ack', 'I')
)
class GRE(dpkt.Packet):
    """Generic Routing Encapsulation header.

    The version-1 option layout (len/callid) appears to follow the PPTP
    GRE variant -- confirm against RFC 2637.
    """
    __hdr__ = (
        ('flags', 'H', 0),
        ('p', 'H', 0x0800),  # ETH_TYPE_IP
    )
    _protosw = {}
    sre = ()

    def get_v(self):
        # Version occupies the low 3 bits of the flags word.
        return self.flags & 0x7
    def set_v(self, v):
        self.flags = (self.flags & ~0x7) | (v & 0x7)
    v = property(get_v, set_v)

    def get_recur(self):
        # Recursion control: 3 bits starting at bit 5.
        return (self.flags >> 5) & 0x7
    def set_recur(self, v):
        self.flags = (self.flags & ~0xe0) | ((v & 0x7) << 5)
    recur = property(get_recur, set_recur)

    class SRE(dpkt.Packet):
        # Source Route Entry, present when the Routing Present bit is set.
        __hdr__ = [
            ('family', 'H', 0),
            ('off', 'B', 0),
            ('len', 'B', 0)
        ]
        def unpack(self, buf):
            dpkt.Packet.unpack(self, buf)
            # Payload of an SRE is exactly 'len' bytes.
            self.data = self.data[:self.len]

    def opt_fields_fmts(self):
        """Return (field names, struct formats) for the optional header
        fields implied by the current version and flag bits."""
        if self.v == 0:
            fields, fmts = [], []
            opt_fields = GRE_opt_fields
        else:
            # Non-zero version: fixed len/callid pair, then only the last
            # two optional entries (seq, ack) can follow.
            fields, fmts = [ 'len', 'callid' ], [ 'H', 'H' ]
            opt_fields = GRE_opt_fields[-2:]
        for flags, field, fmt in opt_fields:
            if self.flags & flags:
                fields.append(field)
                fmts.append(fmt)
        return fields, fmts

    def unpack(self, buf):
        dpkt.Packet.unpack(self, buf)
        fields, fmts = self.opt_fields_fmts()
        if fields:
            fmt = ''.join(fmts)
            fmtlen = struct.calcsize(fmt)
            vals = struct.unpack(fmt, self.data[:fmtlen])
            self.data = self.data[fmtlen:]
            # Attach decoded optional fields as instance attributes.
            self.__dict__.update(dict(zip(fields, vals)))
        if self.flags & GRE_RP:
            # Consume Source Route Entries until the zero-length terminator.
            l = []
            while True:
                sre = self.SRE(self.data)
                self.data = self.data[len(sre):]
                l.append(sre)
                if not sre.len:
                    break
            self.sre = l
        # Decode the payload via the Ethernet protocol-type dispatch table
        # and expose it under its class name (e.g. self.ip).
        self.data = ethernet.Ethernet._typesw[self.p](self.data)
        setattr(self, self.data.__class__.__name__.lower(), self.data)

    def __len__(self):
        opt_fmtlen = struct.calcsize(''.join(self.opt_fields_fmts()[1]))
        return self.__hdr_len__ + opt_fmtlen + \
            sum(map(len, self.sre)) + len(self.data)

    # XXX - need to fix up repr to display optional fields...
    def __str__(self):
        # Re-pack: fixed header, optional fields, SREs, then payload.
        fields, fmts = self.opt_fields_fmts()
        if fields:
            vals = []
            for f in fields:
                vals.append(getattr(self, f))
            opt_s = struct.pack(''.join(fmts), *vals)
        else:
            opt_s = ''
        return self.pack_hdr() + opt_s + ''.join(map(str, self.sre)) + \
            str(self.data)
# XXX - auto-load GRE dispatch table from Ethernet dispatch table
# (imported here at the bottom, presumably to dodge a circular
# import with ethernet -- confirm before moving it to the top).
import ethernet
GRE._protosw.update(ethernet.Ethernet._typesw)
| {
"repo_name": "tthtlc/dpkt",
"path": "dpkt/gre.py",
"copies": "17",
"size": "3225",
"license": "bsd-3-clause",
"hash": 1681907018322016500,
"line_mean": 30.3106796117,
"line_max": 72,
"alpha_frac": 0.4998449612,
"autogenerated": false,
"ratio": 3.208955223880597,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
# $Id$
import array
# CRC-32C Checksum
# http://tools.ietf.org/html/rfc3309
def _build_crc32c_table():
    """Generate the 256-entry CRC-32c lookup table.

    CRC-32c uses the Castagnoli polynomial; 0x82F63B78 is its
    reflected (LSB-first) representation.  Building the table at
    import time replaces 256 hand-typed literals and produces
    exactly the same values.
    """
    table = []
    for index in range(256):
        crc = index
        for _ in range(8):
            if crc & 1:
                crc = (crc >> 1) ^ 0x82F63B78
            else:
                crc >>= 1
        table.append(crc)
    return tuple(table)

crc32c_table = _build_crc32c_table()
def add(crc, buf):
    """Fold the bytes of *buf* into the running CRC-32c value *crc*."""
    for byte in array.array('B', buf):
        crc = (crc >> 8) ^ crc32c_table[(crc ^ byte) & 0xff]
    return crc
def done(crc):
    """Finalize a running CRC-32c value.

    Inverts the accumulated value and swaps it to big-endian byte order
    (cf. the RFC 3309 reference implementation).

    Args:
        crc: running CRC value produced by add().

    Returns:
        int: the finished 32-bit checksum.
    """
    # Plain int literals replace the Python-2-only trailing-L longs;
    # the values are identical.
    tmp = ~crc & 0xffffffff
    b0 = tmp & 0xff
    b1 = (tmp >> 8) & 0xff
    b2 = (tmp >> 16) & 0xff
    b3 = (tmp >> 24) & 0xff
    return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3
def cksum(buf):
    """Return computed CRC-32c checksum of *buf*.

    Seeds the CRC with all-ones, folds in the buffer, and finalizes.
    (The seed is a plain int now; the old trailing-L long literal was a
    Python 2 artifact with the same value.)
    """
    return done(add(0xffffffff, buf))
| {
"repo_name": "lzp819739483/dpkt",
"path": "dpkt/crc32c.py",
"copies": "17",
"size": "4094",
"license": "bsd-3-clause",
"hash": 6837719413612497000,
"line_mean": 50.175,
"line_max": 68,
"alpha_frac": 0.7603810454,
"autogenerated": false,
"ratio": 1.695940347970174,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0022724780701754385,
"num_lines": 80
} |
# $Id$
import canary.context
from canary.loader import QueuedRecord
def records_by_heading (context, term):
    """Return (authors, title, source, pubmed_id) tuples for every
    sentinel study whose reference carries the given MeSH term."""
    cursor = context.get_cursor()
    matches = []
    # GROUP BY reference_id because some terms repeat w/diff qualifiers
    cursor.execute("""
        SELECT reference_id
        FROM reference_mesh
        WHERE term = %s
        GROUP BY reference_id
        """, term)
    reference_ids = [row[0] for row in cursor.fetchall()]
    for reference_id in reference_ids:
        cursor.execute("""
            SELECT authors, title, source, pubmed_id
            FROM sentinel_studies
            WHERE reference_id = %s
            """, reference_id)
        for row in cursor.fetchall():
            matches.append((row[0], row[1], row[2], row[3]))
    return matches
def records_by_heading_index (context):
    """Return (term, term_count, distinct_record_count) for every MeSH
    term attached to more than four references, most-referenced first."""
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT term, COUNT(term) as termcount, COUNT(DISTINCT reference_id) AS idcount
        FROM reference_mesh
        GROUP BY term
        HAVING COUNT(term) > 4
        ORDER BY idcount DESC, term
        """)
    results = []
    # fetchall() never yields None rows, so the old "if row == None:
    # break" guard (equality where identity was meant) was dead code.
    for row in cursor.fetchall():
        results.append((row[0], row[1], row[2]))
    return results
# NOTE: queued_record.status is hard-coded to 2
def records_by_journal (context, issn, term_map={}):
    """Return (journal_title, queued_records) for curated studies
    published in the journal with the given ISSN.

    Args:
        context: application context providing cursor and logger.
        issn(str): ISSN of the journal to look up.
        term_map(dict): canonical field name -> source Term list; only
            the 'issn' entry is used here.

    Returns:
        tuple: (journal_title(str), list of QueuedRecord).  On error,
        whatever was accumulated before the failure is returned and the
        error is logged.
    """
    journal_title = ''
    queued_records = []
    issn_terms = term_map['issn']
    # Accept a match on any of the source-specific ISSN term ids.
    issn_clause = ' OR '.join(['queued_record_metadata.term_id=%s' % term.uid for term in issn_terms])
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT journal_title
        FROM medline_journals
        WHERE issn = %s
        """, issn)
    try:
        rows = cursor.fetchall()
        if len(rows) != 1:
            raise Exception('Journal %s not found' % issn)
        journal_title = rows[0][0]
        # status = 2 is STATUS_CURATED; article_type 2..7 selects the
        # article-typed studies included in browsing.
        select_clause = """
            SELECT queued_records.uid
            FROM queued_records, queued_record_metadata, studies
            WHERE queued_record_metadata.queued_record_id = queued_records.uid
            AND queued_records.uid = studies.record_id
            AND queued_records.status = 2
            AND studies.article_type >= 2
            AND studies.article_type < 8
            AND (%s)
            """ % issn_clause
        cursor.execute(select_clause + """
            AND queued_record_metadata.value = %s
            """, issn
            )
        rows = cursor.fetchall()
        for row in rows:
            queued_record = QueuedRecord(context, row[0])
            queued_records.append(queued_record)
    except Exception, e:
        context.logger.error('Records by journal: %s', e)
    return journal_title, queued_records
def records_by_journal_index (context, term_map={}):
    """Count curated, article-typed studies per journal; yields
    (count, issn, journal_title, abbreviation) ordered by title."""
    cursor = context.get_cursor()
    issn_terms = term_map['issn']
    # Accept a hit on any of the source-specific ISSN term ids.
    issn_clause = ' OR '.join(['term_id=%s' % term.uid for term in issn_terms])
    select_clause = """
        SELECT COUNT(*) AS the_count, value, journal_title, abbreviation
        FROM queued_record_metadata, queued_records, medline_journals, studies
        WHERE queued_record_metadata.queued_record_id = queued_records.uid
        AND queued_record_metadata.value = medline_journals.issn
        AND queued_records.uid = studies.record_id
        AND queued_records.status = 2
        AND studies.article_type >= 2
        AND studies.article_type < 8
        AND (%s)
        GROUP BY value
        ORDER BY journal_title
        """ % issn_clause
    cursor.execute(select_clause)
    return [(row[0], row[1], row[2], row[3]) for row in cursor.fetchall()]
def records_by_methodology (context, methodology_id):
    """Return QueuedRecords of curated (status 2), article-typed
    (article_type 2..7) studies using the given methodology study type.

    Returns an empty list (and logs the error) on failure.
    """
    queued_records = []
    cursor = context.get_cursor()
    try:
        cursor.execute("""
            SELECT queued_records.uid
            FROM queued_records, studies, methodologies
            WHERE queued_records.uid = studies.record_id
            AND studies.uid = methodologies.study_id
            AND queued_records.status = 2
            AND studies.article_type >= 2
            AND studies.article_type < 8
            AND methodologies.study_type_id = %s
            """, methodology_id
            )
        rows = cursor.fetchall()
        for row in rows:
            queued_record = QueuedRecord(context, row[0])
            queued_records.append(queued_record)
    except Exception, e:
        context.logger.error('Records by methodology: %s', e)
    return queued_records
def records_by_methodology_index (context):
    """Count curated, article-typed studies per methodology type;
    yields (study_type_id, count)."""
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT study_type_id, COUNT(study_type_id) as the_count
        FROM methodologies, studies, queued_records
        WHERE methodologies.study_id = studies.uid
        AND studies.record_id = queued_records.uid
        AND queued_records.status = 2
        AND studies.article_type >= 2
        AND studies.article_type < 8
        GROUP BY study_type_id
        """)
    return [(row[0], row[1]) for row in cursor.fetchall()]
def records_by_year (context, year, term_map={}):
    """Return QueuedRecords of curated (status 2), article-typed
    (article_type 2..7) studies whose publication date begins with *year*.

    Returns an empty list (and logs the error) on failure.
    """
    cursor = context.get_cursor()
    queued_records = []
    year_terms = term_map['pubdate']
    # Match any of the source-specific publication-date term ids.
    year_clause = ' OR '.join(['queued_record_metadata.term_id=%s' % term.uid for term in year_terms])
    try:
        select_clause = """
            SELECT queued_records.uid
            FROM queued_records, queued_record_metadata, studies
            WHERE queued_records.uid = queued_record_metadata.queued_record_id
            AND queued_records.uid = studies.record_id
            AND queued_records.status = 2
            AND studies.article_type >= 2
            AND studies.article_type < 8
            AND (%s)
            """ % year_clause
        # Compare only the leading four characters (the year) of the
        # stored date value.
        cursor.execute(select_clause + """
            AND SUBSTRING(queued_record_metadata.value, 1, 4) LIKE %s
            """, str(year) + '%'
            )
        rows = cursor.fetchall()
        for row in rows:
            queued_record = QueuedRecord(context, row[0])
            queued_records.append(queued_record)
    except Exception, e:
        context.logger.error('Records by year: %s', e)
    return queued_records
def records_by_year_index (context, term_map={}):
    """Count curated, article-typed studies per publication year,
    newest first; yields (count, year)."""
    cursor = context.get_cursor()
    year_terms = term_map['pubdate']
    year_clause = ' OR '.join(['term_id=%s' % term.uid for term in year_terms])
    select_clause = """
        SELECT COUNT(*) AS the_count, SUBSTRING(value, 1, 4) AS the_year
        FROM queued_record_metadata, queued_records, studies
        WHERE queued_record_metadata.queued_record_id = queued_records.uid
        AND queued_records.uid = studies.record_id
        AND queued_records.status = 2
        AND studies.article_type >= 2
        AND studies.article_type < 8
        AND (%s)
        GROUP BY SUBSTRING(value, 1, 4)
        ORDER BY value DESC
        """ % year_clause
    cursor.execute(select_clause)
    return [(row[0], row[1]) for row in cursor.fetchall()]
def records_by_author (context, author):
    """Return QueuedRecords of curated (status 2), article-typed
    (article_type 2..7) studies whose author metadata starts with *author*.

    Returns an empty list (and logs the error) on failure.
    """
    cursor = context.get_cursor()
    queued_records = []
    source_catalog = context.get_source_catalog()
    complete_mapping = source_catalog.get_complete_mapping()
    term_list = complete_mapping['author']
    # Match any of the source-specific author term ids.
    term_clause = ' OR '.join(['queued_record_metadata.term_id=%s' % term.uid for term in term_list])
    try:
        select_clause = """
            SELECT queued_records.uid
            FROM queued_records, queued_record_metadata, studies
            WHERE queued_records.uid = queued_record_metadata.queued_record_id
            AND queued_records.uid = studies.record_id
            AND queued_records.status = 2
            AND studies.article_type >= 2
            AND studies.article_type < 8
            AND (%s)
            """ % term_clause
        # Prefix match on the stored author string.
        cursor.execute(select_clause + """
            AND value LIKE %s
            """, str(author) + '%'
            )
        rows = cursor.fetchall()
        for row in rows:
            queued_record = QueuedRecord(context, row[0])
            queued_records.append(queued_record)
    except Exception, e:
        context.logger.error('Records by author: %s', e)
    return queued_records
def records_by_author_index (context):
    """Count curated, article-typed studies per author value;
    yields (count, author_value) ordered by value."""
    cursor = context.get_cursor()
    source_catalog = context.get_source_catalog()
    complete_mapping = source_catalog.get_complete_mapping()
    author_terms = complete_mapping['author']
    term_clause = ' OR '.join(['term_id=%s' % term.uid for term in author_terms])
    select_clause = """
        SELECT COUNT(*) AS the_count, value
        FROM queued_record_metadata, queued_records, studies
        WHERE queued_record_metadata.queued_record_id = queued_records.uid
        AND queued_records.uid = studies.record_id
        AND queued_records.status = 2
        AND studies.article_type >= 2
        AND studies.article_type < 8
        AND (%s)
        GROUP BY value
        ORDER BY value
        """ % term_clause
    cursor.execute(select_clause)
    return [(row[0], row[1]) for row in cursor.fetchall()]
# Whitelist mapping browse concept name -> DB table name.  Because the
# keys are fixed here, interpolating the looked-up table name into SQL
# in the records_by_concept* functions cannot be influenced by callers.
concept_tables = {
    'exposure': 'exposures',
    'outcome': 'outcomes',
    'species': 'species',
    'risk_factor': 'risk_factors',
}
def records_by_concept (context, concept, concept_id):
cursor = context.get_cursor()
queued_records = []
table_name = concept_tables[concept]
try:
select_clause = """
SELECT queued_records.uid
FROM queued_records, studies, %s
WHERE %s.study_id = studies.uid
AND studies.record_id = queued_records.uid
AND %s.concept_id = %s
AND queued_records.status = 2
AND studies.article_type >= 2
AND studies.article_type < 8
""" % (table_name,
table_name,
table_name, concept_id)
cursor.execute(select_clause)
rows = cursor.fetchall()
for row in rows:
queued_record = QueuedRecord(context, row[0])
queued_records.append(queued_record)
except Exception, e:
context.logger.error('Records by concept: %s', e)
return queued_records
def records_by_concept_index (context, concept):
    """Count curated, article-typed studies per concept of the given
    type; yields (count, concept_id, term) ordered by term."""
    cursor = context.get_cursor()
    results = []
    # Table name comes from the concept_tables whitelist.
    table_name = concept_tables[concept]
    # NOTE: the old query repeated the studies/queued_records join
    # condition twice; one copy is sufficient and equivalent.
    select_clause = """
        SELECT COUNT(concept_id) AS the_count, concept_id, term
        FROM %s, studies, queued_records
        WHERE %s.study_id = studies.uid
        AND studies.record_id = queued_records.uid
        AND queued_records.status = 2
        AND studies.article_type >= 2
        AND studies.article_type < 8
        GROUP BY concept_id
        ORDER BY term
        """ % (table_name, table_name)
    cursor.execute(select_clause)
    rows = cursor.fetchall()
    results.extend([(r[0], r[1], r[2]) for r in rows])
    return results
| {
"repo_name": "dchud/sentinel",
"path": "canary/browse.py",
"copies": "1",
"size": "11203",
"license": "mit",
"hash": -1068167146241494900,
"line_mean": 31.95,
"line_max": 102,
"alpha_frac": 0.5884138177,
"autogenerated": false,
"ratio": 3.848505668155273,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49369194858552734,
"avg_score": null,
"num_lines": null
} |
# $Id$
import cgi, fakefile
# classes involving calling jonpy-style handlers from WSGI server connectors
class Request(cgi.Request):
    """jonpy request driven by a WSGI server.

    Adapts the WSGI (environ, start_response) calling convention to the
    cgi.Request interface so jonpy-style handlers run under any WSGI
    server.
    """
    def _init(self, environ, start_response):
        self.environ = environ
        self.stdin = environ["wsgi.input"]
        self._wsgi_start_response = start_response
        # Filled in by output_headers(); WSGI only hands us the write
        # callable once start_response has been invoked.
        self._wsgi_write = None
        super(Request, self)._init()
    def output_headers(self):
        """Relay the accumulated headers to the WSGI layer exactly once."""
        if self._doneHeaders:
            raise cgi.SequencingError("output_headers() called twice")
        # The Status pseudo-header becomes the WSGI status line and is
        # filtered out of the header list itself.
        self._wsgi_write = self._wsgi_start_response(
            self.get_header("Status") or "200 OK",
            [header for header in self._headers if header[0].lower() != "status"])
        self._doneHeaders = 1
    def process(self, environ, start_response):
        """Run the configured handler for one request.

        Always returns [] -- the body is emitted through the write
        callable obtained from start_response, not via the iterable.
        """
        self._init(environ, start_response)
        try:
            handler = self._handler_type()
        except:
            # Handler construction failed: render a traceback response.
            self.traceback()
        else:
            try:
                handler.process(self)
            except:
                handler.traceback(self)
        self.flush()
        return []
    def error(self, s):
        # Route error text to the server-provided error stream.
        self.environ["wsgi.errors"].write(s)
    def _write(self, s):
        if not self.aborted:
            # Headers are sent lazily on the first body write.
            if not self._doneHeaders:
                self.output_headers()
            self._wsgi_write(s)
# WSGI request variant that gzip-compresses output via cgi.GZipMixIn.
class GZipRequest(cgi.GZipMixIn, Request):
    pass
class Application(object):
    """WSGI application callable serving jonpy-style handlers.

    Each call builds a fresh request of *request_type* bound to
    *handler_type* and delegates the whole exchange to it.
    """
    def __init__(self, handler_type, request_type=Request):
        self._handler_type = handler_type
        self._request_type = request_type
    def __call__(self, environ, start_response):
        request = self._request_type(self._handler_type)
        return request.process(environ, start_response)
# classes involving calling WSGI applications from jonpy-style server connectors
class Handler(cgi.Handler):
    """jonpy handler that runs a WSGI application.

    The wrapped application is stored as a one-tuple in the
    ``application`` class attribute (see create_handler()).
    """
    def process(self, req):
        # Build a WSGI environ dict from the jonpy request.
        environ = dict(req.environ.items())
        environ["wsgi.version"] = (1, 0)
        environ["wsgi.input"] = req
        environ["wsgi.errors"] = fakefile.FakeOutput(req.error)
        environ["wsgi.multithread"] = 1
        environ["wsgi.multiprocess"] = 1
        # Plain CGI runs one process per request, hence run_once.
        environ["wsgi.run_once"] = isinstance(req, cgi.CGIRequest)
        # is this right? PEP-333 seems to assume it is
        if environ.get("HTTPS") in ("on", "1"):
            environ["wsgi.url_scheme"] = "https"
        else:
            environ["wsgi.url_scheme"] = "http"
        def start_response(status, response_headers, exc_info=None):
            # Per PEP 333: with exc_info, re-raise if headers were
            # already sent; a second plain call is a programming error.
            # (The Status header doubles as the "headers set" flag.)
            if exc_info:
                try:
                    if req.get_header("Status") is not None:
                        raise exc_info[0], exc_info[1], exc_info[2]
                finally:
                    # Break the traceback reference cycle.
                    exc_info = None
            elif req.get_header("Status") is not None:
                raise AssertionError("start_response() called twice")
            req.set_header("Status", status)
            for header, value in response_headers:
                req.add_header(header, value)
            return req.write
        appiter = self.application[0](environ, start_response)
        try:
            # Stream the application's output; empty strings are skipped
            # (PEP 333: headers must not be sent until actual output).
            for s in appiter:
                if s:
                    req.write(s)
            req.flush()
        finally:
            # Close the application iterable if it supports it.
            if hasattr(appiter, "close"):
                appiter.close()
# Handler variant that adds cgi.DebugHandlerMixIn diagnostics.
class DebugHandler(cgi.DebugHandlerMixIn, Handler):
    pass
def create_handler(application, handler_type=Handler):
    """Return a handler_type subclass permanently bound to *application*.

    The application is stored inside a one-tuple so it is kept as plain
    class data (Handler.process unwraps it via self.application[0]).
    """
    return type('BoundHandler', (handler_type,),
                {'application': (application,)})
| {
"repo_name": "jribbens/jonpy",
"path": "jon/wsgi.py",
"copies": "1",
"size": "3111",
"license": "mit",
"hash": 104432721281414980,
"line_mean": 26.5309734513,
"line_max": 80,
"alpha_frac": 0.6467373835,
"autogenerated": false,
"ratio": 3.664310954063604,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48110483375636043,
"avg_score": null,
"num_lines": null
} |
# $Id$
import copy
import email
import logging
import re
import time
import traceback
import types
import dtuple
from canary.context import Cacheable
import canary.search
from canary.source_catalog import Source, Term, SourceCatalog
from canary.study import Study
from canary.utils import DTable
class Queue:
    """In-memory collection of queued Batch objects."""
    def __init__ (self):
        self.batches = []
        self.logger = logging.getLogger(str(self.__class__))
    def get_batch (self, batch_id):
        """Return the loaded batch with the given uid, or None."""
        for batch in self.batches:
            if batch.uid == batch_id:
                return batch
        return None
    def get_batch_by_name (self, name):
        """Return the loaded batch with the given name, or None."""
        for batch in self.batches:
            if batch.name == name:
                return batch
        return None
    def load (self, context):
        """Populate self.batches from every row of queued_batches."""
        cursor = context.get_cursor()
        cursor.execute("""
            SELECT *
            FROM queued_batches
            ORDER BY uid, date_added
            """)
        # dtuple wraps rows so columns can be addressed by name.
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            batch = Batch()
            for field in fields:
                batch.set(field, row[field])
            self.batches.append(batch)
def find_needed_papers (context):
    """ Find queued papers marked with "needs_paper" == 1."""
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT uid
        FROM queued_records
        WHERE needs_paper = 1
        AND queued_records.status < %s
        """, QueuedRecord.STATUS_CURATED)
    return [QueuedRecord(context, row[0]) for row in cursor.fetchall()]
class QueuedRecord (Cacheable, DTable):
    # Workflow states for a queued record.
    STATUS_UNCLAIMED = 0
    STATUS_CLAIMED = 1
    STATUS_CURATED = 2
    # Cache key prefix; entries are stored as 'record:<uid>' (see save()).
    CACHE_KEY = 'record'
def __init__ (self, context=None, uid=-1, *args, **kwargs):
    # Cacheable can hand back an already-populated instance; if
    # queued_batch_id is already set, skip re-initialization so cached
    # state is preserved.
    try:
        if self.queued_batch_id >= 0:
            return
    except AttributeError:
        pass
    self.uid = uid
    self.queued_batch_id = -1       # batch this record belongs to
    self.user_id = ''               # assigned user id, '' when unclaimed -- confirm
    self.status = self.STATUS_UNCLAIMED
    self.study_id = -1              # linked study uid, -1 when none
    self.metadata = {}              # (source_id, term_id) -> value or [values]
    self.title = ''
    self.source = ''
    self.unique_identifier = ''
    self.duplicate_score = 0
    self.needs_paper = int(False)   # stored in the DB as an int flag
def __str__ (self):
    """Two-line debug representation (note: no closing '>')."""
    header = '<QueuedRecord uid=%s queued_batch_id=%s' % (
        self.uid, self.queued_batch_id)
    detail = '\tstatus=%s study_id=%s' % (
        self.get_status(text=True), self.study_id)
    return header + '\n' + detail
def add_metadata (self, term, value, extra=''):
    """
    Add a metadata value for a given source metadata field,
    appending to a list or setting the single value depending
    on whether the term allows multiple values.
    """
    md_key = (term.source_id, term.uid)
    if term.is_multivalue:
        # setdefault yields the existing list or installs a fresh one;
        # only append values not recorded yet (preserves order, avoids
        # duplicates).  Replaces the Python-2-only has_key() idiom.
        values = self.metadata.setdefault(md_key, [])
        if value not in values:
            values.append(value)
    else:
        self.metadata[md_key] = value
def get_metadata (self, term):
    """
    Get the metadata value, or the list of values, for a given source's
    metadata field.  Unset fields yield '' (or [] for multivalue terms).
    """
    md_key = (term.source_id, term.uid)
    # 'in' replaces the Python-2-only has_key() call.
    if md_key in self.metadata:
        return self.metadata[md_key]
    # FIXME: temporary fix, better API would be better
    if term.is_multivalue:
        return []
    return ''
def set_metadata (self, term, value):
    """
    Override existing metadata values with a new value or values.
    Don't allow multivalues for non-multivalue terms, though.

    Raises:
        ValueError: if a list/tuple is given for a single-value term.
    """
    md_key = (term.source_id, term.uid)
    # list/tuple are exactly types.ListType/types.TupleType; spelling
    # them directly drops the deprecated types-module aliases.
    if isinstance(value, (list, tuple)):
        if not term.is_multivalue:
            raise ValueError('Term does not allow multiple values')
    self.metadata[md_key] = value
def get_mapped_metadata (self, term_map={}):
    """
    For every field in this record that is mapped from a specific source,
    return a map to its metadata values.

    If an md value comes back == '' or [], it probably doesn't exist for that
    record and term.  We don't want to clobber things like titles from
    one source when other source-titles aren't present, so don't overwrite
    values unless value is blank.
    """
    mapped_metadata = {}
    term_info_set = [(mapped_name, term) for mapped_name, term in term_map.items()]
    for mapped_name, terms in term_info_set:
        if isinstance(terms, types.ListType):
            # Several source terms can feed one canonical field name.
            for term in terms:
                md = self.get_metadata(term)
                if md:
                    mapped_metadata[mapped_name] = md
                else:
                    # Record the blank value only when nothing better
                    # is already present (no clobbering).
                    if not mapped_metadata.has_key(mapped_name):
                        mapped_metadata[mapped_name] = md
        else:
            # terms is really a single item
            term = terms
            md = self.get_metadata(term)
            if md:
                mapped_metadata[mapped_name] = md
            else:
                if not mapped_metadata.has_key(mapped_name):
                    mapped_metadata[mapped_name] = ''
    return mapped_metadata
def check_for_duplicates (self, context, term_map={}):
    """
    Simple tests to determine if this record is likely to be a
    duplicate of an existing record.

    Returns:
        dict: candidate record uid -> list of field names
        ('unique_identifier', 'title', 'source') whose values matched
        exactly.
    """
    cursor = context.get_cursor()
    potential_dupes = {}
    for field in ('unique_identifier', 'title', 'source'):
        # First, check for this exact same field value
        if len(term_map[field]) > 1:
            select_clause = """
                SELECT DISTINCT queued_record_id
                FROM queued_record_metadata
                WHERE ("""
            select_clause += ' OR '.join(['(term_id = %s) ' % \
                term.uid for term in term_map[field]])
            select_clause += ')'
        else:
            select_clause = """
                SELECT DISTINCT queued_record_id
                FROM queued_record_metadata
                WHERE term_id = %s""" % term_map[field].uid
        cursor.execute(select_clause + """
            AND value = %s
            AND queued_record_id != %s
            """, (getattr(self, field), self.uid))
        rows = cursor.fetchall()
        for row in rows:
            # Group matching fields per candidate record; setdefault
            # replaces the old try/append with bare-except idiom and
            # behaves identically.  (The unused 'score' local is gone.)
            potential_dupes.setdefault(row[0], []).append(field)
    return potential_dupes
def load (self, context, load_metadata=True):
    """
    Load a queued record.

    Note that if a source is not specified, every term will be
    looked-up again from the DB (rather than read from memory).

    Raises:
        ValueError: if no row exists for self.uid.
    """
    # Is it already loaded?  Convenience check for client calls --
    # don't need to verify loads from the cache.  It's possible it's
    # already loaded without metadata, in which case: reload.
    if context.config.use_cache:
        try:
            if self.queued_batch_id >= 0 \
                and self.metadata:
                # Already loaded
                return
        except AttributeError:
            # Not already loaded, so continue
            pass
    cursor = context.get_cursor()
    try:
        # To be safe, specify full table.field names because names overlap
        cursor.execute("""
            SELECT queued_records.uid, queued_records.queued_batch_id,
            queued_records.status, queued_records.user_id,
            queued_records.user_id, queued_records.study_id,
            queued_records.title, queued_records.source,
            queued_records.unique_identifier, queued_records.duplicate_score,
            queued_records.needs_paper, queued_batches.source_id AS batch_source_id
            FROM queued_records, queued_batches
            WHERE queued_records.uid = %s
            AND queued_batches.uid = queued_records.queued_batch_id
            """, int(self.uid))
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        if not rows:
            raise ValueError('No records found')
        row = dtuple.DatabaseTuple(desc, rows[0])
        # remove source_id from fields, it's not a proper attribute on self
        fields.remove('batch_source_id')
        # but save it for later!
        batch_source_id = row['batch_source_id']
        for field in fields:
            self.set(field, row[field])
        source_catalog = context.get_source_catalog()
        if load_metadata:
            # NOTE: the "ORDER BY sequence_position" might be a bad hack,
            # but it should preserve author name order.
            # FIXME if not.  And TESTME!
            cursor.execute("""
                SELECT *
                FROM queued_record_metadata
                WHERE queued_record_id = %s
                ORDER BY sequence_position
                """, self.uid)
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            rows = cursor.fetchall()
            for row in rows:
                row = dtuple.DatabaseTuple(desc, row)
                # Resolve each term id through the source catalog so
                # add_metadata can key on (source_id, term_id).
                term = source_catalog.get_term(row['term_id'])
                self.add_metadata(term, row['value'], extra=row['extra'])
    except ValueError:
        raise ValueError('Record not found')
    def save (self, context):
        """Insert (uid == -1) or update this queued record, then rewrite
        its metadata rows, refresh the cache entry, and re-index it.

        On any failure the error is logged via context.logger and
        swallowed; callers get no exception.
        """
        cursor = context.get_cursor()
        try:
            if self.uid == -1:
                # New record: let the db assign the uid, then read it back.
                cursor.execute("""
                    INSERT INTO queued_records
                    (uid,
                    queued_batch_id, status, user_id, study_id,
                    title, source, unique_identifier, duplicate_score, needs_paper)
                    VALUES (NULL,
                    %s, %s, %s, %s,
                    %s, %s, %s, %s, %s)
                    """, (self.queued_batch_id, self.status, self.user_id, self.study_id,
                    self.title, self.source, self.unique_identifier, self.duplicate_score, self.needs_paper)
                    )
                self.uid = self.get_new_uid(context)
            else:
                cursor.execute("""
                    UPDATE queued_records
                    SET queued_batch_id = %s, status = %s, user_id = %s, study_id = %s,
                    title = %s, source = %s, unique_identifier = %s, duplicate_score = %s, needs_paper = %s
                    WHERE uid = %s
                    """, (self.queued_batch_id, self.status, self.user_id, self.study_id,
                    self.title, self.source, self.unique_identifier, self.duplicate_score, self.needs_paper,
                    self.uid)
                    )
            # FIXME: should this be set from the SQL?
            self.date_modified = time.strftime(str('%Y-%m-%d'))
            # Metadata is rewritten wholesale: delete all rows, re-insert.
            cursor.execute("""
                DELETE FROM queued_record_metadata
                WHERE queued_record_id = %s
                """, self.uid)
            for key, val in self.metadata.items():
                # FIXME: extra?
                source_id, term_id = key
                if isinstance(val, types.ListType):
                    for value in val:
                        # Automatically save the ordering of each value
                        # NOTE(review): val.index(value) returns the FIRST
                        # occurrence, so duplicate values share a
                        # sequence_position — TODO confirm intended.
                        self.save_metadata_value(context,source_id, term_id,
                            value, sequence_position=val.index(value))
                else:
                    self.save_metadata_value(context,source_id, term_id, val)
            if context.config.use_cache:
                context.cache_set('%s:%s' % (self.CACHE_KEY, self.uid), self)
            # Index, first deleting just in case.
            search_index = canary.search.SearchIndex(context)
            search_index.unindex_record(self)
            search_index.index_record(self)
        except Exception, e:
            context.logger.error('Save queued record: %s (%s)', self.uid, e)
def save_metadata_value (self, context, source_id, term_id, value,
sequence_position=0, extra=None):
cursor = context.get_cursor()
# FIXME: extra?
cursor.execute("""
INSERT INTO queued_record_metadata
(uid, queued_record_id, source_id,
term_id, value, sequence_position, extra)
VALUES (NULL, %s, %s,
%s, %s, %s, NULL)
""", (self.uid, source_id,
term_id, value, sequence_position)
)
def get_status (self, text=False):
try:
status = int(self.status)
except ValueError:
pass
if not text:
return self.status
else:
if status == self.STATUS_UNCLAIMED:
return 'unclaimed'
elif status == self.STATUS_CLAIMED:
return 'claimed'
elif status == self.STATUS_CURATED:
return 'curated'
else:
return ''
def delete (self, context):
cursor = context.get_cursor()
try:
# First, remove the study (connected table records will
# also be deleted). But don't delete non-existent studies.
if self.study_id >= 0:
study = Study(context, self.study_id)
study.delete(context)
# Next, unindex it.
search_index = canary.search.SearchIndex(context)
search_index.unindex_record(self)
# Then, remove the metadata
cursor.execute("""
DELETE FROM queued_record_metadata
WHERE queued_record_id = %s
""", self.uid)
# Finally, remove this record itself.
cursor.execute("""
DELETE FROM queued_records
WHERE uid = %s
""", self.uid)
if context.config.use_cache:
context.cache_delete('record:%s' % self.uid)
except Exception, e:
context.logger.error('Delete queued record %s (%s)', self.uid, e)
class Batch (DTable):
def __init__ (self, uid=-1, file_name='', source_id=-1):
self.uid = uid
self.file_name = file_name
self.source_id = source_id
self.num_records = 0
self.name = ''
self.date_added = ''
self.notes = ''
self.queued_records = {}
self.loaded_records = []
self.logger = logging.getLogger(str(self.__class__))
def add_records (self, records):
for record in records:
self.loaded_records.append(record)
self.num_records += 1
def find_duplicates (self, context, use_loaded=True):
cursor = context.get_cursor()
source_catalog = context.get_source_catalog()
complete_term_map = source_catalog.get_complete_mapping()
source = source_catalog.get_source(self.source_id)
if use_loaded:
for rec in self.loaded_records:
rec.load(context)
rec.check_for_duplicates(context, complete_term_map)
else:
for id, rec in self.queued_records.items():
rec.check_for_duplicates(context, complete_term_map)
def get_statistics (self, context):
"""
Return the state of a Batch based on the number of unclaimed,
claimed, and finished QueuedRecords it contains.
"""
cursor = context.get_cursor()
stats = {}
stats['unclaimed'] = stats['claimed'] = stats['curated'] = 0
cursor.execute("""
SELECT DISTINCT(status), COUNT(*) AS the_count
FROM queued_records
WHERE queued_batch_id = %s
GROUP BY status
ORDER BY status
""", self.uid)
rows = cursor.fetchall()
for row in rows:
if row[0] == QueuedRecord.STATUS_UNCLAIMED:
stats['unclaimed'] = row[1]
elif row[0] == QueuedRecord.STATUS_CLAIMED:
stats['claimed'] = row[1]
elif row[0] == QueuedRecord.STATUS_CURATED:
stats['curated'] = row[1]
stats['total'] = stats['unclaimed'] + stats['claimed'] + stats['curated']
stats['all'] = stats['total']
stats['unfinished'] = stats['unclaimed'] + stats['claimed']
return stats
def load (self, context, show='unfinished', start=0, size=25):
"""
Load a batch and its queued records.
"""
if self.uid == -1:
return
cursor = context.get_cursor()
cursor.execute("""
SELECT *
FROM queued_batches
WHERE uid = %s
""", self.uid)
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
for field in fields:
self.set(field, row[field])
show = str(show)
if show == 'unfinished':
show_clause = ' AND status < %s ' % QueuedRecord.STATUS_CURATED
elif show == 'unclaimed':
show_clause = ' AND status = %s ' % QueuedRecord.STATUS_UNCLAIMED
elif show == 'all':
show_clause = ' AND 1 '
if str(size) == 'all':
limit_clause = ''
else:
limit_clause = ' LIMIT %s, %s ' % (int(start), int(size))
cursor.execute("""
SELECT *
FROM queued_records
WHERE queued_batch_id = %s
""" + show_clause + """
ORDER BY uid
""" + limit_clause, (int(self.uid)))
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
record = QueuedRecord(context, row['uid'])
self.queued_records[record.uid] = record
self.num_records = len(self.queued_records)
def save (self, context):
# Update num_records
self.num_records = len(self.queued_records) + len(self.loaded_records)
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO queued_batches
(uid, file_name, source_id, num_records, name, notes,
date_added)
VALUES
(NULL, %s, %s, %s, %s, %s,
CURDATE())
""", (self.file_name, self.source_id, self.num_records, self.name, self.notes)
)
self.uid = self.get_new_uid(context)
self.date_added = time.strftime(str('%Y-%m-%d'))
else:
cursor.execute("""
UPDATE queued_batches
SET file_name = %s, source_id = %s, num_records = %s, name = %s, notes = %s
WHERE uid = %s
""", (self.file_name, self.source_id, self.num_records, self.name, self.notes,
self.uid)
)
for record in self.loaded_records:
record.queued_batch_id = self.uid
record.save(context)
def delete (self, context):
""" Delete this batch and all of its records
(and all of their respective data)."""
try:
for id, rec in self.queued_records.items():
rec.delete(context)
cursor = context.get_cursor()
cursor.execute("""
DELETE FROM queued_batches
WHERE uid = %s
""", self.uid)
except Exception, e:
self.logger.error(e)
class Parser:
    """Parses a flat-file (or emailed) batch of search results into
    QueuedRecord instances, using the source's record-separator and
    term-token regexes to split records and fields."""
    def __init__ (self, source=None):
        # NOTE(review): source=None will raise AttributeError on the next
        # line; a real source object appears to be required — TODO confirm
        # callers never pass None.
        self.re_result_sep = re.compile(source.re_result_sep)
        self.re_term_token = re.compile(source.re_term_token)
        self.source = source
        self.logger = logging.getLogger(str(self.__class__))
    def parse (self, file_name='', mapped_terms={}, is_email=True, data=[]):
        """Parse raw result lines into a list of QueuedRecords.

        Input comes either from 'data' (a list of lines) or, when data
        is empty and file_name is given, from the named file; with
        is_email=True the file is read as an email message and its
        payload is split into lines.  Returns [] on any read error.

        NOTE(review): mapped_terms={} and data=[] are mutable default
        arguments; neither is mutated here, so this is currently
        harmless, but it is a latent hazard.
        """
        lines = data
        if lines == [] \
            and not file_name =='':
            try:
                file = open(file_name)
                if is_email:
                    data = email.message_from_file(file)
                    lines = data.get_payload().split('\n')
                else:
                    lines = file.read().split('\n')
                file.close()
            except Exception, e:
                self.logger.error(e)
                return []
        records = []
        value = ''
        # current_token/current_value accumulate the field being built;
        # current_record accumulates the record being built.
        current_token = current_value = ''
        current_record = QueuedRecord()
        for line in lines:
            try:
                if self.re_result_sep.match(line):
                    # Matches record separator, so is either first record or new record
                    if len(current_record.metadata) > 0:
                        # Must be new record, but don't miss last token/value for current_record
                        self._add_metadata(current_token, current_value, current_record)
                        mapped_metadata = current_record.get_mapped_metadata(mapped_terms)
                        current_record.title = mapped_metadata['title']
                        current_record.source = mapped_metadata['source']
                        current_record.unique_identifier = mapped_metadata['unique_identifier']
                        records.append(current_record)
                        current_token = current_value = ''
                        current_record = QueuedRecord()
                else:
                    # More info for current_record
                    match = self.re_term_token.match(line)
                    if match:
                        # Line contains token, i.e. is start of value
                        # Note: match.group(0) == line (damn that snake!)
                        token = match.group(1)
                        value = match.group(2)
                        # Flush the previous token/value pair first.
                        self._add_metadata(current_token, current_value, current_record)
                        current_token = token
                        current_value = value
                    else:
                        # Line does not contain token, i.e. is value cont'd or blank
                        if not line.strip() == '':
                            # Line isn't blank, so it continues a value
                            current_value = current_value + ' ' + line.strip()
                        else:
                            # blank line
                            pass
            except Exception, e:
                self.logger.error(e)
                continue
        # Note: we don't catch the last record in the loop above,
        # so do it 'manually' here
        if not current_record == None \
            and not current_record.metadata == {}:
            self._add_metadata(current_token, current_value, current_record)
            mapped_metadata = current_record.get_mapped_metadata(mapped_terms)
            current_record.title = mapped_metadata['title']
            current_record.source = mapped_metadata['source']
            current_record.unique_identifier = mapped_metadata['unique_identifier']
            records.append(current_record)
        return records
    def _add_metadata (self, token, value, record):
        """Resolve token to a source term and attach value to record;
        multivalue terms are split on their separator first.  Unknown
        tokens are silently dropped."""
        term = self.source.get_term_from_token(token)
        if term:
            if term.is_multivalue \
                and not term.re_multivalue_sep == '':
                values = value.split(term.re_multivalue_sep)
                for val in values:
                    record.add_metadata(term, val.strip())
            else:
                record.add_metadata(term, value)
| {
"repo_name": "dchud/sentinel",
"path": "canary/loader.py",
"copies": "1",
"size": "25156",
"license": "mit",
"hash": -8046762084937634000,
"line_mean": 36.2130177515,
"line_max": 108,
"alpha_frac": 0.5065193194,
"autogenerated": false,
"ratio": 4.44138418079096,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.544790350019096,
"avg_score": null,
"num_lines": null
} |
# $Id$
import copy
import logging
import time
import traceback
import types
from quixote import form2
from quixote.html import htmltext
import canary.context
from canary.gazeteer import Feature
from canary.qx_defs import MyForm
from canary.utils import DTable, render_capitalized
import dtuple
class ExposureRoute (DTable):
    """One route of exposure (ingestion, inhalation, ...) belonging to a
    study's Methodology."""
    # A Methodology can have one to many ROUTEs
    ROUTE = {
        '-': -1,
        'ingestion' : 1,
        'inhalation' : 2,
        'mucocutaneous' : 3,
        'vector' : 4,
        'other' : 5,
        }
    def __init__ (self):
        self.uid = -1
        self.study_id = -1
        self.methodology_id = -1
        self.route = self.ROUTE['-']
    def __str__ (self):
        """Return a short XML-ish debugging representation."""
        out = []
        out.append('<Route uid=%s study_id=%s' % (self.uid, self.study_id))
        out.append('\troute=%s' % self.get_text_value(self.ROUTE, self.route))
        out.append('\tmethodology_id=%s' % self.methodology_id)
        out.append('/>')
        return '\n'.join(out)
    def get_text_value (self, lookup_table, value):
        """Reverse-lookup a value's name in lookup_table; '' if absent."""
        for k, v in lookup_table.iteritems():
            if v == value:
                return k
        return ''
    def set_route (self, route):
        """Set the route from either its name or its numeric code;
        unrecognized values are silently ignored."""
        if type(route) is types.StringType:
            if route in self.ROUTE.keys():
                self.route = self.ROUTE[route]
        elif type(route) is types.IntType:
            if route in self.ROUTE.values():
                self.route = route
    def get_route (self, text=False):
        """Return the route code, or its name when text=True."""
        if text:
            return self.get_text_value(self.ROUTE, self.route)
        else:
            return self.route
    def delete (self, context):
        """
        Delete this route from the database.
        """
        cursor = context.get_cursor()
        if not self.uid == -1:
            try:
                cursor.execute("""
                    DELETE FROM exposure_routes
                    WHERE uid = %s
                    """, self.uid)
            except Exception, e:
                context.logger.error('ExposureRoute: %s (%s)', self.uid, e)
    def save (self, context):
        """Insert this route.  With uid == -1 the db assigns a new uid;
        otherwise the row is re-inserted with its existing uid (the
        caller is expected to have deleted the old rows first)."""
        cursor = context.get_cursor()
        if self.uid == -1:
            cursor.execute("""
                INSERT INTO exposure_routes
                (uid, study_id, methodology_id, route)
                VALUES
                (NULL, %s, %s, %s)
                """, (self.study_id, self.methodology_id, self.route)
                )
            self.uid = self.get_new_uid(context)
        else:
            # Assume all calls to save() are after all routes have been removed
            # already by "DELETE FROM exposure_routes" in methodology.save()
            try:
                cursor.execute("""
                    INSERT INTO exposure_routes
                    (uid, study_id, methodology_id, route)
                    VALUES
                    (%s, %s, %s, %s)
                    """, (self.uid, self.study_id, self.methodology_id, self.route)
                    )
            except Exception, e:
                context.logger.error('ExposureRoute: %s (%s)', self.uid, e)
        # FIXME: should this be set from the SQL?
        self.date_modified = time.strftime(str('%Y-%m-%d'))
class Methodology (DTable):
TABLE_NAME = 'methodologies'
# A Methodology must have one TYPE
TYPES = {
'experimental' : 1,
'descriptive' : 2,
'aggregate' : 3,
'cross sectional' : 4,
'cohort' : 5,
'case control' : 6,
'disease model' : 7,
}
# A Methodology can have at most one TIMING
TIMING = {
'-': -1,
'unknown' : 0,
'historical' : 1,
'concurrent' : 2,
'repeated' : 3,
'mixed' : 4,
}
# A Methodology can have at most one SAMPLING
SAMPLING = {
'-': -1,
'unknown' : 0,
'exposure' : 1,
'outcome' : 2,
'both' : 3,
}
# A Methodology can have at most one CONTROLS
CONTROLS = {
'-': -1,
'no' : 0,
'yes' : 1,
'both' : 2,
}
def __init__ (self, uid=-1):
self.uid = uid
self.study_id = -1
self.study_type_id = -1
self.sample_size = ''
self.timing = -1
self.sampling = -1
self.controls = -1
self.is_mesocosm = False
self.is_enclosure = False
self.exposure_routes = []
self.comments = ''
self.date_modified = None
self.date_entered = None
def __str__ (self):
out = []
out.append('<Methodology uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\tstudy_type=%s' % self.get_text_value(self.TYPES, self.study_type_id))
out.append('\tsample_size=%s' % self.sample_size)
for item in ['timing', 'sampling', 'controls', 'exposure_routes']:
out.append('\t%s=%s' % (item, getattr(self, 'get_' + item)(text=True)))
out.append('\tis_mesocosm=%s, is_enclosure=%s' % (self.is_mesocosm, self.is_enclosure))
out.append('\tcomments=%s' % self.comments or '')
out.append('/>')
return '\n'.join(out)
def evidence_level (self):
"""
Return the evidence level relative to the type of study
performed.
"""
text_value = self.get_text_value(self.TYPES, self.study_type_id)
if text_value in ['experimental', 'cohort']:
return 3
elif text_value in ['case control', 'cross sectional', 'aggregate']:
return 2
elif text_value in ['descriptive', 'disease model']:
return 1
else:
return 0
def get_text_value (self, lookup_table, value):
for k, v in lookup_table.iteritems():
if v == value:
return k
return ''
def set_timing (self, timing):
if type(timing) is types.StringType:
if timing in self.TIMING.keys():
self.timing = self.TIMING[timing]
elif type(timing) is types.IntType:
if timing in self.TIMING.values():
self.timing = timing
def get_timing (self, text=False):
if text:
return self.get_text_value(self.TIMING, self.timing)
else:
return self.timing
def set_sampling (self, sampling):
if type(sampling) is types.StringType:
if sampling in self.SAMPLING.keys():
self.sampling = self.SAMPLING[sampling]
elif type(sampling) is types.IntType:
if sampling in self.SAMPLING.values():
self.sampling = sampling
def get_sampling (self, text=False):
if text:
return self.get_text_value(self.SAMPLING, self.sampling)
else:
return self.sampling
def set_controls (self, controls):
if type(controls) is types.StringType:
if controls in self.CONTROLS.keys():
self.controls = self.CONTROLS[controls]
elif type(controls) is types.IntType:
if controls in self.CONTROLS.values():
self.controls = controls
def get_controls (self, text=False):
if text:
return self.get_text_value(self.CONTROLS, self.controls)
else:
return self.controls
def set_routes (self, routes):
for route in routes:
self.add_route(route)
# Remove routes no longer specified
for route in self.exposure_routes:
if not route.get_route() in [r.get_route() for r in routes]:
self.exposure_routes.remove(route)
def add_route (self, route):
if not route.get_route() in [r.get_route() for r in self.exposure_routes]:
route.methodology_id = self.uid
route.study_id = self.study_id
self.exposure_routes.append(route)
def get_routes (self, text=False):
if text:
return [r.get_text_value(r.ROUTE, r.route) for r in self.exposure_routes]
else:
return self.exposure_routes
def set_study_type (self, value):
"""
Each methodology has exactly one type.
"""
if type(value) is types.StringType:
if value in self.TYPES.keys():
self.study_type_id = self.TYPES[value]
elif type(value) == type(htmltext('a')):
str_value = str(value)
if str_value in self.TYPES.keys():
self.study_type_id = self.TYPES[str_value]
elif type(value) is types.IntType:
if value in self.TYPES.values():
self.study_type_id = value
self.update_values()
def get_study_type (self, text=False):
"""
Return the study design type.
"""
if text:
return self.get_text_value(self.TYPES, self.study_type_id)
else:
return self.study_type_id
def update_values (self):
"""
To keep values consistent with methodology type, "blank out"
inapplicable ones; called by set_study_type() on update.
"""
if self.get_study_type() in [
self.TYPES['experimental'],
self.TYPES['descriptive'],
self.TYPES['disease model'],
]:
self.set_timing('-')
if not self.get_study_type() in [
self.TYPES['cross sectional'],
self.TYPES['cohort'],
self.TYPES['case control']
]:
self.set_controls('-')
if not self.get_study_type() in [
self.TYPES['cross sectional']
]:
self.set_sampling('-')
def delete (self, context):
"""
Delete this methodology, and its exposure_routes, from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM methodologies
WHERE uid = %s
""", self.uid)
cursor.execute("""
DELETE FROM exposure_routes
where methodology_id = %s
""", self.uid)
except Exception, e:
context.logger.error('Methodology: %s (%s)', self.uid, e)
def load_routes (self, context):
cursor = context.get_cursor()
cursor.execute("""
SELECT * FROM exposure_routes
WHERE methodology_id = %s
""", (self.uid))
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
exp_route = ExposureRoute()
for field in fields:
exp_route.set(field, row[field])
self.add_route(exp_route)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO methodologies
(uid, study_id, study_type_id,
sample_size, timing,
sampling, controls, comments,
is_mesocosm, is_enclosure,
date_modified, date_entered)
VALUES
(NULL, %s, %s,
%s, %s,
%s, %s, %s,
%s, %s,
NOW(), NOW())
""", (self.study_id, self.study_type_id,
self.sample_size, self.timing,
self.sampling, self.controls, self.comments,
int(self.is_mesocosm), int(self.is_enclosure))
)
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE methodologies
SET study_id = %s, study_type_id = %s,
sample_size = %s, timing = %s,
sampling = %s, controls = %s, comments = %s,
is_mesocosm = %s, is_enclosure = %s,
date_modified = NOW()
WHERE uid = %s
""", (self.study_id, self.study_type_id,
self.sample_size, self.timing,
self.sampling, self.controls, self.comments,
int(self.is_mesocosm), int(self.is_enclosure),
self.uid)
)
except Exception, e:
context.logger.error('Methodology: %s (%s)', self.uid, e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
# Refill these values every time
cursor.execute("""
DELETE FROM exposure_routes
WHERE methodology_id = %s
""", self.uid)
for route in self.exposure_routes:
route.save(context)
def create_form (self, context):
form = MyForm(context)
# all methodology types get a sample size
form.add(form2.StringWidget, 'sample_size',
title='Sample size (study n)',
size=10, value=self.sample_size,
required=False)
# all methodology types get one or more routes
route_options = [(route, text, route) for text, route in ExposureRoute.ROUTE.items()]
# FIXME: what else to do about leaving out the default/empty?
route_options.remove((-1, '-', -1))
select_size = len(route_options)
form.add(form2.MultipleSelectWidget, 'exposure_routes',
title='Routes of exposure (ctrl-click to select or change multiple)',
value=[r.route for r in self.get_routes()],
options=route_options,
size=select_size,
sort=False,
required=True)
# experimental can be is_mesocosm=True
if self.get_study_type() == self.TYPES['experimental']:
form.add(form2.CheckboxWidget, 'is_mesocosm',
title='Is mesocosm?',
value=self.is_mesocosm)
# methodology types except experimental get timing
if not self.get_study_type() == self.TYPES['experimental']:
form.add(form2.SingleSelectWidget, 'timing',
title='Timing',
value=self.get_timing(),
options=[(val, name, val) for name, val in self.TIMING.items()],
sort=True,
required=True)
# all the 'c*' methodology types get controls
if self.get_study_type() in [
self.TYPES['cross sectional'],
self.TYPES['cohort'],
self.TYPES['case control']
]:
form.add(form2.SingleSelectWidget, 'controls',
title='Controls from same population?',
value=self.get_controls(),
options=[(val, name, val) for name, val in self.CONTROLS.items()],
sort=True,
required=True)
# cohort can be is_enclosure=True
if self.get_study_type() == self.TYPES['cohort']:
form.add(form2.CheckboxWidget, 'is_enclosure',
title='Is enclosure?',
value=self.is_enclosure)
# only cross sectional methodologies get sampling
if self.get_study_type() == self.TYPES['cross sectional']:
form.add(form2.SingleSelectWidget, 'sampling',
title='Sampling',
value=self.get_sampling(),
options=[(val, name, val) for name, val in self.SAMPLING.items()],
sort=True,
required=True)
# every methodology type has comments
form.add(form2.TextWidget, 'comments',
title='Comments',
rows='4', cols='60',
wrap='virtual',
value=self.comments)
form.add_submit('update', value='update')
form.add_submit('finish', value='finish')
return form
def process_form (self, form):
# all methodology types get a sample size
if form['sample_size']:
self.sample_size = form['sample_size']
# all methodology types get one or more routes
if form['exposure_routes']:
routes = []
for r in form['exposure_routes']:
route = ExposureRoute()
route.set_route(r)
routes.append(route)
self.set_routes(routes)
else:
form.set_error('exposure_routes', 'You must choose at least one route of exposure.')
# experimental can be is_mesocosm=True
if self.get_study_type() == self.TYPES['experimental']:
if form['is_mesocosm']:
self.is_mesocosm = True
else:
self.is_mesocosm = False
# all methodology types but experimental get timing
if not self.get_study_type() == self.TYPES['experimental']:
if form['timing'] == self.TIMING['-']:
form.set_error('timing', 'You must specifiy the timing.')
else:
self.set_timing(form['timing'])
# all 'c*' methodology types get controls
if self.get_study_type() in [
self.TYPES['cross sectional'],
self.TYPES['cohort'],
self.TYPES['case control']
]:
if form['controls'] == self.CONTROLS['-']:
form.set_error('controls', 'You must specify the controls.')
else:
self.set_controls(form['controls'])
# cohort can be is_enclosure=True
if self.get_study_type() == self.TYPES['cohort']:
if form['is_enclosure']:
self.is_enclosure = True
else:
self.is_enclosure = False
# only cross sectional gets sampling
if self.get_study_type() == self.TYPES['cross sectional']:
if form['sampling'] == self.SAMPLING['-']:
form.set_error('sampling', 'You must specify the sampling.')
else:
self.set_sampling(form['sampling'])
# every methodology type can have comments
if form['comments']:
self.comments = form['comments']
def find_exposures (context, search_term):
    """Search UMLS terms for prefix matches on search_term and return
    Exposure objects, deduplicated by (concept_id, source_id), with
    exact matches (on term or any synonym) moved to the front.

    The previous ranking loop removed/inserted items in the list while
    iterating over it, which can skip elements; it now partitions into
    two lists instead.
    """
    exposures = {}
    if search_term \
        and len(search_term) > 0:
        cursor = context.get_cursor()
        query_term = search_term.strip().replace(' ', '% ') + '%'
        cursor.execute("""
            SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
            FROM umls_terms, umls_concepts, umls_concepts_sources
            WHERE term LIKE %s
            AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
            AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
            ORDER BY term, preferred_name
            """, query_term)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            if not exposures.has_key((row['umls_concept_id'], row['umls_source_id'])):
                exp = Exposure()
                exp.concept_source_id = row['umls_source_id']
                exp.concept_id = row['umls_concept_id']
                exp.term = row['preferred_name']
                exp.synonyms.append(row['term'])
                exposures[(exp.concept_id, exp.concept_source_id)] = exp
            else:
                # Concept already seen: just accumulate the synonym.
                exp = exposures[(row['umls_concept_id'], row['umls_source_id'])]
                if not row['term'] in exp.synonyms:
                    exp.synonyms.append(row['term'])
        # Try to bump up coarse "relevance" of exact matches.  Partition
        # rather than mutating the list during iteration.
        lowered = search_term.lower()
        exact_matches = []
        other_matches = []
        for exp in exposures.values():
            if exp.term.lower() == lowered \
                or lowered in [syn.lower() for syn in exp.synonyms]:
                exact_matches.append(exp)
            else:
                other_matches.append(exp)
        return exact_matches + other_matches
    else:
        return exposures.values()
class Exposure (DTable):
    """A UMLS-coded exposure concept attached to a study."""
    TABLE_NAME = 'exposures'
    # umls_source_id -> human-readable vocabulary name
    UMLS_SOURCES = {
        75: 'MeSH',
        85: 'NCBI Taxonomy',
        501: 'ITIS',
        }
    def __init__ (self):
        self.uid = -1
        self.study_id = -1
        self.concept_id = -1
        self.concept_source_id = -1
        self.term = ''
        self.synonyms = []
    def __str__ (self):
        """Return a short XML-ish debugging representation."""
        out = []
        out.append('<Exposure uid=%s study_id=%s' % (self.uid, self.study_id))
        out.append('\tconcept_id=%s (%s)' % (self.concept_id, self.concept_source_id))
        out.append('\tterm=%s' % self.term)
        out.append('/>')
        return '\n'.join(out)
    def delete (self, context):
        """
        Delete this exposure from the database.
        """
        cursor = context.get_cursor()
        if not self.uid == -1:
            try:
                cursor.execute("""
                    DELETE FROM exposures
                    WHERE uid = %s
                    """, self.uid)
            except Exception, e:
                context.logger.error('Exposure: %s (%s)', self.uid, e)
    def save (self, context):
        """Insert (uid == -1) or update this exposure row."""
        cursor = context.get_cursor()
        if self.uid == -1:
            cursor.execute("""
                INSERT INTO exposures
                (uid, study_id, concept_id,
                concept_source_id, term)
                VALUES
                (NULL, %s, %s,
                %s, %s)
                """, (self.study_id, self.concept_id,
                self.concept_source_id, self.term)
                )
            self.uid = self.get_new_uid(context)
        else:
            try:
                cursor.execute("""
                    UPDATE exposures
                    SET study_id = %s, concept_id = %s,
                    concept_source_id = %s, term = %s
                    WHERE uid = %s
                    """, (self.study_id, self.concept_id,
                    self.concept_source_id, self.term,
                    self.uid)
                    )
            except Exception, e:
                context.logger.error('Exposure: %s (%s)', self.uid, e)
        # FIXME: should this be set from the SQL?
        self.date_modified = time.strftime(str('%Y-%m-%d'))
def find_outcomes (context, search_term):
    """Search UMLS (MeSH only, umls_source_id == 75) for prefix matches
    on search_term and return Outcome objects, deduplicated by
    (concept_id, source_id), exact matches first.

    The previous ranking loop mutated the list while iterating over it
    (skipping elements); it now partitions into two lists instead.
    """
    # Note: for now, limit to only MeSH (umls_source_id==75)
    outcomes = {}
    if search_term \
        and len(search_term) > 0:
        cursor = context.get_cursor()
        query_term = search_term.strip().replace(' ', '% ') + '%'
        cursor.execute("""
            SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
            FROM umls_terms, umls_concepts, umls_concepts_sources
            WHERE term LIKE %s
            AND umls_source_id = %s
            AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
            AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
            ORDER BY term, preferred_name
            """, (query_term, 75))
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            if not outcomes.has_key((row['umls_concept_id'], row['umls_source_id'])):
                outcome = Outcome()
                outcome.concept_source_id = row['umls_source_id']
                outcome.concept_id = row['umls_concept_id']
                outcome.term = row['preferred_name']
                outcome.synonyms.append(row['term'])
                outcomes[(outcome.concept_id, outcome.concept_source_id)] = outcome
            else:
                # Concept already seen: just accumulate the synonym.
                outcome = outcomes[(row['umls_concept_id'], row['umls_source_id'])]
                if not row['term'] in outcome.synonyms:
                    outcome.synonyms.append(row['term'])
        # Try to bump up coarse "relevance" of exact matches.  Partition
        # rather than mutating the list during iteration.
        lowered = search_term.lower()
        exact_matches = []
        other_matches = []
        for outcome in outcomes.values():
            if outcome.term.lower() == lowered \
                or lowered in [syn.lower() for syn in outcome.synonyms]:
                exact_matches.append(outcome)
            else:
                other_matches.append(outcome)
        return exact_matches + other_matches
    else:
        return outcomes.values()
class Outcome (DTable):
    """A UMLS-coded outcome concept attached to a study."""
    TABLE_NAME = 'outcomes'
    # umls_source_id -> human-readable vocabulary name
    UMLS_SOURCES = {
        75: 'MeSH',
        85: 'NCBI Taxonomy',
        501: 'ITIS',
        }
    def __init__ (self):
        self.uid = -1
        self.study_id = -1
        self.concept_id = -1
        self.concept_source_id = -1
        self.term = ''
        self.synonyms = []
    def __str__ (self):
        """Return a short XML-ish debugging representation."""
        out = []
        out.append('<Outcome uid=%s study_id=%s' % (self.uid, self.study_id))
        out.append('\tconcept_id=%s (%s)' % (self.concept_id, self.concept_source_id))
        out.append('\tterm=%s' % self.term)
        out.append('/>')
        return '\n'.join(out)
    def delete (self, context):
        """
        Delete this outcome from the database.
        """
        cursor = context.get_cursor()
        if not self.uid == -1:
            try:
                cursor.execute("""
                    DELETE FROM outcomes
                    WHERE uid = %s
                    """, self.uid)
            except Exception, e:
                context.logger.error('Outcome: %s (%s)', self.uid, e)
    def save (self, context):
        """Insert (uid == -1) or update this outcome row."""
        cursor = context.get_cursor()
        if self.uid == -1:
            cursor.execute("""
                INSERT INTO outcomes
                (uid, study_id, concept_id,
                concept_source_id, term)
                VALUES
                (NULL, %s, %s,
                %s, %s)
                """, (self.study_id, self.concept_id,
                self.concept_source_id, self.term)
                )
            self.uid = self.get_new_uid(context)
        else:
            try:
                cursor.execute("""
                    UPDATE outcomes
                    SET study_id = %s, concept_id = %s,
                    concept_source_id = %s, term = %s
                    WHERE uid = %s
                    """, (self.study_id, self.concept_id,
                    self.concept_source_id, self.term,
                    self.uid)
                    )
            except Exception, e:
                context.logger.error('Outcome: %s (%s)', self.uid, e)
        # FIXME: should this be set from the SQL?
        self.date_modified = time.strftime(str('%Y-%m-%d'))
def find_risk_factors (context, search_term):
    """Search UMLS (MeSH only, umls_source_id == 75) for prefix matches
    on search_term and return RiskFactor objects, deduplicated by
    (concept_id, source_id), exact matches first.

    The previous ranking loop mutated the list while iterating over it
    (skipping elements); it now partitions into two lists instead.
    """
    # Note: for now, limit to only MeSH (umls_source_id==75)
    risk_factors = {}
    if search_term \
        and len(search_term) > 0:
        cursor = context.get_cursor()
        query_term = search_term.strip().replace(' ', '% ') + '%'
        cursor.execute("""
            SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
            FROM umls_terms, umls_concepts, umls_concepts_sources
            WHERE term LIKE %s
            AND umls_source_id = %s
            AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
            AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
            ORDER BY term, preferred_name
            """, (query_term, 75))
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            if not risk_factors.has_key((row['umls_concept_id'], row['umls_source_id'])):
                risk_factor = RiskFactor()
                risk_factor.concept_source_id = row['umls_source_id']
                risk_factor.concept_id = row['umls_concept_id']
                risk_factor.term = row['preferred_name']
                risk_factor.synonyms.append(row['term'])
                risk_factors[(risk_factor.concept_id, risk_factor.concept_source_id)] = risk_factor
            else:
                # Concept already seen: just accumulate the synonym.
                risk_factor = risk_factors[(row['umls_concept_id'], row['umls_source_id'])]
                if not row['term'] in risk_factor.synonyms:
                    risk_factor.synonyms.append(row['term'])
        # Try to bump up coarse "relevance" of exact matches.  Partition
        # rather than mutating the list during iteration.
        lowered = search_term.lower()
        exact_matches = []
        other_matches = []
        for risk_factor in risk_factors.values():
            if risk_factor.term.lower() == lowered \
                or lowered in [syn.lower() for syn in risk_factor.synonyms]:
                exact_matches.append(risk_factor)
            else:
                other_matches.append(risk_factor)
        return exact_matches + other_matches
    else:
        return risk_factors.values()
class RiskFactor (DTable):
    """
    A single study risk factor: a UMLS concept (concept_id within a
    source vocabulary) attached to a study, with its preferred term
    and any synonyms collected at search time.
    """

    # Known UMLS source vocabularies; the risk factor search above is
    # currently limited to MeSH (75).
    UMLS_SOURCES = {
        75: 'MeSH',
        85: 'NCBI Taxonomy',
        501: 'ITIS',
        }

    def __init__ (self):
        # -1 means "not yet saved" for uid and "not yet attached" for
        # study_id/concept ids.
        self.uid = -1
        self.study_id = -1
        self.concept_id = -1
        self.concept_source_id = -1
        self.term = ''
        self.synonyms = []

    def __str__ (self):
        out = []
        out.append('<RiskFactor uid=%s study_id=%s' % (self.uid, self.study_id))
        out.append('\tconcept_id=%s (%s)' % (self.concept_id, self.concept_source_id))
        out.append('\tterm=%s' % self.term)
        out.append('/>')
        return '\n'.join(out)

    def delete (self, context):
        """
        Delete this risk_factor from the database.

        No-op when the instance was never saved (uid == -1); errors are
        logged, not raised.
        """
        cursor = context.get_cursor()
        if not self.uid == -1:
            try:
                cursor.execute("""
                    DELETE FROM risk_factors
                    WHERE uid = %s
                    """, self.uid)
            except Exception, e:
                context.logger.error('RiskFactor: %s (%s)', self.uid, e)

    def save (self, context):
        """
        Insert this risk factor (uid == -1) or update its existing row.

        On insert, uid is refreshed from the database.  Update errors
        are logged, not raised.
        """
        cursor = context.get_cursor()
        if self.uid == -1:
            cursor.execute("""
                INSERT INTO risk_factors
                (uid, study_id, concept_id,
                concept_source_id, term)
                VALUES
                (NULL, %s, %s,
                %s, %s)
                """, (self.study_id, self.concept_id,
                self.concept_source_id, self.term)
                )
            self.uid = self.get_new_uid(context)
        else:
            try:
                cursor.execute("""
                    UPDATE risk_factors
                    SET study_id = %s, concept_id = %s,
                    concept_source_id = %s, term = %s
                    WHERE uid = %s
                    """, (self.study_id, self.concept_id,
                    self.concept_source_id, self.term,
                    self.uid)
                    )
            except Exception, e:
                context.logger.error('RiskFactor: %s (%s)', self.uid, e)
        # FIXME: should this be set from the SQL?
        self.date_modified = time.strftime(str('%Y-%m-%d'))
def find_species (context, search_term):
    """
    Search UMLS terms for species whose terms prefix-match search_term.

    Builds one Species per distinct (umls_concept_id, umls_source_id)
    pair, collecting every matching term as a synonym.  As a coarse
    relevance ranking, species whose preferred term or any synonym
    exactly matches search_term (case-insensitive) are moved to the
    front of the returned list.

    Returns a list of Species instances; an empty/None search_term
    yields an empty result.
    """
    species_map = {}
    if search_term \
        and len(search_term) > 0:
        cursor = context.get_cursor()
        # 'foo bar' -> 'foo% bar%': each word becomes a prefix match.
        query_term = search_term.strip().replace(' ', '% ') + '%'
        cursor.execute("""
            SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
            FROM umls_terms, umls_concepts, umls_concepts_sources
            WHERE term LIKE %s
            AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
            AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
            ORDER BY term, preferred_name
            """, query_term)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            key = (row['umls_concept_id'], row['umls_source_id'])
            if not key in species_map:
                spec = Species()
                spec.concept_source_id = row['umls_source_id']
                spec.concept_id = row['umls_concept_id']
                spec.term = row['preferred_name']
                spec.synonyms.append(row['term'])
                species_map[key] = spec
            else:
                spec = species_map[key]
                if not row['term'] in spec.synonyms:
                    spec.synonyms.append(row['term'])
        # Try to bump up coarse "relevance" of exact matches.
        # NOTE: the previous implementation removed items from the list
        # while iterating over it, which can skip elements; partition
        # into exact matches and the rest instead.
        lowered = search_term.lower()
        exact_matches = []
        others = []
        for spec in species_map.values():
            if spec.term.lower() == lowered \
                or lowered in [syn.lower() for syn in spec.synonyms]:
                exact_matches.append(spec)
            else:
                others.append(spec)
        return exact_matches + others
    else:
        return species_map.values()
class Species (DTable):
    """
    A study species: a UMLS concept attached to a study, plus a set of
    species "types" (companion/livestock/wildlife/laboratory) stored in
    the database as a packed two-letter shorthand string.
    """

    TABLE_NAME = 'species'

    # Known UMLS source vocabularies for species concepts.
    UMLS_SOURCES = {
        75: 'MeSH',
        85: 'NCBI Taxonomy',
        501: 'ITIS',
        }

    # Closed set of species roles; the first two characters of each
    # value form the database shorthand (see set_types/get_types).
    TYPES = [
        'companion',
        'livestock',
        'wildlife',
        'laboratory',
        ]

    def __init__ (self):
        self.uid = -1
        self.study_id = -1
        self.concept_id = -1
        self.concept_source_id = -1
        self.term = ''
        self.synonyms = []
        # Bypass __setattr__'s special-casing of 'types' during init.
        self.__dict__['types'] = []

    def __str__ (self):
        out = []
        out.append('<Species uid=%s study_id=%s' % (self.uid, self.study_id))
        out.append('\tconcept_id=%s (%s)' % (self.concept_id, self.concept_source_id))
        out.append('\tterm=%s' % self.term)
        out.append('\tsynonyms=%s' % '; '.join(self.synonyms))
        out.append('\ttypes=%s' % '; '.join(self.types))
        out.append('/>')
        return '\n'.join(out)

    def __setattr__ (self, name, value):
        # self.types should be a list, but the auto-loader from Study
        # will try to assign it a string.  Catch here, and assume it
        # will be the only time a direct assignment to self.types is
        # called.
        if name == 'types':
            if value.__class__ == ''.__class__:
                self.set_types(value)
            else:
                self.__dict__[name] = value
        else:
            self.__dict__[name] = value

    def add_type (self, type):
        """Add a single type, silently ignoring unknown or duplicate values."""
        if type in self.TYPES:
            if not type in self.types:
                self.types.append(type)

    def clear_types (self):
        """Reset types to an empty list (bypassing __setattr__)."""
        self.__dict__['types'] = []

    def set_types (self, types):
        """
        Replace all types.  Accepts either a packed shorthand string
        ('coli' == companion + livestock, two chars per type) as stored
        in the database, or a list of full type names.
        """
        self.clear_types()
        if types.__class__ == ''.__class__:
            # e.g. {'co': 'companion', 'li': 'livestock', ...}
            type_dict = dict(zip([t[0:2] for t in self.TYPES], self.TYPES))
            # pass through every two chars in types
            for i in range(0, len(types), 2):
                type = types[i:i+2]
                species_type = type_dict.get(type, None)
                if species_type:
                    self.add_type(species_type)
        elif types.__class__ == [].__class__:
            for type in types:
                if type in self.TYPES:
                    self.add_type(type)

    def get_types (self, shorthand=False):
        """Return the type list, or the packed two-letter shorthand string."""
        if shorthand:
            sh = ''.join([type[0:2] for type in self.types])
            return sh
        else:
            return self.types

    def delete (self, context):
        """
        Delete this species from the database.
        """
        cursor = context.get_cursor()
        if not self.uid == -1:
            try:
                cursor.execute("""
                    DELETE FROM species
                    WHERE uid = %s
                    """, self.uid)
            except Exception, e:
                context.logger.error('Species: %s (%s)', self.uid, e)

    def save (self, context):
        """
        Insert (uid == -1) or update this species row; types are stored
        in shorthand form.  Update errors are logged, not raised.
        """
        cursor = context.get_cursor()
        if self.uid == -1:
            cursor.execute("""
                INSERT INTO species
                (uid, study_id, concept_id,
                concept_source_id, term, types)
                VALUES
                (NULL, %s, %s,
                %s, %s, %s)
                """, (self.study_id, self.concept_id,
                self.concept_source_id, self.term, self.get_types(shorthand=True))
                )
            self.uid = self.get_new_uid(context)
        else:
            try:
                cursor.execute("""
                    UPDATE species
                    SET study_id = %s, concept_id = %s,
                    concept_source_id = %s, term = %s, types = %s
                    WHERE uid = %s
                    """, (self.study_id, self.concept_id,
                    self.concept_source_id, self.term, self.get_types(shorthand=True),
                    self.uid)
                    )
            except Exception, e:
                context.logger.error('Species: %s (%s)', self.uid, e)
        # FIXME: should this be set from the SQL?
        self.date_modified = time.strftime(str('%Y-%m-%d'))
class Location (DTable):
TABLE_NAME = 'locations'
def __init__ (self, uid=-1):
self.uid = uid
self.study_id = -1
self.feature_id = -1
self.name = ''
self.country = ''
self.designation = ''
def __str__ (self):
out = []
out.append('<Location uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\tfeature_id=%s' % self.feature_id)
out.append('/>')
return '\n'.join(out)
def delete (self, context):
"""
Delete this location from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM locations
WHERE uid = %s
""", self.uid)
except Exception, e:
context.logger.error('Location: %s (%s)', self.uid, e)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO locations
(uid, study_id, feature_id)
VALUES
(NULL, %s, %s)
""", (self.study_id, self.feature_id))
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE locations
SET study_id = %s, feature_id = %s
WHERE uid = %s
""", (self.study_id, self.feature_id,
self.uid)
)
except Exception, e:
context.logger.error('Location: %s (%s)', self.uid, e)
class Study (canary.context.Cacheable, DTable):
TABLE_NAME = 'studies'
# FIXME: does this only belong here or on loader.QueuedRecord?
# A Study has only one STATUS_TYPE
STATUS_TYPES = {
'unclaimed' : 0,
'claimed' : 1,
'curated' : 2,
}
# A Study has only one ARTICLE_TYPE
ARTICLE_TYPES = {
'unknown' : 0,
'irrelevant' : 1,
'traditional' : 2,
'general' : 3,
'review' : 4,
'outcomes only' : 5,
'exposures only' : 6,
'curated' : 7,
'duplicate' : 8,
}
# For dynamic iteration over related tables
TABLES = {
'methodologies' : Methodology,
'exposures': Exposure,
'risk_factors': RiskFactor,
'outcomes': Outcome,
'species': Species,
'locations': Location,
}
CACHE_KEY = 'study'
def __init__ (self, context=None, uid=-1, record_id=-1):
try:
if self.record_id >= 0:
return
except AttributeError:
pass
self.uid = uid
self.record_id = -1
self.status = self.STATUS_TYPES['unclaimed']
self.article_type = self.ARTICLE_TYPES['unknown']
self.curator_user_id = ''
self.has_outcomes = False
self.has_exposures = False
self.has_relationships = False
self.has_interspecies = False
self.has_exposure_linkage = False
self.has_outcome_linkage = False
self.has_genomic = False
self.comments = ''
self.methodologies = []
self.exposures = []
self.risk_factors = []
self.outcomes = []
self.species = []
self.locations = []
self.date_modified = None
self.date_entered = None
self.date_curated = None
self.history = {}
def __str__ (self):
out = []
out.append('<Study uid=%s record_id=%s' % (self.uid, self.record_id))
out.append('\tstatus=%s' % self.get_text_value(self.STATUS_TYPES, self.status))
out.append('\tcurator_user_id=%s' % self.curator_user_id)
out.append('\tarticle_type=%s' % self.get_text_value(self.ARTICLE_TYPES, self.article_type))
out.append('\thas_outcomes=%s' % self.has_outcomes)
out.append('\thas_exposures=%s' % self.has_exposures)
out.append('\thas_relationships=%s' % self.has_relationships)
out.append('\thas_interspecies=%s' % self.has_interspecies)
out.append('\thas_exposure_linkage=%s' % self.has_exposure_linkage)
out.append('\thas_outcome_linkage=%s' % self.has_outcome_linkage)
out.append('\thas_genomic=%s' % self.has_genomic)
# What are you wanting here? TYPES is not like OUTCOMES, is it?
#for table_name in self.TABLES:
# if len(getattr(self, table_name)) > 0:
# out.append('\t%s=' % table_name + \
# ','.join(getattr(self, 'get_' + table_name)(text=True)))
#if len(self.types) > 0:
# out.append('\ttypes=' + ','.join(self.get_types(text=True)))
out.append('\tcomments=%s' % self.comments or '')
out.append('/>')
return '\n'.join(out)
def get_text_value (self, lookup_table, value):
for k, v in lookup_table.iteritems():
if v == value:
return k
return ''
"""Simple accessors for basic study parameters."""
# FIXME: some of these could be parameterized.
def set_status (self, value):
if value in self.STATUS_TYPES.keys():
self.status = self.STATUS_TYPES[value]
def get_status (self, text=False):
if text:
return self.get_text_value(self.STATUS_TYPES, self.status)
else:
return self.status
def set_article_type (self, value):
try:
if str(value) in self.ARTICLE_TYPES.keys():
self.article_type = self.ARTICLE_TYPES[str(value)]
except:
# FIXME: proper error here
pass
def get_article_type (self, text=False):
if text:
return self.get_text_value(self.ARTICLE_TYPES, self.article_type)
else:
return self.article_type
def get_concept_from_concept (self, concept):
"""
For use in matching searches for exposure/species/outcome against
summary data.
NOTE: not checking 'risk_factor', but that should be refactored in
with a broader concept code refactoring.
"""
for concept_type in ('exposures', 'outcomes', 'species'):
for c in getattr(self, concept_type):
if c.concept_id == concept.uid:
# Eliminate trailing 's'
if concept_type in ('exposures', 'outcomes'):
concept_type = concept_type[:-1]
return c, concept_type
return None, None
def add_methodology (self, methodology):
for meth in self.methodologies:
if meth.uid == methodology.uid:
return
methodology.study_id = self.uid
self.methodologies.append(methodology)
def delete_methodology (self, context, methodology):
for meth in self.methodologies:
if meth.uid == methodology.uid:
self.methodologies.remove(meth)
meth.delete(context)
def get_methodology (self, id):
for methodology in self.methodologies:
if methodology.uid == id:
return methodology
return None
def has_exposure (self, exposure):
"""
Returns True if this exposure has already been added to this Study.
Note that has_exposure may be used before exposure is added,
hence it does not check exposure.uid.
"""
for exp in self.exposures:
if exp.concept_id == exposure.concept_id:
return True
return False
def add_exposure (self, exposure):
if not self.has_exposure(exposure):
exposure.study_id = self.uid
self.exposures.append(exposure)
def delete_exposure (self, context, exposure):
for exp in self.exposures:
if exp.concept_id == exposure.concept_id:
self.exposures.remove(exp)
exp.delete(context)
def get_exposure (self, id):
"""
Return the matching exposure, if added.
Note that get_exposure is for use in matching or deleting exposures,
i.e., only after an exposure has been added to the Study, so uid
matching is required.
"""
for exp in self.exposures:
if exp.uid == id:
return exp
return None
def get_exposure_from_exposure (self, exposure):
for exp in self.exposures:
if exp.concept_id == exposure.concept_id:
return exp
return None
def has_risk_factor (self, risk_factor):
"""
Returns True if this risk_factor has already been added to this Study.
Note that has_risk_factor may be used before risk_factor is added,
hence it does not check risk_factor.uid.
"""
for rf in self.risk_factors:
if rf.concept_id == risk_factor.concept_id:
return True
return False
def add_risk_factor (self, risk_factor):
if not self.has_risk_factor(risk_factor):
risk_factor.study_id = self.uid
self.risk_factors.append(risk_factor)
def delete_risk_factor (self, context, risk_factor):
for rf in self.risk_factors:
if rf.concept_id == risk_factor.concept_id:
self.risk_factors.remove(rf)
rf.delete(context)
def get_risk_factor (self, id):
"""
Return the matching risk_factor, if added.
Note that get_risk_factor is for use in matching or deleting risk_factors,
i.e., only after an risk_factor has been added to the Study, so uid
matching is required.
"""
for risk_factor in self.risk_factors:
if risk_factor.uid == id:
return risk_factor
return None
def get_risk_factor_from_risk_factor (self, risk_factor):
for rf in self.risk_factors:
if rf.concept_id == risk_factor.concept_id:
return rf
return None
def has_outcome (self, outcome):
"""
Returns True if this outcome has already been added to this Study.
Note that has_outcome may be used before outcome is added,
hence it does not check outcome.uid.
"""
for outc in self.outcomes:
if outc.concept_id == outcome.concept_id:
return True
return False
def add_outcome (self, outcome):
if not self.has_outcome(outcome):
outcome.study_id = self.uid
self.outcomes.append(outcome)
def delete_outcome (self, context, outcome):
for outc in self.outcomes:
if outc.concept_id == outcome.concept_id:
self.outcomes.remove(outc)
outc.delete(context)
def get_outcome (self, id):
"""
Return the matching outcome, if added.
Note that get_outcome is for use in matching or deleting outcomes,
i.e., only after an outcome has been added to the Study, so uid
matching is required.
"""
for outcome in self.outcomes:
if outcome.uid == id:
return outcome
return None
def get_outcome_from_outcome (self, outcome):
for outc in self.outcomes:
if outc.concept_id == outcome.concept_id:
return outc
return None
def has_species (self, species):
"""
Returns True if this species has already been added to this Study.
Note that has_species may be used before species is added,
hence it does not check species.uid.
"""
for spec in self.species:
if spec.concept_id == species.concept_id:
return True
return False
def add_species (self, species):
if not self.has_species(species):
species.study_id = self.uid
self.species.append(species)
def delete_species (self, context, species):
for spec in self.species:
if spec.concept_id == species.concept_id:
self.species.remove(spec)
spec.delete(context)
def get_species (self, id):
"""
Return the matching species, if added.
Note that get_species is for use in matching or deleting species,
i.e., only after an species has been added to the Study, so uid
matching is required.
"""
for species in self.species:
if species.uid == id:
return species
return None
def get_species_from_species (self, species):
for spec in self.species:
if spec.concept_id == species.concept_id:
return spec
return None
def has_location (self, location):
"""
Returns True if this location has already been added to this Study.
Note that has_location may be used before location is added,
hence it does not check location.uid.
"""
for loc in self.locations:
if loc.feature_id == location.feature_id:
return True
return False
def has_feature (self, feature):
"""
Returns True if this feature has already been added to this Study.
"""
for loc in self.locations:
if loc.feature_id == feature.uid:
return True
return False
def add_location (self, location):
if not self.has_location(location):
location.study_id = self.uid
self.locations.append(location)
def delete_location (self, context, location):
for loc in self.locations:
if loc.uid == location.uid:
self.locations.remove(loc)
loc.delete(context)
def get_location (self, id):
"""
Return the matching location, if added.
Note that get_location is for use in matching or deleting locations,
i.e., only after an location has been added to the Study, so uid
matching is required.
"""
for loc in self.locations:
if loc.uid == id:
return loc
return None
def get_location_from_feature (self, feature):
for loc in self.locations:
if loc.feature_id == feature.uid:
return loc
return None
def get_locations_sorted (self, context):
"""
For a set of canary record locations, return them in sort order
by lower((country_name, region_name, feature_name)).
"""
gazeteer = context.get_gazeteer()
locs = []
for location in self.locations:
feature = Feature(uid=location.feature_id)
feature.load(context)
if gazeteer.fips_codes.has_key((feature.country_code, feature.adm1)):
region_name = gazeteer.fips_codes[(feature.country_code, feature.adm1)]
else:
region_name = ''
name = feature.name
type = gazeteer.feature_codes[feature.feature_type]
region_name = render_capitalized(region_name)
country_name = render_capitalized(gazeteer.country_codes[feature.country_code])
locs.append(
((country_name.lower(), region_name.lower(), name.lower()),
(name, type, region_name, country_name))
)
locs.sort()
return locs
def get_lat_longs (self, context, dms=False):
"""
For a set of canary record locations, return their latitudes
and longitudes as two lists.
"""
lats = longs = []
for location in self.locations:
feature = Feature(uid=location.feature_id)
feature.load(context)
if dms:
lats.append(feature.dms_latitude)
longs.append(feature.dms_longitude)
else:
lats.append(feature.latitude)
longs.append(feature.longitude)
return lats, longs
def add_history (self, uid=-1, curator_user_id='', message='', modified=''):
"""
Add a history record; only one history record can be added to a
study_history at a time (because the key is set to -1). Maybe
that's bad design. :\
"""
# Convert w/str() in case htmltext is passed by mistake
curator_user_id = str(curator_user_id)
message = str(message)
new_history = {
'uid': uid,
'study_id': self.uid,
'curator_user_id': curator_user_id,
'message': message,
'modified': modified
}
self.history[new_history['uid']] = new_history
def load (self, context):
# Can't load a new study; it hasn't been saved yet.
if self.uid == -1:
return
# Is it already loaded? Convenience check for client calls
# don't need to verify loads from the cache.
if context.config.use_cache:
try:
if self.record_id >= 0:
# Already loaded
return
except AttributeError:
# Note already loaded, so continue
pass
cursor = context.get_cursor()
cursor.execute("""
SELECT *
FROM studies
WHERE uid = %s
""", self.uid)
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
if rows and len(rows) > 0:
row = dtuple.DatabaseTuple(desc, rows[0])
for field in fields:
self.set(field, row[field])
# Every table_class is a DTable
for table_name, table_class in self.TABLES.items():
select_phrase = """SELECT * FROM %s """ % table_name
cursor.execute(select_phrase + """
WHERE study_id = %s
""", (self.uid))
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
table_class_instance = table_class()
for field in fields:
table_class_instance.set(field, row[field])
getattr(self, table_name).append(table_class_instance)
for meth in self.methodologies:
meth.load_routes(context)
cursor.execute("""
SELECT *
FROM study_history
WHERE study_id = %s
""", self.uid)
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
history_record = {}
for field in fields:
history_record[field] = row[field]
self.add_history(uid=history_record['uid'],
curator_user_id=history_record['curator_user_id'],
message=history_record['message'],
modified=history_record['modified'])
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
try:
cursor.execute("""
INSERT INTO studies
(uid,
record_id, status, article_type, curator_user_id,
has_outcomes, has_exposures,
has_relationships, has_interspecies,
has_exposure_linkage, has_outcome_linkage,
has_genomic, comments,
date_modified, date_entered, date_curated)
VALUES
(NULL,
%s, %s, %s, %s,
%s, %s,
%s, %s,
%s, %s,
%s, %s,
NOW(), NOW(), %s)
""", (self.record_id, self.status, self.article_type, self.curator_user_id,
int(self.has_outcomes), int(self.has_exposures),
int(self.has_relationships), int(self.has_interspecies),
int(self.has_exposure_linkage), int(self.has_outcome_linkage),
int(self.has_genomic), self.comments,
self.date_curated)
)
except Exception, e:
context.logger.error('Save study: %s (%s)', self.uid, e)
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE studies
SET record_id = %s, status = %s, article_type = %s, curator_user_id = %s,
has_outcomes = %s, has_exposures = %s,
has_relationships = %s, has_interspecies = %s,
has_exposure_linkage = %s, has_outcome_linkage = %s,
has_genomic = %s, comments = %s,
date_modified = NOW(), date_curated = %s
WHERE uid = %s
""", (self.record_id, self.status, self.article_type, self.curator_user_id,
int(self.has_outcomes), int(self.has_exposures),
int(self.has_relationships), int(self.has_interspecies),
int(self.has_exposure_linkage), int(self.has_outcome_linkage),
int(self.has_genomic), self.comments,
self.date_curated,
self.uid)
)
except Exception, e:
context.logger.error('Update study: %s', e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
# update all the related table values
for table_name in self.TABLES.keys():
for item in getattr(self, table_name):
item.save(context)
# Save new history records; assume only one can be added at a time,
# new record will necessarily have uid == -1
if self.history:
new_history_record = self.history.get(-1, None)
if new_history_record:
try:
cursor.execute("""
INSERT INTO study_history
(uid, study_id, curator_user_id,
message, modified)
VALUES
(NULL, %s, %s,
%s, NOW())
""", (self.uid, new_history_record['curator_user_id'],
new_history_record['message']))
new_history_record_id = self.get_new_uid(context)
del(self.history[-1])
self.history[new_history_record_id] = new_history_record
except Exception, e:
context.logger.error('Save study history: %s (%s)', self.uid, e)
if context.config.use_cache:
# Force reload on next call to flush history times
context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
def delete (self, context):
cursor = context.get_cursor()
try:
for table_name in self.TABLES.keys():
for item in getattr(self, table_name):
item.delete(context)
cursor.execute("""
DELETE FROM studies
WHERE uid = %s
""", self.uid)
if context.config.use_cache:
context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
except Exception, e:
context.logger.error('Delete study: %s', e)
| {
"repo_name": "dchud/sentinel",
"path": "canary/study.py",
"copies": "1",
"size": "63030",
"license": "mit",
"hash": -2656050997565630,
"line_mean": 34.3307174888,
"line_max": 100,
"alpha_frac": 0.5066476281,
"autogenerated": false,
"ratio": 4.104584527220631,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5111232155320631,
"avg_score": null,
"num_lines": null
} |
# $Id$
import dtuple
import re
class SubjectHeading:
    """
    A single MeSH subject heading, parsed into a term, an optional
    qualifier, and an "is focus" flag (a '*' in the raw heading).
    """

    def __repr__ (self):
        return """<SubjectHeading term=%s, is_focus=%s, qualifier=%s>""" % (self.term,
            self.is_focus, self.qualifier)

    def __init__ (self, text=''):
        if not text == '':
            self.parse_text(text)
        else:
            self.text = text
            self.term = ''
            self.qualifier = ''
            self.is_focus = False

    def parse_text (self, text):
        """
        Parse an incoming MeSH string in one of the forms:

            'Growth Substances'
            'Growth Substances*'
            'Growth Substances/genetics'
            'Growth Substances/genetics*'
            'Growth Substances/*genetics'
            'Growth Substances/ge [Genetics]'   (From Ovid Medline)

        ...into its component parts.
        """
        text = text.strip()
        self.text = text
        # '*' anywhere marks a focus heading[/qualifier].
        # (Bools now, matching the __init__ default; previously 1/0.)
        if '*' in text:
            self.is_focus = True
            text = text.replace('*', '')
        else:
            self.is_focus = False
        # '/' indicates there's a qualifier attached to the term
        slash_index = text.find('/')
        if slash_index > 0:
            self.term = text[0:slash_index]
            self.qualifier = text[(slash_index + 1):]
            # Ovid form 'ge [Genetics]': keep the bracketed long form,
            # lowercased.  endswith() also guards the empty-qualifier
            # case (trailing '/'), which used to raise IndexError on
            # self.qualifier[-1].
            if self.qualifier.endswith(']'):
                self.qualifier = self.qualifier[(self.qualifier.index('[') + 1) :
                    (self.qualifier.index(']'))].lower()
        else:
            self.term = text
            self.qualifier = ''
class Record:
    """
    A legacy sentinel-study record loaded by PubMed id, together with
    its MeSH headings, outcomes, methodologies, exposures, and species.
    """

    def __init__ (self):
        self.data = {}
        self.mesh = []
        self.outcomes = []
        self.methodologies = []
        self.exposures = []
        self.species = []

    def set (self, field, value):
        # Store every field as a string attribute; None becomes ''.
        if value == None:
            setattr(self, str(field), str(''))
        else:
            setattr(self, str(field), str(value))
        #self.data[field] = value

    def get (self, field):
        # Raises AttributeError for fields never set().
        return getattr(self, str(field))
        #if self.data.has_key(field):
        #    return self.data[field]
        #else:
        #    return ''

    def get_boolean (self, field):
        # Fields arrive as '0'/'1' strings (see set()); anything other
        # than '0' -- including unset values -- reads as "Yes".
        value = getattr(self, str(field))
        if value == '0':
            return "No"
        else:
            return "Yes"

    def load_by_pmid (self, context, pubmed_id):
        """
        Load the sentinel_studies row matching pubmed_id into attributes
        (via set()), then fetch all related reference_* rows: MeSH
        headings (parsed into SubjectHeading objects), disease/relevance
        outcome pairs, methodologies, exposure agents, and species names.

        Silently does nothing when no row matches.
        """
        cursor = context.get_cursor()
        cursor.execute("""
            SELECT *
            FROM sentinel_studies
            WHERE pubmed_id = %s
            """, pubmed_id)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        row = cursor.fetchone()
        if row:
            row = dtuple.DatabaseTuple(desc, row)
            for field in fields:
                self.set(field, row[field])

            cursor.execute("""
                SELECT mesh_heading
                FROM reference_mesh
                WHERE reference_id = %s
                """, self.reference_id)
            while 1:
                row = cursor.fetchone()
                if row == None:
                    break
                sh = SubjectHeading(row[0])
                self.mesh.append(sh)

            cursor.execute("""
                SELECT human_disease, nature_of_relevance
                FROM reference_disease
                WHERE reference_id = %s
                """, self.reference_id)
            while 1:
                row = cursor.fetchone()
                if row == None:
                    break
                # (disease, relevance) tuples
                self.outcomes.append((row[0], row[1]))

            cursor.execute("""
                SELECT methodology
                FROM reference_methodology
                WHERE reference_id = %s
                """, self.reference_id)
            while 1:
                row = cursor.fetchone()
                if row == None:
                    break
                self.methodologies.append(row[0])

            cursor.execute("""
                SELECT exposure_agent
                FROM reference_exposure
                WHERE reference_id = %s
                """, self.reference_id)
            while 1:
                row = cursor.fetchone()
                if row == None:
                    break
                self.exposures.append(row[0])

            cursor.execute("""
                SELECT species_name
                FROM reference_species
                WHERE reference_id = %s
                """, self.reference_id)
            while 1:
                row = cursor.fetchone()
                if row == None:
                    break
                self.species.append(row[0])
| {
"repo_name": "dchud/sentinel",
"path": "canary/record.py",
"copies": "1",
"size": "4694",
"license": "mit",
"hash": -4549324444631187500,
"line_mean": 27.975308642,
"line_max": 86,
"alpha_frac": 0.4629314018,
"autogenerated": false,
"ratio": 4.411654135338346,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5374585537138347,
"avg_score": null,
"num_lines": null
} |
# $Id$
import dtuple
from canary.utils import DTable
class Feature (DTable):
    """A single gazeteer feature (place) record."""

    def __init__ (self, context=None, uid=-1):
        self.uid = uid
        self.data_source = ''
        # Coordinates, both decimal and degrees-minutes-seconds forms.
        self.latitude = 0.0
        self.longitude = 0.0
        self.dms_latitude = 0
        self.dms_longitude = 0
        # Classification and administrative placement.
        self.feature_type = ''
        self.country_code = ''
        self.adm1 = 0
        self.adm2 = ''
        self.name = ''

    def load (self, context):
        """Populate this Feature from its gazeteer row; no-op if absent."""
        cursor = context.get_cursor()
        cursor.execute("""
            SELECT *
            FROM gazeteer
            WHERE uid = %s
            """, self.uid)
        column_names = [d[0] for d in cursor.description]
        descriptor = dtuple.TupleDescriptor([[name] for name in column_names])
        raw_row = cursor.fetchone()
        if not raw_row:
            return
        record = dtuple.DatabaseTuple(descriptor, raw_row)
        for name in column_names:
            self.set(name, record[name])
class Gazeteer:
    """
    Represents the gazeteer in general, including simple lookup values.
    """

    def __init__ (self):
        self.country_codes = {}
        self.feature_codes = {}
        self.fips_codes = {}

    def load (self, context):
        """Load the country, feature, and fips-code lookup tables."""
        cursor = context.get_cursor()
        cursor.execute("""
            SELECT code, name
            FROM gazeteer_countries
            """)
        rows = cursor.fetchall()
        for row in rows:
            self.country_codes[row[0]] = row[1]

        cursor.execute("""
            SELECT designation, name
            FROM gazeteer_features
            """)
        rows = cursor.fetchall()
        for row in rows:
            self.feature_codes[row[0]] = row[1]

        cursor.execute("""
            SELECT country_code, fips_code, name
            FROM gazeteer_fips_codes
            """)
        rows = cursor.fetchall()
        for row in rows:
            self.fips_codes[(row[0], row[1])] = row[2]

    def search (self, context, feature_name, region='', country='', params=None):
        """
        Full-text search of gazeteer features by name, optionally
        narrowed by region and/or country name prefixes.  Returns up to
        100 Feature objects, with exact name matches bumped to the
        front.  ('params' is unused but kept for interface
        compatibility; its default is now None rather than a shared
        mutable dict.)
        """
        cursor = context.get_cursor()
        results = []
        if region:
            region_name = region.strip() + '%'
            if country:
                country_name = country.strip() + '%'
                cursor.execute("""
                    SELECT gazeteer.uid, gazeteer.name, gazeteer.country_code, gazeteer.adm1,
                    gazeteer.feature_type, gazeteer.latitude, gazeteer.longitude
                    FROM gazeteer, gazeteer_countries, gazeteer_fips_codes
                    WHERE gazeteer_countries.code = gazeteer.country_code
                    AND gazeteer_fips_codes.fips_code = gazeteer.adm1
                    AND MATCH (gazeteer.name) AGAINST (%s)
                    AND gazeteer_countries.name LIKE %s
                    AND gazeteer_fips_codes.name LIKE %s
                    LIMIT 100
                    """, (feature_name, country_name, region_name))
            else:
                cursor.execute("""
                    SELECT gazeteer.uid, gazeteer.name, gazeteer.country_code, gazeteer.adm1,
                    gazeteer.feature_type, gazeteer.latitude, gazeteer.longitude
                    FROM gazeteer, gazeteer_fips_codes
                    WHERE gazeteer_fips_codes.fips_code = gazeteer.adm1
                    AND MATCH (gazeteer.name) AGAINST (%s)
                    AND gazeteer_fips_codes.name LIKE %s
                    LIMIT 100
                    """, (feature_name, region_name))
        elif country:
            country_name = country.strip() + '%'
            cursor.execute("""
                SELECT gazeteer.uid, gazeteer.name, gazeteer.country_code, gazeteer.adm1,
                gazeteer.feature_type, gazeteer.latitude, gazeteer.longitude
                FROM gazeteer, gazeteer_countries
                WHERE gazeteer_countries.code = gazeteer.country_code
                AND MATCH (gazeteer.name) AGAINST (%s)
                AND gazeteer_countries.name LIKE %s
                LIMIT 100
                """, (feature_name, country_name))
        else:
            cursor.execute("""
                SELECT *
                FROM gazeteer
                WHERE MATCH (name) AGAINST (%s)
                LIMIT 100
                """, feature_name)
        # Hoisted out of the fetch loop: the description doesn't change
        # between rows (the old code rebuilt these per row).
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        while 1:
            row = cursor.fetchone()
            if row == None: break
            feature = Feature()
            row = dtuple.DatabaseTuple(desc, row)
            for field in fields:
                feature.set(field, row[field])
            results.append(feature)
        # Try to bump up coarse "relevance" of exact matches.
        # NOTE: the old code removed items from the list while iterating
        # over it (and 'results_ranked' was an alias, not a copy), which
        # can skip elements; partition instead.
        exact_matches = []
        others = []
        for result in results:
            if result.name.lower() == feature_name.lower():
                exact_matches.append(result)
            else:
                others.append(result)
        return exact_matches + others
| {
"repo_name": "dchud/sentinel",
"path": "canary/gazeteer.py",
"copies": "1",
"size": "5159",
"license": "mit",
"hash": 6602684059664104000,
"line_mean": 34.5793103448,
"line_max": 94,
"alpha_frac": 0.5018414421,
"autogenerated": false,
"ratio": 4.097696584590945,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5099538026690945,
"avg_score": null,
"num_lines": null
} |
# $Id$
# importing this module shouldn't directly cause other large imports
# do large imports in the init() hook so that you can call back to the
# ModuleManager progress handler methods.
"""vtk_kit package driver file.
This performs all initialisation necessary to use VTK from DeVIDE. Makes
sure that all VTK classes have ErrorEvent handlers that report back to
the ModuleManager.
Inserts the following modules in sys.modules: vtk, vtkdevide.
@author: Charl P. Botha <http://cpbotha.net/>
"""
import re
import sys
import traceback
import types
VERSION = ''
def preImportVTK(progressMethod):
    """Import the VTK component modules one by one, reporting progress.

    progressMethod is invoked as progressMethod(percentage, message,
    noTime=True) before each import so the caller's UI can show which
    VTK component is currently being loaded.
    """

    # (module name, human-readable progress message) pairs, imported in order.
    import_plan = [('vtk.common', 'VTK Common.'),
                   ('vtk.filtering', 'VTK Filtering.'),
                   ('vtk.io', 'VTK IO.'),
                   ('vtk.imaging', 'VTK Imaging.'),
                   ('vtk.graphics', 'VTK Graphics.'),
                   ('vtk.rendering', 'VTK Rendering.'),
                   ('vtk.hybrid', 'VTK Hybrid.'),
                   #('vtk.patented', 'VTK Patented.'),
                   ('vtk', 'Other VTK symbols')]

    # Set the dynamic loading flags before importing.  If we don't do this,
    # we get strange errors on 64 bit machines (see setDLFlags).
    saved_flags = setDLFlags()

    step = 100.0 / len(import_plan)
    percent = 0.0

    for module_name, message in import_plan:
        percent += step
        progressMethod(percent, 'Initialising vtk_kit: %s' % (message,),
                       noTime=True)
        exec('import %s' % (module_name,))

    # Restore the previous dynamic loading flags.
    resetDLFlags(saved_flags)
def setDLFlags():
    """Switch dlopen() flags to RTLD_NOW|RTLD_GLOBAL, returning the old flags.

    Sharing symbols across extension modules (RTLD_GLOBAL) is needed for
    proper RTTI symbol merging with GCC 3.x: http://gcc.gnu.org/faq.html#dso

    Returns the previous dlopen flags as an int, or None on platforms
    without sys.getdlopenflags (e.g. Windows).  Pass the returned value
    to resetDLFlags() to restore the original state.
    """
    # brought over from ITK Wrapping/CSwig/Python
    #
    # Python "help(sys.setdlopenflags)" states:
    #
    # setdlopenflags(n) -> None
    #
    # Set the flags that will be used for dlopen() calls.  Among other
    # things, this will enable a lazy resolving of symbols when
    # importing a module, if called as sys.setdlopenflags(0)  To share
    # symbols across extension modules, call as
    #
    # sys.setdlopenflags(dl.RTLD_NOW|dl.RTLD_GLOBAL)
    try:
        import dl
        newflags = dl.RTLD_NOW|dl.RTLD_GLOBAL
    except ImportError:
        # No dl module on this platform/Python version, so use the known
        # numeric value of RTLD_NOW|RTLD_GLOBAL (see comment above).
        newflags = 0x102
    try:
        oldflags = sys.getdlopenflags()
        sys.setdlopenflags(newflags)
    except AttributeError:
        # sys.{get,set}dlopenflags only exist on Unix-like platforms.
        oldflags = None
    return oldflags
def resetDLFlags(data):
    """Restore dlopen() flags previously saved by setDLFlags().

    data may be None (when setDLFlags could not read the flags on this
    platform); in that case, or when sys.setdlopenflags does not exist,
    this is a no-op.
    """
    # brought over from ITK Wrapping/CSwig/Python
    try:
        sys.setdlopenflags(data)
    except (AttributeError, TypeError):
        # AttributeError: no sys.setdlopenflags on this platform (Windows).
        # TypeError: data was None because the flags could not be saved.
        pass
def init(module_manager, pre_import=True):
    """Initialise the vtk_kit: import vtk, vtkdevide and the kit submodules.

    module_manager: used only for progress reporting via its setProgress
    method (during the optional pre-import).
    pre_import: when True, import the VTK component libraries one by one
    first so the user sees progress; not necessary for normal functioning.

    Side effects: binds the module globals 'vtk' and 'vtkdevide', imports
    the vtk_kit helper submodules, and sets VERSION from the running VTK.
    """
    # first do the VTK pre-imports: this is here ONLY to keep the user happy
    # it's not necessary for normal functioning
    if pre_import:
        preImportVTK(module_manager.setProgress)
    # import the main module itself
    # the global is so that users can also do:
    #   from module_kits import vtk_kit
    #   vtk_kit.vtk.vtkSomeFilter()
    global vtk
    import vtk
    # and do the same for vtkdevide
    global vtkdevide
    import vtkdevide
    # load up some generic functions into this namespace
    # user can, after import of module_kits.vtk_kit, address these as
    # module_kits.vtk_kit.blaat.  In this case we don't need "global",
    # as these are modules directly in this package.
    import module_kits.vtk_kit.misc as misc
    import module_kits.vtk_kit.mixins as mixins
    import module_kits.vtk_kit.utils as utils
    import module_kits.vtk_kit.constants as constants
    import module_kits.vtk_kit.color_scales as color_scales
    # setup the kit version from the VTK library actually loaded
    global VERSION
    VERSION = '%s' % (vtk.vtkVersion.GetVTKVersion(),)
| {
"repo_name": "fvpolpeta/devide",
"path": "module_kits/vtk_kit/__init__.py",
"copies": "6",
"size": "3965",
"license": "bsd-3-clause",
"hash": -2397312117573458400,
"line_mean": 30.2204724409,
"line_max": 79,
"alpha_frac": 0.6393442623,
"autogenerated": false,
"ratio": 3.7300094073377235,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7369353669637723,
"avg_score": null,
"num_lines": null
} |
# $Id$
import hashlib
import logging
import random
import sha

import canary
from canary import dtuple
from canary.utils import DTable
logger = logging.getLogger('canary.user')
# Copied from dulcinea.user
def hash_password (password):
    """Apply a one way hash function to a password and return the result.

    Returns the hex-encoded SHA-1 digest of password, exactly as the old
    sha.new(password).hexdigest() did.

    NOTE(review): unsalted SHA-1 is weak for password storage by modern
    standards; changing the scheme would invalidate all stored hashes,
    so the digest itself is deliberately left unchanged.
    """
    # hashlib replaces the deprecated 'sha' module; hashlib.sha1(...) and
    # sha.new(...) produce identical hexdigests.
    return hashlib.sha1(password).hexdigest()
def get_user_by_id (context, id):
    """
    Find user for existing user id.

    Returns a loaded User, or None when id is None or no such user exists.
    """
    if id == None:
        return None
    user = User(id=id)
    try:
        user.load(context)
        return user
    # NOTE(review): User.load raises the *string* 'InvalidUserId' (an
    # old-style Python 2 string exception); this handler matches it.
    except 'InvalidUserId', e:
        context.logger.error('Could not load user %s', id)
        return None
def get_user_by_email (context, email):
    """
    Find user for existing account by email address.

    Returns a loaded User, or None when email is None or no user matches.
    """
    if email == None:
        return None
    user = User(email=email)
    try:
        user.load(context)
        return user
    # NOTE(review): User.load raises the Python 2 string exception
    # 'InvalidUserId'; this handler matches that string.
    except 'InvalidUserId', e:
        context.logger.error('Could not load user %s', email)
        return None
def get_user_by_uid (context, uid):
    """
    Find user for existing uid.

    Returns a loaded User, or None when uid is None or loading fails.
    """
    if uid == None:
        return None
    user = User(uid=uid)
    try:
        user.load(context)
        return user
    except Exception, e:
        context.logger.error('Could not load user %s (%s)', uid, e)
        return None
def get_user_by_yale_netid (context, netid):
    """
    Find user for existing Yale netid.

    Looks up the internal user id for the netid, then delegates to
    get_user_by_id; returns None when netid is None or does not match
    exactly one user.
    """
    if netid == None:
        return None
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT id
        FROM users
        WHERE netid = %s
        """, netid)
    matches = cursor.fetchall()
    if len(matches) != 1:
        return None
    return get_user_by_id(context, matches[0][0])
def get_users (context):
    """Load every row of the users table; returns {user.id: User}."""
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT *
        FROM users
        """)
    field_names = [d[0] for d in cursor.description]
    descriptor = dtuple.TupleDescriptor([[name] for name in field_names])
    users = {}
    for raw_row in cursor.fetchall():
        record = dtuple.DatabaseTuple(descriptor, raw_row)
        user = User()
        for name in field_names:
            user.set(name, record[name])
        users[user.id] = user
    return users
class User (DTable):
    """A canary user account, persisted in the 'users' table.

    After load(), also carries the user's saved records (self.records),
    saved sets (self.sets), and a map from set uid to the UserRecords
    assigned to that set (self.record_set_map).
    """

    TABLE_NAME = 'users'

    def __init__ (self, uid=-1, id='', name='', email='', netid=''):
        self.uid = uid
        self.id = id
        # Boolean flags are stored as ints to match the database schema.
        self.is_active = int(True)
        self.is_admin = int(False)
        self.is_editor = int(False)
        self.name = name
        self.passwd = ''
        self.is_assistant = int(False)
        self.email = email
        # NOTE(review): the 'netid' parameter is ignored here and the
        # attribute is always set to '' -- confirm whether that is intended.
        self.netid = ''
        self.token = ''
        self.wants_news = int(False)
        self.searches = []
        # key=record.uid, value=Userrecord
        self.records = {}
        # key=set.uid, value=UserRecord
        self.record_set_map = {}
        self.sets = []
        self.logger = logging.getLogger(str(self.__class__))

    def __str__ (self):
        return self.id or "*no id*"

    # -- Password methods ----------------------------------------------
    # Taken nearly verbatim from dulcinea.user

    def set_password (self, new_password):
        """Set the user's password to 'new_password'."""
        self.passwd = hash_password(new_password)

    def valid_password (self, password):
        """Return true if the provided password is correct."""
        if not password:
            return False
        return self.passwd == hash_password(password)

    def unverify (self):
        """Deactivate the account and issue a random verification token."""
        self.is_active = int(False)
        self.token = str(random.randrange(216688554,753377224))

    def verify (self, token):
        """Re-activate the account when 'token' matches; return success."""
        if token == self.token:
            self.is_active = int(True)
            self.token = ''
            return True
        return False

    def get_id (self):
        """Compatibility method for quixote and old canary code."""
        return self.id

    def load (self, context):
        """Load this user (by self.id, else by self.email), then the user's
        records, sets, and record->set assignments.

        Raises the Python 2 string exception 'InvalidUserId' when no
        matching row exists.
        """
        cursor = context.get_cursor()
        if self.id:
            cursor.execute("""
                SELECT *
                FROM users
                WHERE id LIKE %s
                """, (self.id))
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            row = cursor.fetchone()
            if row:
                row = dtuple.DatabaseTuple(desc, row)
                for field in fields:
                    self.set(field, row[field])
            else:
                self.logger.debug('No user "%s"', self.id)
                raise 'InvalidUserId'
        elif self.email:
            cursor.execute("""
                SELECT *
                FROM users
                WHERE email LIKE %s
                """, (self.email))
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            row = cursor.fetchone()
            if row:
                row = dtuple.DatabaseTuple(desc, row)
                for field in fields:
                    self.set(field, row[field])
            else:
                self.logger.debug('No user "%s"', self.email)
                raise 'InvalidUserId'

        # Load records
        cursor.execute("""
            SELECT *
            FROM user_records
            WHERE user_id = %s
            ORDER BY uid
            """, self.uid)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        for row in cursor.fetchall():
            row = dtuple.DatabaseTuple(desc, row)
            self.records[row['record_id']] = UserRecord(uid=row['uid'],
                user_id=self.uid, record_id=row['record_id'], notes=row['notes'])

        # Load sets
        cursor.execute("""
            SELECT uid
            FROM user_sets
            WHERE user_id = %s
            ORDER BY LOWER(name)
            """, self.uid)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        set_ids = []
        for row in cursor.fetchall():
            row = dtuple.DatabaseTuple(desc, row)
            set_ids.append(row['uid'])
        # Prefer a single multi-key cache fetch; fall back to loading each
        # set individually on any cache miss or error.
        try:
            set_map = context.cache_get_multi(list('%s:%s' % (UserSet.CACHE_KEY, id) for id in set_ids))
            self.sets.extend(list(set_map['%s:%s' % (UserSet.CACHE_KEY, id)] for id in set_ids))
        except:
            for id in set_ids:
                self.sets.append(UserSet(context, id))

        # Load record->set assignments
        for set in self.sets:
            cursor.execute("""
                SELECT record_id
                FROM user_set_records
                WHERE user_set_id = %s
                ORDER BY uid
                """, set.uid)
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            for row in cursor.fetchall():
                row = dtuple.DatabaseTuple(desc, row)
                # Save the id in the set's in-memory list of records
                set.records.append(row['record_id'])
                # Get the whole UserRecord
                rec = self.records[row['record_id']]
                # Save the UserRecord in the set map
                try:
                    self.record_set_map[set.uid].append(rec)
                except:
                    self.record_set_map[set.uid] = [rec]

    def save (self, context):
        """INSERT this user when new (uid == -1), else UPDATE all fields.
        Errors are logged, not raised."""
        cursor = context.get_cursor()
        try:
            if self.uid == -1:
                insert_phrase = 'INSERT INTO %s' % self.TABLE_NAME
                cursor.execute(insert_phrase + """
                    (uid, id, name, passwd, email,
                    is_active, is_admin,
                    is_editor, is_assistant, netid,
                    token, wants_news)
                    VALUES
                    (NULL, %s, %s, %s, %s,
                    %s, %s,
                    %s, %s, %s,
                    %s, %s)
                    """, (self.id, self.name, self.passwd, self.email,
                    int(self.is_active), int(self.is_admin),
                    int(self.is_editor), int(self.is_assistant), self.netid,
                    self.token, self.wants_news)
                    )
                self.uid = self.get_new_uid(context)
                self.logger.info('User %s created with uid %s', self.id, self.uid)
            else:
                update_phrase = 'UPDATE %s ' % self.TABLE_NAME
                cursor.execute(update_phrase + """
                    SET id=%s, passwd=%s, name=%s, email=%s,
                    is_active=%s, is_admin=%s,
                    is_editor=%s, is_assistant=%s, netid=%s,
                    token=%s, wants_news=%s
                    WHERE uid = %s
                    """, (self.id, self.passwd, self.name, self.email,
                    int(self.is_active), int(self.is_admin),
                    int(self.is_editor), int(self.is_assistant), self.netid,
                    self.token, int(self.wants_news),
                    self.uid)
                    )
                self.logger.info('User %s updated', self.id)
        except Exception, e:
            self.logger.error(e)

    def delete (self, context):
        """ Delete this user from the database."""
        cursor = context.get_cursor()
        try:
            if self.uid >= 0:
                cursor.execute("""
                    DELETE FROM users
                    WHERE uid = %s
                    """, self.uid)
        except Exception, e:
            self.logger.error(e)
class UserSet (canary.context.Cacheable, DTable):
    """
    A group of records saved by some user.  UserRecords can be in
    zero-to-many UserSets.

    When a UserRecord is deleted, all references to that record in UserSets
    are deleted.

    When a UserSet is deleted, the user might keep the UserRecord, but the
    set and all its assignments are deleted.
    """

    TABLE_NAME = 'user_sets'

    CACHE_KEY = 'userset'

    CACHE_CHECK_FIELD = 'name'

    load = canary.context.Cacheable.load

    def __init__ (self, context=None, uid=-1, user_id=-1, name='', is_locked=False):
        # If the cache-check field is already set, this instance was
        # restored from the cache fully populated; skip re-initialisation.
        try:
            if getattr(self, self.CACHE_CHECK_FIELD):
                return
        except AttributeError:
            pass
        self.uid = uid
        self.user_id = user_id
        self.name = name
        self.is_locked = is_locked
        # in-memory only: a list of record_ids
        self.records = []
        self.shares = []

    def add (self, context, user_record):
        """Add a record to this set."""
        cursor = context.get_cursor()
        try:
            cursor.execute("""
                INSERT INTO user_set_records
                (uid, user_set_id, record_id)
                VALUES (NULL, %s, %s)
                """, (self.uid, user_record.record_id))
            # Invalidate the cached copy of this set.
            if context.config.use_cache:
                context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
        except Exception, e:
            context.logger.error(e)

    def remove (self, context, user_record):
        """Remove a record from this set."""
        cursor = context.get_cursor()
        try:
            cursor.execute("""
                DELETE FROM user_set_records
                WHERE user_set_id = %s
                AND record_id = %s
                """, (self.uid, user_record.record_id))
            # Invalidate the cached copy of this set.
            if context.config.use_cache:
                context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
        except Exception, e:
            context.logger.error(e)

    def save (self, context):
        """INSERT when new (uid == -1), else UPDATE name/is_locked; the
        saved instance is written back to the cache.  Errors are logged."""
        cursor = context.get_cursor()
        try:
            if self.uid == -1:
                insert_phrase = 'INSERT INTO %s' % self.TABLE_NAME
                cursor.execute(insert_phrase + """
                    (uid, user_id, name, is_locked)
                    VALUES
                    (NULL, %s, %s, %s)
                    """, (self.user_id, self.name, int(self.is_locked))
                    )
                self.uid = self.get_new_uid(context)
                context.logger.info('UserSet "%s" created with uid %s', self.name, self.uid)
            else:
                update_phrase = 'UPDATE %s ' % self.TABLE_NAME
                cursor.execute(update_phrase + """
                    SET name=%s, is_locked=%s
                    WHERE uid = %s
                    """, (self.name, int(self.is_locked),
                    self.uid)
                    )
                context.logger.info('UserSet %s updated', self.uid)

            if context.config.use_cache:
                context.cache_set('%s:%s' % (self.CACHE_KEY, self.uid), self)
        except Exception, e:
            context.logger.error(e)

    def delete (self, context):
        """Delete this set's record assignments, then the set itself, and
        drop it from the cache.  Errors are logged."""
        cursor = context.get_cursor()
        try:
            # First delete all records assigned to this set
            cursor.execute("""
                DELETE FROM user_set_records
                WHERE user_set_id = %s
                """, self.uid)
            # Then remove the set
            delete_phrase = 'DELETE FROM %s' % self.TABLE_NAME
            cursor.execute(delete_phrase + """
                WHERE uid = %s
                """, self.uid)
            DTable.delete(self, context)
            if context.config.use_cache:
                context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
        except Exception, e:
            context.logger.error(e)
class UserRecord (canary.context.Cacheable, DTable):
    """A single record saved by a user, with optional free-text notes."""

    TABLE_NAME = 'user_records'

    CACHE_KEY = 'userrecord'

    # NOTE(review): this class never sets a 'name' attribute, and the
    # cache short-circuit in __init__ checks 'user_id' instead -- confirm
    # whether CACHE_CHECK_FIELD is correct here.
    CACHE_CHECK_FIELD = 'name'

    load = canary.context.Cacheable.load

    def __init__ (self, context=None, uid=-1, user_id=-1, record_id=-1,
        notes='', date_created=None):
        # Instances restored from the cache arrive populated; skip re-init.
        try:
            if self.user_id:
                return
        except AttributeError:
            pass
        self.uid = uid
        self.user_id = user_id
        self.record_id = record_id
        self.notes = notes
        self.date_created = date_created
        self.sets = []

    def save (self, context):
        """INSERT when new (uid == -1, date_created = NOW()); otherwise
        UPDATE (only the notes field).  Errors are logged."""
        cursor = context.get_cursor()
        try:
            if self.uid == -1:
                insert_phrase = 'INSERT INTO %s' % self.TABLE_NAME
                cursor.execute(insert_phrase + """
                    (uid, user_id, record_id, notes, date_created)
                    VALUES
                    (NULL, %s, %s, %s, NOW())
                    """, (self.user_id, self.record_id, self.notes)
                    )
                self.uid = self.get_new_uid(context)
                context.logger.info('UserRecord %s created with uid %s', self.record_id, self.uid)
            else:
                # NOTE: only updates notes field!
                update_phrase = 'UPDATE %s ' % self.TABLE_NAME
                cursor.execute(update_phrase + """
                    SET notes=%s
                    WHERE uid = %s
                    """, (self.notes,
                    self.uid)
                    )
                context.logger.info('UserRecord %s updated', self.uid)
        except Exception, e:
            context.logger.error(e)

    def delete (self, context):
        """Delete this record's set assignments, then the record itself,
        and drop it from the cache.  Errors are logged."""
        cursor = context.get_cursor();
        try:
            # First delete all sets to which this record was assigned
            cursor.execute("""
                DELETE FROM user_set_records
                WHERE record_id = %s
                """, self.record_id)
            # Then delete the record itself
            DTable.delete(self, context)
            if context.config.use_cache:
                context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
        except Exception, e:
            context.logger.error(e)
class UserSearch (canary.context.Cacheable, DTable):
    """A saved user search query, mapped to the 'user_searches' table."""

    TABLE_NAME = 'user_searches'

    CACHE_KEY = 'usersearch'

    def __init__ (self, uid=-1, user_id=-1, query=''):
        self.uid, self.user_id, self.query = uid, user_id, query
        # A new search defaults to never being emailed and carries no
        # creation/viewing timestamps yet.
        self.email_frequency = 'Never'
        self.date_created = None
        self.date_viewed = None
| {
"repo_name": "dchud/sentinel",
"path": "canary/user.py",
"copies": "1",
"size": "16437",
"license": "mit",
"hash": 6351847147434473000,
"line_mean": 31.874,
"line_max": 104,
"alpha_frac": 0.4933990388,
"autogenerated": false,
"ratio": 4.12678885262365,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.512018789142365,
"avg_score": null,
"num_lines": null
} |
# $Id$
import logging
import traceback
import types
import canary.context
from canary.utils import DTable
import dtuple
class Concept (canary.context.Cacheable, DTable):
    """A UMLS concept: a preferred term plus its synonyms and the
    (source id, source code) pairs it appears under."""

    # FIXME: Resolve conflict between Exp/Out/Spec as "concepts" and this
    # 'Concept'; namely that Concept.uid == Exp/Out/Spec.concept_id.
    # Can wait until refactoring.

    CACHE_KEY = 'concept'

    def __init__ (self, context=None, uid=-1, load_synonyms=False):
        # If 'term' is already set, this instance was restored from the
        # cache fully populated; skip re-initialisation.
        try:
            if self.term:
                return
        except AttributeError:
            pass
        self.uid = uid
        self.study_id = -1
        self.concept_source_id = -1
        self.concept_source_code = ''
        self.term = ''
        self.sources = []
        self.synonyms = []

    def load (self, context, load_synonyms=True):
        """Load preferred name and sources for self.uid; optionally load
        all synonym terms too.  No-op when uid == -1."""
        if self.uid == -1:
            return

        # Is it already loaded?  Convenience check for client calls --
        # don't need to verify loads from the cache.
        if context.config.use_cache:
            try:
                if self.term:
                    # Already loaded
                    return
            except AttributeError:
                # Not already loaded, so continue
                pass

        cursor = context.get_cursor()
        cursor.execute("""
            SELECT umls_concepts.preferred_name,
            umls_concepts_sources.umls_source_id,
            umls_concepts_sources.umls_source_code
            FROM umls_concepts, umls_concepts_sources
            WHERE umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
            AND umls_concepts.umls_concept_id = %s
            """, self.uid)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            self.term = row['preferred_name']
            self.sources.append((row['umls_source_id'], row['umls_source_code']))
            self.concept_source_id = row['umls_source_id']
            self.concept_source_code = row['umls_source_code']
        if load_synonyms:
            # NOTE: Is there any value in using umls_term_id?  It's ignored here.
            cursor.execute("""
                SELECT term
                FROM umls_terms
                WHERE umls_concept_id = %s
                """, self.uid)
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            rows = cursor.fetchall()
            for row in rows:
                row = dtuple.DatabaseTuple(desc, row)
                synonym = row['term']
                if not synonym in self.synonyms:
                    self.synonyms.append(synonym)

    def save (self, context, update_all=False):
        """Persist changes.  Currently only preferred_name may be updated,
        and creation of new concepts is disallowed (uid == -1 is a no-op)."""
        # NOTE: For now, do not allow creation of arbitrary concepts
        if self.uid == -1:
            return

        cursor = context.get_cursor()
        # NOTE: For now, only allow update of preferred_name
        cursor.execute("""
            UPDATE umls_concepts
            SET preferred_name = %s
            WHERE umls_concept_id = %s
            """, (self.term, self.uid))

        if context.config.use_cache:
            context.cache_set('%s:%s' % (self.CACHE_KEY, self.uid), self)

    def add_synonym (self, context, term):
        """Insert 'term' as a new synonym row for this concept; new term
        ids are assigned from 20,000,001 upward."""
        cursor = context.get_cursor()
        # If a synonym does not yet exist, add it here, starting at id 20,000,000
        # (5,000,000+ and 10,000,000+ are already in use from ITIS faux-merge)
        if not term in self.synonyms:
            cursor.execute("""
                SELECT MAX(umls_term_id) AS max_id
                FROM umls_terms
                """)
            row = cursor.fetchone()
            current_max = row[0]
            if current_max < 20000000:
                new_max = 20000001
            else:
                new_max = current_max + 1
            cursor.execute("""
                INSERT INTO umls_terms
                (umls_term_id, term, umls_concept_id)
                VALUES (%s, %s, %s)
                """, (new_max, term, self.uid))
def find_concepts (context, search_term):
    """Find UMLS concepts matching search_term.

    search_term may be an int (exact umls_concept_id lookup) or a string
    (LIKE match against synonym terms, with each space expanded to a
    SQL wildcard so partial words match, e.g. "heart dis" -> "heart% dis%").

    Returns a list of Concept instances (with concept_source_id, term and
    synonyms populated).  For string searches, concepts whose preferred
    term or any synonym matches exactly are moved to the front of the
    list; an empty/blank string returns [].
    """
    cursor = context.get_cursor()
    concepts = {}
    # types.IntType is simply int; use the builtin directly.
    if isinstance(search_term, int):
        cursor.execute("""
            SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
            FROM umls_terms, umls_concepts, umls_concepts_sources
            WHERE umls_concepts.umls_concept_id = %s
            AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
            AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
            GROUP BY umls_concepts.umls_concept_id
            ORDER BY term, preferred_name
            """, search_term)
    else:
        # Assumes search_term is text.  Bail out early on an empty term:
        # the original code would otherwise read cursor.description
        # without ever having executed a query.
        if not search_term or len(search_term) == 0:
            return []
        query_term = search_term.strip().replace(' ', '% ') + '%'
        cursor.execute("""
            SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
            FROM umls_terms, umls_concepts, umls_concepts_sources
            WHERE term LIKE %s
            AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
            AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
            GROUP BY umls_concepts.umls_concept_id
            ORDER BY term, preferred_name
            """, query_term)
    fields = [d[0] for d in cursor.description]
    desc = dtuple.TupleDescriptor([[f] for f in fields])
    rows = cursor.fetchall()
    for row in rows:
        row = dtuple.DatabaseTuple(desc, row)
        key = (row['umls_concept_id'], row['umls_source_id'])
        if key not in concepts:
            # First sighting of this (concept, source) pair.
            concept = Concept(uid=row['umls_concept_id'])
            concept.concept_source_id = row['umls_source_id']
            concept.term = row['preferred_name']
            concept.synonyms.append(row['term'])
            concepts[key] = concept
        else:
            # Accumulate additional synonym terms.
            concept = concepts[key]
            if not row['term'] in concept.synonyms:
                concept.synonyms.append(row['term'])
            concepts[key] = concept
    if not isinstance(search_term, int):
        # Try to bump up coarse "relevance" of exact matches.  Iterate over
        # a copy: removing/inserting while iterating the same list would
        # silently skip elements.
        concepts_ranked = list(concepts.values())
        for concept in concepts_ranked[:]:
            if concept.term.lower() == search_term.lower()\
                or search_term.lower() in [syn.lower() for syn in concept.synonyms]:
                concepts_ranked.remove(concept)
                concepts_ranked.insert(0, concept)
        return concepts_ranked
    return list(concepts.values())
class Category (DTable):
    """A categorization scheme: a named set of concept types, groups, and
    assigned concepts (CategoryConcepts)."""

    # The closed set of valid concept types for a category.
    concept_types = [
        'exposure',
        'risk_factor',
        'outcome',
        'species',
        'location',
        ]

    def __init__ (self, uid=-1, name=''):
        self.uid = uid
        self.name = name
        self.types = []
        self.groups = []
        self.concepts = []
        self.logger = logging.getLogger(str(self.__class__))

    def add_type (self, type):
        """Add a known concept type, ignoring duplicates and unknown names."""
        if type in self.concept_types \
            and not type in self.types:
            self.types.append(type)

    def clear_types (self):
        self.types = []

    def set_types (self, types):
        """Replace all types.  'types' is either a shorthand string of
        first letters (e.g. 'eo') or a list of full type names."""
        self.clear_types()
        if types.__class__ == ''.__class__:
            # Map first letter -> full type name, e.g. 'e' -> 'exposure'.
            type_dict = dict(zip([t[0:1] for t in self.concept_types],
                self.concept_types))
            for type in types:
                concept_type = type_dict.get(type, None)
                if concept_type:
                    self.add_type(concept_type)
        elif types.__class__ == [].__class__:
            for type in types:
                if type in self.concept_types:
                    self.add_type(type)

    def get_types (self, shorthand=False):
        """Return the list of types, or their first-letter shorthand string."""
        if shorthand:
            sh = ''.join([type[0:1] for type in self.types])
            return sh
        else:
            return self.types

    def add_group (self, group):
        """Add a CategoryGroup (or a bare group name), skipping duplicates
        by name."""
        if group.__class__ == ''.__class__:
            group = CategoryGroup(name=group, category_id=self.uid)
        if not group.name in [g.name for g in self.groups]:
            self.groups.append(group)

    def clear_groups (self):
        self.groups = []

    def set_groups (self, groups):
        self.clear_groups()
        for group in groups:
            self.add_group(group)

    def get_groups (self):
        return self.groups

    def add_concept (self, concept):
        """Add a CategoryConcept unless its concept_id or uid is already
        present."""
        if not concept.concept_id in [c.concept_id for c in self.concepts] \
            and not concept.uid in [c.uid for c in self.concepts]:
            self.concepts.append(concept)

    def remove_concept (self, context, concept):
        """Remove the matching concept in-memory and delete its DB row;
        deletion errors are logged."""
        cursor = context.get_cursor()
        for c in self.concepts:
            if concept.uid == c.uid:
                self.concepts.remove(c)
                try:
                    cursor.execute("""
                        DELETE FROM category_concepts
                        WHERE uid = %s
                        """, concept.uid)
                except Exception, e:
                    self.logger.error('Could not remove concept %s (%s)', concept.uid, e)

    def update_concept (self, concept):
        # NOTE(review): Category defines no 'remove' method; this looks
        # like it should call remove_concept -- confirm before relying on it.
        self.remove(concept)
        self.add_concept(concept)

    def get_concepts (self):
        return self.concepts

    def load (self, context, load_concepts=False):
        """Load name, types and groups for self.uid; optionally load the
        assigned concepts too.  No-op when uid == -1."""
        if self.uid == -1:
            return

        cursor = context.get_cursor()
        cursor.execute("""
            SELECT *
            FROM categories
            WHERE uid = %s
            """, self.uid)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        row = cursor.fetchone()
        row = dtuple.DatabaseTuple(desc, row)
        self.name = row['name']
        self.set_types(row['concept_types'])

        cursor.execute("""
            SELECT *
            FROM category_groups
            WHERE category_id = %s
            """, self.uid)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            group = CategoryGroup(uid=row['uid'], category_id=self.uid,
                name=row['name'])
            self.add_group(group)

        if load_concepts:
            cursor.execute("""
                SELECT *
                FROM category_concepts
                WHERE category_id = %s
                """, self.uid)
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            rows = cursor.fetchall()
            for row in rows:
                row = dtuple.DatabaseTuple(desc, row)
                cat_concept = CategoryConcept(uid=row['uid'],
                    category_id=self.uid,
                    concept_id=row['concept_id'])
                cat_concept.is_broad = row['is_broad']
                cat_concept.is_default = row['is_default']
                cat_concept.load(context)
                self.add_concept(cat_concept)

    def save (self, context):
        """INSERT when new (uid == -1; also saves groups with the new uid),
        otherwise UPDATE name and types."""
        cursor = context.get_cursor()
        if self.uid == -1:
            cursor.execute("""
                INSERT INTO categories
                (uid, name, concept_types)
                VALUES
                (NULL, %s, %s)
                """, (self.name, self.get_types(shorthand=True)))
            cursor.execute("""
                SELECT LAST_INSERT_ID() AS new_uid
                """)
            row = cursor.fetchone()
            self.uid = row[0]
            for group in self.groups:
                group.category_id = self.uid
                group.save(context)
        else:
            cursor.execute("""
                UPDATE categories
                SET name = %s, concept_types = %s
                WHERE uid = %s
                """, (self.name, self.get_types(shorthand=True), self.uid))
def load_categories (context):
    """Load every Category (with groups, without concepts), ordered by name."""
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT uid
        FROM categories
        ORDER BY name
        """)
    categories = []
    for row in cursor.fetchall():
        cat = Category(uid=row[0])
        cat.load(context)
        categories.append(cat)
    return categories
class CategoryGroup (DTable):
    """A named group within a Category, mapped to 'category_groups'."""

    def __init__ (self, uid=-1, category_id=-1, name=''):
        self.uid, self.category_id, self.name = uid, category_id, name

    def save (self, context):
        """UPDATE this group's name when it already exists; INSERT a new
        row otherwise (uid == -1)."""
        cursor = context.get_cursor()
        if self.uid != -1:
            cursor.execute("""
                UPDATE category_groups
                SET name = %s
                WHERE uid = %s
                """, (self.name, self.uid))
        else:
            cursor.execute("""
                INSERT INTO category_groups
                (uid, category_id, name)
                VALUES
                (NULL, %s, %s)
                """, (self.category_id, self.name))
class CategoryConcept (DTable):
    """The assignment of one Concept to one Category, carrying per-
    assignment flags (is_broad, is_default) and group memberships."""

    def __init__ (self, uid=-1, category_id=-1, concept_id=-1, term=''):
        self.uid = uid
        self.category_id = category_id
        self.concept_id = concept_id
        self.is_broad = False
        self.is_default = False
        self.term = term
        self.groups = []
        self.concept = None
        self.logger = logging.getLogger(str(self.__class__))

    def load (self, context):
        """Load by uid, or by (category_id, concept_id) when uid == -1;
        then load group memberships and the underlying Concept.  Returns
        early (logging a debug message) when nothing matches."""
        cursor = context.get_cursor()
        if self.uid == -1:
            if not self.concept_id == -1 \
                and not self.category_id == -1:
                cursor.execute("""
                    SELECT *
                    FROM category_concepts
                    WHERE category_id = %s
                    AND concept_id = %s
                    """, (self.category_id, self.concept_id))
                fields = [d[0] for d in cursor.description]
                desc = dtuple.TupleDescriptor([[f] for f in fields])
                row = cursor.fetchone()
                if row:
                    row = dtuple.DatabaseTuple(desc, row)
                    self.uid = row['uid']
                    self.is_broad = row['is_broad']
                    self.is_default = row['is_default']
                else:
                    self.logger.debug('No matched rows')
                    return
            else:
                self.logger.debug('Not enough info')
                return
        else:
            cursor.execute("""
                SELECT *
                FROM category_concepts
                WHERE uid = %s
                """, self.uid)
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            row = cursor.fetchone()
            if row:
                row = dtuple.DatabaseTuple(desc, row)
                self.concept_id = row['concept_id']
                self.is_broad = row['is_broad']
                self.is_default = row['is_default']

        # Group memberships for this assignment.
        cursor.execute("""
            SELECT *
            FROM category_concept_groups
            WHERE category_concept_id = %s
            """, self.uid)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            self.groups.append(row['category_group_id'])

        self.concept = Concept(context, self.concept_id)

    def save (self, context):
        """INSERT when new (uid == -1, capturing LAST_INSERT_ID), else
        UPDATE the is_default/is_broad flags."""
        cursor = context.get_cursor()
        if self.uid == -1:
            cursor.execute("""
                INSERT INTO category_concepts
                (uid, category_id, concept_id,
                is_default, is_broad)
                VALUES
                (NULL, %s, %s,
                %s, %s)
                """, (self.category_id, self.concept_id,
                int(self.is_default), int(self.is_broad)))
            cursor.execute("""
                SELECT LAST_INSERT_ID() AS new_uid
                """)
            row = cursor.fetchone()
            self.uid = row[0]
        else:
            cursor.execute("""
                UPDATE category_concepts
                SET is_default = %s,
                is_broad = %s
                WHERE uid = %s
                """, (int(self.is_default), int(self.is_broad), self.uid))
| {
"repo_name": "dchud/sentinel",
"path": "canary/concept.py",
"copies": "1",
"size": "17138",
"license": "mit",
"hash": 6898701715702141000,
"line_mean": 33.4828973843,
"line_max": 89,
"alpha_frac": 0.5041428405,
"autogenerated": false,
"ratio": 4.183060776177691,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5187203616677691,
"avg_score": null,
"num_lines": null
} |
# $Id$
import logging
import traceback
import pyparsing as pyp
import PyLucene
import canary.loader
from canary.concept import Concept
from canary.gazeteer import Feature
from canary.pubmed import Journal
from canary.study import Study
from canary.utils import render_capitalized
# All possible search fields, indexed by all valid forms.
# Maps every accepted user-facing abbreviation/alias (e.g. 'ti', 'title')
# to its canonical index field name (e.g. 'title'); used when expanding
# fielded query tokens into 'fieldname:token' form for lucene.
SEARCH_FIELDS = {
    '1au': 'first-author',
    'ab': 'abstract',
    'abstract': 'abstract',
    'af': 'affiliation',
    'affiliation': 'affiliation',
    'all': 'all',
    'au': 'author',
    'author': 'author',
    'authors': 'author',
    'date': 'pubdate',
    'exp': 'exposures',
    'exposure': 'exposures',
    'exposures': 'exposures',
    'gn': 'grantnum',
    'grantnum': 'grantnum',
    'has-exposures': 'has-exposures',
    'has-exposure-linkage': 'has-exposure-linkage',
    'has-genomic': 'has-genomic',
    'has-interspecies': 'has-interspecies',
    'has-outcomes': 'has-outcomes',
    'has-outcome-linkage': 'has-outcome-linkage',
    'has-relationships': 'has-relationships',
    'is': 'issue',
    'issn': 'issn',
    'issue': 'issue',
    'jn': 'journal',
    'journal': 'journal',
    'keyword': 'keyword',
    'keywords': 'keyword',
    'kw': 'keyword',
    'loc': 'location',
    'location': 'location',
    'locations': 'location',
    'me': 'methodology',
    'meth': 'methodology',
    'methodology': 'methodology',
    'mh': 'subject',
    'out': 'outcomes',
    'outcome': 'outcomes',
    'outcomes': 'outcomes',
    'pd': 'pubdate',
    'page': 'pages',
    'pages': 'pages',
    'pg': 'pages',
    'registrynum': 'registrynum',
    'rf': 'risk-factors',
    'risk-factor': 'risk-factors',
    'risk-factors': 'risk-factors',
    'rn': 'registrynum',
    'sh': 'subject',
    'spec': 'species',
    'species': 'species',
    'subject': 'subject',
    'subjects': 'subject',
    'ti': 'title',
    'title': 'title',
    'ui': 'unique-identifier',
    'uid': 'unique-identifier',
    'unique-identifier': 'unique-identifier',
    'vol': 'volume',
    'volume': 'volume',
    'word': 'keyword',
    'year': 'pubdate',
    }
def disassemble_user_query(s):
    """
    Pre-process user-specified search terms so they may be more easily
    converted into lucene-friendly tokens.

    Input can be any arbitrary string received from the UI.

    Output is a single python list, where each element is either a
    single string (eg. 'foo'), or a two-tuple where the first item is
    the string to search for and the second is an abbreviated field
    name (eg. ('foo', 'ti')).

    Note: eventually the query parser will be smart enough to support
    concept matching.  At present the parser step defers to lucene for
    handling of overly-complex query strings, and hands off the output
    list for re-joining for lucene (see below).

    Presently the PyParsing library (see documentation inside package)
    is used for the parsing step.  This step occurs by default in
    SearchIndex.preprocess_query().  Preprocessing can be turned off by
    specifying "preprocess=False" as a parameter to SearchIndex.search().

    It performs the following tasks:

    * converts any of {'and', 'or', 'not'} to {'AND', 'OR', 'NOT'} in-place
      o "foo and bar" becomes "foo AND bar"
      o "foo and or not bar" becomes "foo AND OR NOT bar"

    * allows field specifications in the form "token [field]" or "token.field."
      o 'foo [ti]' becomes "('foo', 'ti')"
      o 'foo.ti.' becomes "('foo', 'ti')"
      o Note: fields are specified elsewhere
    """
    # Punctuation and (caseless, upcasing) boolean terminals.
    LPAREN = pyp.Literal('(')
    RPAREN = pyp.Literal(')')
    LBRACK = pyp.Literal('[')
    RBRACK = pyp.Literal(']')
    SQUOTE = pyp.Literal("'")
    TILDE = pyp.Literal('~')
    DOT = pyp.Literal('.')
    AND = pyp.CaselessLiteral('AND')
    OR = pyp.CaselessLiteral('OR')
    NOT = pyp.CaselessLiteral('NOT')
    BOOLEANS = AND | OR | NOT
    PLUS = pyp.Literal('+')
    MINUS = pyp.Literal('-')

    # Basic word tokens (allow "'" and ":" in token); optional leading
    # +/- (lucene require/exclude) and trailing ~ (fuzzy) are kept.
    WORD = ~BOOLEANS + pyp.Combine(pyp.Optional(PLUS | MINUS) + \
        pyp.Word(pyp.alphanums + pyp.alphas8bit + "'-:") + \
        pyp.Optional(TILDE))

    # Leave double-quoted strings as a single token
    wTokens = WORD | pyp.dblQuotedString

    # Fielded values in any of the forms specified above; the bracket/dot
    # punctuation is suppressed so only (token, field) remain in the group.
    bracketed_field = pyp.Suppress(LBRACK) + WORD + pyp.Suppress(RBRACK)
    dotted_field = pyp.Suppress(DOT) + WORD + pyp.Suppress(DOT)
    bracketed_token = pyp.Group(wTokens + \
        pyp.Suppress(pyp.Optional(pyp.White())) + \
        bracketed_field)
    dotted_token = pyp.Group(wTokens + dotted_field)
    fielded_tokens = bracketed_token | dotted_token
    tokens = fielded_tokens | wTokens

    # Boolean phrase (may nest)
    bPhrase = pyp.Forward()
    bPhrase << (tokens + BOOLEANS + (tokens | bPhrase))

    # Parenthetical phrase (may nest)
    pPhrase = pyp.Forward()
    query_tokens = tokens ^ BOOLEANS ^ bPhrase ^ pPhrase
    pPhrase << LPAREN + pyp.OneOrMore(query_tokens) + RPAREN

    # Adding it all up
    query = pyp.ZeroOrMore(query_tokens)
    parse_results = query.parseString(s)
    # Return a plain list copy of the ParseResults.
    return parse_results[:]
def reassemble_user_query (l):
    """
    Re-assemble a disassembled user query into a single string.

    Input of the query re-assembly step is the list output of the
    parsing step.  The list is joined using the following patterns:

        * fielded tokens are replaced with their full "lucene form"
            o ('foo', 'ti') becomes 'title:foo'

    Output of the query re-assembly step is a single string intended,
    in turn, for parsing by lucene's QueryParser.
    """
    out = []
    for t in l:
        # Re-arrange fielded tokens into lucene's "field:token" form
        if t.__class__ == pyp.ParseResults:
            field = t[1].lower()
            # Idiom fix: membership test on the dict directly, not .keys()
            if field in SEARCH_FIELDS:
                t = '%s:%s' % (SEARCH_FIELDS[field], t[0])
            else:
                # Unknown field name: fall back to the bare token
                t = t[0]
        out.append(t)
    return ' '.join([str(x) for x in out])
class Search:
    """Database-backed search over queued records.

    Supports three lookups: by canary id (record uid), by free-text
    keyword (LIKE match over metadata values), and by a specific
    metadata field via a caller-supplied term mapping.  Results may be
    restricted to curated and/or uncurated records.
    """

    FIELDS = [
        'author',
        'title',
        'canary id',
        'unique id',
        'keyword',
        ]

    def __init__ (self, field='keyword', token='', allow_curated=True,
        allow_uncurated=False):
        self.field = field
        self.token = token.strip()
        self.allow_curated = allow_curated
        self.allow_uncurated = allow_uncurated
        self.logger = logging.getLogger(str(self.__class__))

    def search (self, context, term_mapping={}):
        """Run the configured search; return a Results object.

        Returns an empty Results on bad input or on any error (errors
        are logged, not raised).  NOTE: term_mapping={} is a mutable
        default, but it is only read here, never mutated.
        """
        results = Results()
        if self.field == '' \
            or self.token == '':
            return results
        cursor = context.get_cursor()
        try:
            if self.field == 'canary id':
                # Direct lookup of a single record by uid
                token = int(self.token)
                record = canary.loader.QueuedRecord(context, token)
                results.add_result(record)
            elif self.field == 'keyword':
                select_clause = """
                SELECT DISTINCT queued_record_metadata.queued_record_id
                FROM queued_record_metadata, queued_records, studies
                WHERE queued_record_metadata.queued_record_id = queued_records.uid
                AND queued_records.study_id = studies.uid
                """
                if self.allow_curated:
                    if self.allow_uncurated:
                        # Can be anything, no need to restrict
                        pass
                    else:
                        # Should be curated articles only
                        select_clause += ' AND queued_records.status = %s ' % \
                            canary.loader.QueuedRecord.STATUS_CURATED
                        select_clause += ' AND studies.article_type > %s ' % \
                            Study.ARTICLE_TYPES['irrelevant']
                else:
                    # Should be uncurated articles only
                    select_clause += ' AND queued_records.status != %s' % \
                        canary.loader.QueuedRecord.STATUS_CURATED
                # Turn every word boundary into a prefix wildcard
                search_token = '%' + self.token.replace(' ', '% ') + '%'
                cursor.execute(select_clause + """
                    AND value LIKE %s
                    """, search_token
                    )
                rows = cursor.fetchall()
                for row in rows:
                    # BUGFIX: was "ueuedRecord(...)", a NameError that the
                    # broad except below silently swallowed.
                    record = canary.loader.QueuedRecord(context, row[0])
                    results.add_result(record)
            else:
                search_token = self.token + '%'
                if self.field in self.FIELDS \
                    and self.field in term_mapping:
                    if self.field == 'title':
                        # Titles match anywhere, not only as a prefix
                        search_token = '%' + search_token
                    select_clause = """
                    SELECT DISTINCT queued_record_metadata.queued_record_id
                    FROM queued_record_metadata, queued_records, studies
                    WHERE queued_record_metadata.queued_record_id = queued_records.uid
                    AND queued_records.study_id = studies.uid
                    AND ("""
                    select_clause += ' OR '.join(['(term_id = %s) ' % \
                        term.uid for term in term_mapping[self.field]])
                    select_clause += ')'
                    if self.allow_curated:
                        if self.allow_uncurated:
                            # Can be anything, no need to restrict
                            pass
                        else:
                            # Should be curated articles only
                            select_clause += ' AND queued_records.status = %s' % \
                                canary.loader.QueuedRecord.STATUS_CURATED
                            select_clause += ' AND studies.article_type > %s ' % \
                                Study.ARTICLE_TYPES['irrelevant']
                    else:
                        # Should be uncurated articles only
                        select_clause += ' AND queued_records.status != %s' % \
                            canary.loader.QueuedRecord.STATUS_CURATED
                    cursor.execute(select_clause + ' AND value LIKE %s ', search_token)
                    rows = cursor.fetchall()
                    for row in rows:
                        record = canary.loader.QueuedRecord(context, row[0])
                        results.add_result(record)
        except Exception as e:
            # BUGFIX: pass the exception as a lazy %-argument so logging
            # can actually render it (the old extra positional arg had no
            # matching placeholder).
            self.logger.error('Unable to perform search: %s', e)
            self.logger.error(traceback.format_stack())
        return results
class Results:
    """Accumulates search-result records in insertion order."""

    def __init__ (self):
        # Ordered list of result records
        self.records = []

    def add_result (self, record):
        """Append one record to the result set."""
        self.records.append(record)

    def get_results (self):
        """Return all accumulated records, oldest first."""
        return self.records
class PubmedSearch:
    """
    Query/download records from Pubmed, one function per EUtility:

    http://eutils.ncbi.nlm.nih.gov/entrez/query/static/eutils_help.html
    """

    def __init__ (self):
        self.efetch_url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id=[[UI]]&mode=text&report=medline&tool=canarydb&[email protected]'
        self.esearch_url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=[[TERMS]]&tool=canarydb&[email protected]'
        self.logger = logging.getLogger(str(self.__class__))

    def fetch (self, ui):
        """
        Fetch one or more records; ui can be a single ui, or a list of uis.
        Returns a single list of text lines to be parsed, or [] on error.

        http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id=15148598&mode=text&report=medline
        """
        import types, urllib
        try:
            if isinstance(ui, types.ListType):
                url = self.efetch_url.replace('[[UI]]',
                    (','.join([str(pmid) for pmid in ui])))
            else:
                url = self.efetch_url.replace('[[UI]]', str(ui))
            data = urllib.urlopen(url)
            return data.read().split('\n')
        except:
            # Network/URL failure: best-effort, caller just gets no lines
            return []

    def _get_text (self, p):
        # Concatenate the data of all direct text-node children of p
        return ''.join([n.data for n in p.childNodes
            if n.nodeType == n.TEXT_NODE])

    def search (self, terms):
        """
        Search for a set of terms; collects the matching Ids and feeds
        them to self.fetch, returning its list of text lines ([] on
        error or no match).
        """
        import types, urllib
        from xml.dom import pulldom
        id_list = []
        try:
            if isinstance(terms, types.ListType):
                # BUGFIX: was "str[term]" (TypeError on str indexing);
                # the call is str(term).
                url = self.esearch_url.replace('[[TERMS]]',
                    urllib.quote_plus((' '.join([str(term) for term in terms]))))
            else:
                url = self.esearch_url.replace('[[TERMS]]',
                    urllib.quote_plus(str(terms)))
            xmls = urllib.urlopen(url).read()
            events = pulldom.parseString(xmls)
            for event, node in events:
                if event == 'START_ELEMENT' \
                    and node.tagName == 'Id':
                    events.expandNode(node)
                    id_list.append(self._get_text(node))
        except Exception as e:
            # BUGFIX: lazy %-argument so the exception is actually shown
            self.logger.error('Unable to search Pubmed: %s', e)
            self.logger.error(traceback.format_stack())
            return []
        if len(id_list) > 0:
            return self.fetch(id_list)
        else:
            return []
class SearchIndex:
    """PyLucene full-text index over queued records and their studies.

    index_record adds one record's metadata, study attributes, UMLS
    concept synonyms, locations and methodologies to the index;
    unindex_record removes a record; search runs a preprocessed user
    query and returns (hits, searcher).
    """

    def __init__ (self, context=None):
        self.context = context
        self.logger = logging.getLogger(str(self.__class__))

    def index_record (self, record, writer=None):
        """Index a single record.

        Opens (and closes) its own index writer unless the caller
        supplies one; a caller-supplied writer is left open.  Errors
        are logged, not raised.
        """
        # PyLucene.Field positional args: name, value, store?, index?, token?
        try:
            if not writer:
                had_writer = False
                writer = self.context.get_search_index_writer(False)
            else:
                had_writer = True
            study = Study(self.context, record.study_id)
            self.logger.debug('starting document')
            doc = PyLucene.Document()
            # First, we need to create a unique key so we can later delete
            # if necessary.  Will try simply uid for now.
            doc.add(PyLucene.Field('uid', str(record.uid),
                True, True, False))
            doc.add(PyLucene.Field('all', str(record.uid),
                True, True, False))
            # Second, save internal-use metadata.  These should probably
            # be x'd out at Query-time.
            doc.add(PyLucene.Field('record-status', str(record.status),
                False, True, False))
            doc.add(PyLucene.Field('article-type', str(study.article_type),
                False, True, False))
            source_catalog = self.context.get_source_catalog()
            complete_term_map = source_catalog.get_complete_mapping()
            mapped_metadata = record.get_mapped_metadata(complete_term_map)
            # First index all the non-multiple metadata fields
            for field in ('abstract', 'affiliation', 'issn',
                'journal', 'pubdate', 'issue', 'pages', 'title',
                'volume'):
                val = mapped_metadata.get(field, None)
                if val:
                    doc.add(PyLucene.Field(field, val,
                        False, True, True))
                    doc.add(PyLucene.Field('all', val,
                        False, True, True))
            # Be sure to index all of (abbrev, full title, issn) as "journal"
            issn = mapped_metadata.get('issn')
            if issn:
                j = Journal()
                j.load_from_issn(self.context, issn)
                no_dash = j.no_dash()
                self.logger.debug('indexing journal: %s, abbv:%s, issn:%s' % \
                    (j.journal_title, j.abbreviation, issn))
                doc.add(PyLucene.Field('journal', issn,
                    False, True, True))
                doc.add(PyLucene.Field('journal', no_dash,
                    False, True, True))
                doc.add(PyLucene.Field('all', issn,
                    False, True, True))
                doc.add(PyLucene.Field('all', no_dash,
                    False, True, True))
                if j.abbreviation:
                    doc.add(PyLucene.Field('journal', j.abbreviation,
                        False, True, True))
                    doc.add(PyLucene.Field('all', j.abbreviation,
                        False, True, True))
                if j.journal_title:
                    doc.add(PyLucene.Field('journal', j.journal_title,
                        False, True, True))
                    doc.add(PyLucene.Field('all', j.journal_title,
                        False, True, True))
            # If a page range is given, index the first page, assuming
            # the delimiter is '-'
            pages = mapped_metadata.get('pages', None)
            if pages \
                and '-' in pages:
                first_page = pages[0:pages.index('-')]
                doc.add(PyLucene.Field('pages', first_page,
                    False, True, True))
                doc.add(PyLucene.Field('all', first_page,
                    False, True, True))
            # 'unique_identifier' must be specially treated because
            # of the '_'
            val = mapped_metadata.get('unique_identifier', None)
            if val:
                doc.add(PyLucene.Field('unique-identifier', val,
                    False, True, True))
                doc.add(PyLucene.Field('all', val,
                    False, True, True))
            # Next, index all the possibly-multiple metadata fields.
            # Give these (especially for author and subject) a little
            # boost, less than for canary UMLS concepts.
            for field in ('author', 'grantnum', 'keyword', 'registrynum',
                'subject'):
                # BUGFIX: default to [] -- the old default of None made the
                # loop below raise TypeError whenever a field was absent.
                vals = mapped_metadata.get(field, [])
                for val in vals:
                    doc.add(PyLucene.Field(field, val,
                        False, True, True))
                    f = PyLucene.Field('all', val,
                        False, True, True)
                    f.setBoost(1.3)
                    doc.add(f)
            # If at least one author name is available, index the first
            # author to support first-author searching.  Also, boost it
            # slightly higher than the other authors.
            authors = mapped_metadata.get('author', None)
            if authors:
                doc.add(PyLucene.Field('first-author', authors[0],
                    False, True, True))
                f = PyLucene.Field('all', authors[0],
                    False, True, True)
                f.setBoost(1.5)
                doc.add(f)
            # All the booleans ("flag" renamed from "bool", which shadowed
            # the builtin)
            for flag in ('has_outcomes', 'has_exposures',
                'has_relationships', 'has_interspecies',
                'has_exposure_linkage', 'has_outcome_linkage',
                'has_genomic'):
                val = getattr(study, flag)
                # NOTE: I think lucene dislikes '_' in field names ??
                flagstr = flag.replace('_', '-')
                doc.add(PyLucene.Field(flagstr, str(int(val)),
                    False, True, False))
                # NOTE: no need to add this to 'all'.  I think.
            # Now, all the UMLS concepts.  Simpler approach to lucene
            # "synonym injection", but it works!  Give it slightly bigger
            # boost than keywords/subjects.
            for ctype in ('exposures', 'outcomes', 'risk_factors',
                'species'):
                # NOTE: I think lucene dislikes '_' in field names ??
                ctype_search = ctype.replace('_', '-')
                for val in getattr(study, ctype):
                    concept = Concept(self.context, val.concept_id)
                    for syn in concept.synonyms:
                        doc.add(PyLucene.Field(ctype_search,
                            unicode(syn, 'latin-1'),
                            False, True, True))
                        f = PyLucene.Field('all', unicode(syn, 'latin-1'),
                            False, True, True)
                        f.setBoost(2.0)
                        doc.add(f)
            # And, the locations
            gazeteer = self.context.get_gazeteer()
            for location in study.locations:
                feature = Feature(self.context, uid=location.feature_id)
                feature.load(self.context)
                if (feature.country_code, feature.adm1) in gazeteer.fips_codes:
                    region_name = gazeteer.fips_codes[(feature.country_code,
                        feature.adm1)]
                else:
                    region_name = ''
                full_name = '%s (%s, %s, %s)' % (feature.name,
                    gazeteer.feature_codes[feature.feature_type],
                    render_capitalized(region_name),
                    render_capitalized(gazeteer.country_codes[feature.country_code]))
                doc.add(PyLucene.Field('location', unicode(full_name, 'latin-1'),
                    False, True, True))
                doc.add(PyLucene.Field('all', unicode(full_name, 'latin-1'),
                    False, True, True))
            # Finally, the methodologies
            for meth in study.methodologies:
                doc.add(PyLucene.Field('methodology',
                    meth.get_study_type(text=True),
                    False, True, True))
                doc.add(PyLucene.Field('all',
                    meth.get_study_type(text=True),
                    False, True, True))
                # And each exposure route term
                for route in meth.get_routes(True):
                    doc.add(PyLucene.Field('exposure_route',
                        route, False, True, True))
                    doc.add(PyLucene.Field('all',
                        route, False, True, True))
            writer.addDocument(doc)
            if not had_writer:
                writer.close()
        except Exception as e:
            self.logger.error('Failed to index record: %s', e)
            # BUGFIX: format_exc() returns the traceback text;
            # print_exc() prints to stderr and returns None.
            self.logger.error(traceback.format_exc())

    def unindex_record (self, record):
        """
        Unindex documents matching this entry's uid.  *Should*
        only be one, but could be many, if somehow the same entry
        got indexed multiple times.
        """
        reader = self.context.get_search_index_reader()
        term = PyLucene.Term('uid', str(record.uid))
        reader.deleteDocuments(term)
        reader.close()

    def search (self, query_string='', require_visible=True,
        allow_curated=True):
        """Run a preprocessed user query against the index.

        Returns (hits, searcher) on success so the caller can keep the
        searcher open while reading hits; returns ([], None) on failure.
        """
        hits = []
        # BUGFIX: searcher must be bound even when the try block fails
        # before assigning it, otherwise the fallback below raised
        # NameError instead of returning ([], None).
        searcher = None
        query_string = str(query_string)
        self.logger.info('Performing search: %s' % query_string)
        disassembled_query = disassemble_user_query(query_string)
        self.logger.debug('Disassembled query: %s' % str(disassembled_query))
        reassembled_query = '+(%s)' % reassemble_user_query(disassembled_query)
        self.logger.debug('Reassembled query: %s', reassembled_query)
        if not allow_curated:
            reassembled_query += \
                ' -record-status:%s' % canary.loader.QueuedRecord.STATUS_CURATED
        if require_visible:
            reassembled_query += ' +article-type:[%s TO %s]' % \
                (Study.ARTICLE_TYPES['traditional'],
                Study.ARTICLE_TYPES['curated'])
            reassembled_query += ' +record-status:%s' % \
                canary.loader.QueuedRecord.STATUS_CURATED
        try:
            searcher = PyLucene.IndexSearcher(PyLucene.FSDirectory.getDirectory(
                self.context.config.search_index_dir, False))
            analyzer = PyLucene.StandardAnalyzer()
            query_parser = PyLucene.QueryParser('all', analyzer)
            query_parser.setOperator(PyLucene.QueryParser.DEFAULT_OPERATOR_AND)
            query = query_parser.parseQuery(reassembled_query)
            self.logger.info('Search query: %s', query)
            hits = searcher.search(query)
            return hits, searcher
        except Exception as e:
            self.logger.error('Search failed: %s', e)
            #self.logger.error(traceback.format_stack())
        if hits \
            and searcher:
            return hits, searcher
        else:
            return [], None
class RecordSearcher:
    """Conduct an index search and get QueuedRecord objects back.

    searcher = RecordSearcher(context)
    records = searcher.search("canary")
    """

    def __init__ (self, context):
        self.context = context
        self.searcher = SearchIndex(context)

    def search (self, query, curated_only=False):
        """Run a pylucene query; return the matching QueuedRecord
        objects, optionally restricted to curated records.
        """
        matches = []
        hits, searcher = self.searcher.search(query)
        for i, doc in hits:
            uid = int(doc.get(str('uid')))
            record = canary.loader.QueuedRecord(self.context, uid)
            # Keep the record unless the curated-only filter excludes it
            if not curated_only \
                or record.status == record.STATUS_CURATED:
                matches.append(record)
        return matches
class StudySearcher:
    """Conduct a lucene search and get back a list of relevant studies.

    searcher = StudySearcher(context)
    studies = searcher.search("canary")
    """

    def __init__ (self, context):
        self.context = context
        self.searcher = RecordSearcher(context)

    def search (self, query):
        """Perform a lucene search for (curated) studies."""
        return [Study(self.context, record.study_id)
            for record in self.searcher.search(query, curated_only=True)]
| {
"repo_name": "dchud/sentinel",
"path": "canary/search.py",
"copies": "1",
"size": "27077",
"license": "mit",
"hash": 7512486513420560000,
"line_mean": 38.8777614138,
"line_max": 174,
"alpha_frac": 0.5082542379,
"autogenerated": false,
"ratio": 4.167615822687394,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5175870060587394,
"avg_score": null,
"num_lines": null
} |
# $Id$
import logging
import dtuple
class ValueGroup:
    """
    A single group of discrete values from the EAV model.
    """

    def __init__ (self, group_name, description, value_group_id=-1,
        allow_multiple=0):
        self.value_group_id = value_group_id
        self.group_name = group_name
        self.description = description
        self.allow_multiple = allow_multiple
        # Maps serial_number -> Value
        self.values = {}

    def __repr__ (self):
        s = []
        s.append("<ValueGroup ")
        s.append("\tvalue_group_id=%d" % self.value_group_id)
        s.append("\tgroup_name=%s" % self.group_name)
        s.append("\tdescription=%s" % self.description)
        # BUGFIX: was "\allow_multiple" -- "\a" is a BEL control char,
        # not a tab like the other lines.
        s.append("\tallow_multiple=%s" % self.allow_multiple)
        s.append("/>")
        return "\n".join(s)

    def add_value (self, value):
        """Register a Value in this group, keyed by its serial number."""
        if value.__class__.__name__ == 'Value':
            self.values[value.serial_number] = value

    def has_value (self, value_id):
        """True if any value in this group has the given value_id."""
        for serial_number, value in self.values.items():
            if value.value_id == value_id:
                return True
        return False

    def delete_value (self, context, value):
        """Remove a value from this group and delete it from the db."""
        if self.has_value(value.value_id):
            del(self.values[value.serial_number])
            value.delete(context)

    def get_value (self, value_id):
        """Return the Value with the given value_id, or None."""
        for serial_number, value in self.values.items():
            if value.value_id == value_id:
                return value

    def get_values (self):
        """Return (serial_number, Value) pairs sorted by serial number."""
        values = [(value.serial_number, value)
            for serial, value in self.values.items()]
        values.sort()
        return values

    def group_size (self):
        return len(self.values)

    def is_multiple (self):
        return self.allow_multiple

    def save (self, context, update_values=True):
        """Insert (value_group_id == -1) or update this group; when
        update_values is true, re-save the group's values as well.
        """
        cursor = context.get_cursor()
        try:
            if self.value_group_id == -1:
                cursor.execute("""
                    INSERT INTO dv_group
                    (dv_group_id, group_name, description, allow_multiple)
                    VALUES (NULL, %s, %s, %s)
                    """, (self.group_name, self.description,
                        int(self.allow_multiple)))
            else:
                # BUGFIX: removed the stray comma before WHERE (SQL syntax
                # error) and put the bind parameters in the same order as
                # the placeholders (allow_multiple was bound to the WHERE
                # id, and vice versa).
                cursor.execute("""
                    UPDATE dv_group
                    SET group_name = %s,
                    description = %s,
                    allow_multiple = %s
                    WHERE dv_group_id = %s
                    """, (self.group_name, self.description,
                        int(self.allow_multiple), self.value_group_id))
            if update_values:
                cursor.execute("""
                    DELETE FROM dv_values
                    WHERE dv_group_id = %s
                    """, self.value_group_id)
                for serial_number in self.values:
                    self.values[serial_number].save(context)
        except Exception:
            # FIXME: log something here.  how to handle?  define Error.
            pass
class Value:
    """
    A single potential value, one of many within a single group,
    from the EAV model.
    """

    def __init__ (self, value_group_id, serial_number, description,
        value_id=-1):
        # Row identifiers from the dv_values table
        self.value_id = value_id
        self.value_group_id = value_group_id
        self.serial_number = serial_number
        self.description = description
        self.logger = logging.getLogger(str(self.__class__))

    def __repr__ (self):
        # Multi-line layout, one attribute per tab-indented line
        parts = [
            "<Value ",
            "\tvalue_id=%d" % self.value_id,
            "\tvalue_group_id=%d" % self.value_group_id,
            "\tserial_number=%d" % self.serial_number,
            "\tdescription=%s" % self.description,
            " />",
            ]
        return "\n".join(parts)

    def save (self, context):
        """Insert (value_id == -1) or update this value in dv_values."""
        cursor = context.get_cursor()
        try:
            if self.value_id == -1:
                cursor.execute("""
                    INSERT INTO dv_values
                    (dv_id, dv_group_id, serial_number, description)
                    VALUES (NULL, %s, %s, %s)
                    """, (self.value_group_id, self.serial_number,
                        self.description))
            else:
                cursor.execute("""
                    UPDATE dv_values
                    SET dv_group_id = %s, serial_number = %s, description = %s
                    WHERE dv_id = %s
                    """, (self.value_group_id, self.serial_number,
                        self.description, self.value_id))
        except:
            # FIXME: define Errors.
            pass

    def delete (self, context):
        """Delete this value's row from dv_values."""
        cursor = context.get_cursor()
        try:
            cursor.execute("""
                DELETE FROM dv_values
                WHERE dv_id = %s
                """, self.value_id)
        except:
            # FIXME: define Errors
            pass
class DBModel:
    """
    Provides access and maintenance functions for EAV-style discrete value
    groups, of which there is only a simple two-table setup in the sentinel
    db as of early July 2003.
    """

    def __init__ (self):
        # Maps value_group_id -> ValueGroup
        self.value_groups = {}

    def load (self, context, debug=0):
        """Load every value group, and each group's values, from the db.

        NOTE(review): allow_multiple is not read from the dv_group row
        here, so loaded groups keep the default -- TODO confirm intended.
        """
        cursor = context.get_cursor()
        # load all value groups
        cursor.execute('SELECT * FROM dv_group')
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            if row is None:
                break
            row = dtuple.DatabaseTuple(desc, row)
            group = ValueGroup(row['group_name'],
                row['description'],
                int(row['dv_group_id']))
            cursor.execute("""
                SELECT * FROM dv_values
                WHERE dv_group_id = %s
                """, group.value_group_id)
            val_fields = [d[0] for d in cursor.description]
            val_desc = dtuple.TupleDescriptor([[f] for f in val_fields])
            val_rows = cursor.fetchall()
            for val_row in val_rows:
                val_row = dtuple.DatabaseTuple(val_desc, val_row)
                value = Value(int(val_row['dv_group_id']),
                    int(val_row['serial_number']),
                    val_row['description'],
                    int(val_row['dv_id']))
                group.add_value(value)
            self.add_group(group)

    def get_value (self, value_id):
        """Return the Value with this value_id from any group, or None."""
        for group_id, group in self.value_groups.items():
            if group.has_value(value_id):
                return group.get_value(value_id)

    def add_group (self, group):
        """Register a ValueGroup; anything else is silently ignored."""
        if not group.__class__.__name__ == "ValueGroup":
            return
        self.value_groups[group.value_group_id] = group

    def get_group (self, group_id):
        """Return the group with this id, or None."""
        if group_id in self.value_groups:
            return self.value_groups[group_id]

    def get_groups (self):
        """Return all groups, ordered by group id."""
        return [self.value_groups[gid]
            for gid in sorted(self.value_groups.keys())]

    def has_group (self, group_name):
        for group in self.get_groups():
            if group.group_name == group_name:
                return True
        return False

    def delete_group (self, context, group_id):
        """Delete a group and all of its values from the db."""
        # BUGFIX: get_cursor was referenced but never called, so every
        # execute below failed (and was hidden by the broad except).
        cursor = context.get_cursor()
        try:
            group_id = int(group_id)
            # FIXME: add existing value error checking
            cursor.execute("""
                DELETE FROM dv_values
                WHERE dv_group_id = %s
                """, group_id)
            cursor.execute("""
                DELETE FROM dv_group
                WHERE dv_group_id = %s
                """, group_id)
        except Exception:
            # FIXME: errors.
            pass

    def get_value_description (self, group_id, serial_number):
        """Return a value's description, or '' when it does not exist."""
        try:
            group = self.value_groups[group_id]
            value = group.values[serial_number]
            return value.description
        except Exception:
            return ''

    def model_size (self):
        return len(self.value_groups)
| {
"repo_name": "dchud/sentinel",
"path": "canary/db_model.py",
"copies": "1",
"size": "8422",
"license": "mit",
"hash": -4312848658477355000,
"line_mean": 30.9015151515,
"line_max": 108,
"alpha_frac": 0.4972690572,
"autogenerated": false,
"ratio": 4.215215215215215,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5212484272415215,
"avg_score": null,
"num_lines": null
} |
# $Id$
import lxml.objectify
import rwproperty
import zope.app.container.interfaces
import zope.component
import zope.interface
import zeit.cms.content.property
import zeit.content.quiz.interfaces
import zeit.content.quiz.container
import zeit.content.quiz.quiz
# Minimal XML skeleton used to seed a newly created Answer object.
ANSWER_TEMPLATE = u"""\
<answer xmlns:py="http://codespeak.net/lxml/objectify/pytype" />"""
class Answer(zeit.content.quiz.container.Contained,
             zeit.content.quiz.quiz.ContentBase):
    """A possible answer to a question of a quiz.
    """

    zope.interface.implements(zeit.content.quiz.interfaces.IAnswer,
                              zope.app.container.interfaces.IContained)

    # Whether this answer is correct; stored at the '.correct' path of
    # the backing XML tree.
    correct = zeit.cms.content.property.ObjectPathProperty('.correct')
    default_template = ANSWER_TEMPLATE

    @rwproperty.getproperty
    def answer(self):
        # Render the 'text' node of the XML tree as HTML.
        return self.convert.to_html(self.get_node('text'))

    @rwproperty.setproperty
    def answer(self, value):
        # Store HTML input back into the 'text' node.
        return self.convert.from_html(self.get_node('text'), value)

    def __eq__(self, other):
        # Equal iff the other object provides IAnswer and both share
        # identical XML content.
        if not zeit.content.quiz.interfaces.IAnswer.providedBy(other):
            return False
        return self.xml == other.xml

    def __ne__(self, other):
        return not (self == other)
# Factory that adapts an XML tree into an Answer content object.
answerFactory = zeit.content.quiz.container.xml_tree_content_adapter(Answer)
@zope.component.adapter(zeit.content.quiz.interfaces.IAnswer)
@zope.interface.implementer(zeit.content.quiz.interfaces.IQuestion)
def get_question(context):
    # Adapter IAnswer -> IQuestion: an answer's question is simply its
    # parent in the containment tree.
    return context.__parent__
| {
"repo_name": "ZeitOnline/zeit.content.quiz",
"path": "src/zeit/content/quiz/answer.py",
"copies": "1",
"size": "1505",
"license": "bsd-3-clause",
"hash": 2036262043217085400,
"line_mean": 26.8703703704,
"line_max": 76,
"alpha_frac": 0.707641196,
"autogenerated": false,
"ratio": 3.4597701149425286,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46674113109425286,
"avg_score": null,
"num_lines": null
} |
# $Id$
import os.path
import jon.wt as wt
import jon.cgi as cgi
class MultiForm(wt.TemplateCode):
    """Multi-stage ("wizard") form controller.

    Subclasses declare `stages` stage classes named stage0..stageN and a
    tuple of form `keys`.  Submitted values accumulate in a `container`
    dict (kept in the session when `name` is set) and are re-applied to
    each stage object on every request.  A hidden "*_stage" form field
    tracks progress; a stage with validation errors pulls the user back
    to it.
    """

    # Session key suffix; None-able via subclass (see _get_name)
    name = "standard"
    # Number of stage classes (stage0 .. stage<stages-1>)
    stages = 0
    # Template filename pattern, filled with the current stage number
    filename = "mf%d.html"
    # Form keys handled by the MultiForm object itself (stage objects
    # declare their own `keys`).  Suffix conventions, per main():
    # "!" = file upload (value taken from .body), "*" = multi-valued.
    keys = ()
    # Emit XHTML-style self-closing tags in generated hidden inputs
    xhtml = 1

    def init(self):
        """Hook: called when a fresh container is created."""
        pass

    def update(self):
        """Hook: called after container/attribute propagation."""
        pass

    def check(self):
        """Hook: validation; append messages to self.errors."""
        pass

    def _get_name(self):
        # Session key for the container, or None when name is unset.
        if self.name:
            return "multiform_%s" % self.name
        return None

    def delete(self):
        """Drop this form's container from the session, if present."""
        key = self._get_name()
        if key is not None and key in self.wt.session:
            del self.wt.session[key]

    def main(self, template):
        key = self._get_name()
        # Find existing container, if it's in the session
        if key is not None:
            self.container = self.wt.session.get(key)
            if self.container is None:
                self.container = {}
                self.wt.session[key] = self.container
                self.init()
        else:
            self.container = {}
            self.init()
        # Determine which stage we're at.  Initialise if this is a new run.
        if key is None:
            self.stage = self.req.params.get("multiform_stage", "")
        else:
            self.stage = self.req.params.get("%s_stage" % key, "")
        try:
            self.stage = int(self.stage)
        except ValueError:
            # No/invalid stage field: treat as a brand-new run
            self.stage = 0
            if key is not None:
                self.container.clear()
                self.init()
        # Create the stage objects.  Update the container with submitted
        # form values.  Update stage objects with container values.
        # Iteration starts at -1 so that the MultiForm object itself is
        # processed (as stage_obj) before the numbered stages.
        self.stage_objs = []
        for i in range(-1, self.stages):
            if i >= 0:
                self.stage_objs.append(getattr(self, "stage%d" % i)(self))
                self.stage_objs[i].container = self.container
                self.stage_objs[i].errors = []
                stage_obj = self.stage_objs[i]
            else:
                stage_obj = self
            # NOTE(review): this loop reuses (clobbers) the outer `key`;
            # `key` is not read again afterwards, but renaming would be
            # safer.
            for key in stage_obj.keys:
                # Strip the "!"/"*" suffix to get the container key
                if key.endswith("!*"):
                    ckey = key[:-2]
                elif key.endswith("!") or key.endswith("*"):
                    ckey = key[:-1]
                else:
                    ckey = key
                if key in self.req.params:
                    value = None
                    if key.endswith("!"):
                        # File upload: only keep it if a body was sent
                        if self.req.params[key].body:
                            value = self.req.params[key].body
                    else:
                        value = self.req.params[key]
                    if value is not None:
                        # A stage-level update_<key> hook takes precedence
                        # over storing the raw value in the container
                        if hasattr(stage_obj, "update_%s" % ckey):
                            getattr(stage_obj, "update_%s" % ckey)(value)
                        else:
                            self.container[ckey] = value
                # Mirror the container value onto the stage object so
                # templates can reference it directly
                if not hasattr(stage_obj, ckey):
                    setattr(stage_obj, ckey, self.container.get(ckey, ""))
        # Check for errors and adjust stage as necessary.
        self.errors = []
        self.update()
        self.check()
        for i in range(self.stage):
            self.stage_objs[i].update()
            self.stage_objs[i].check()
            if self.stage_objs[i].errors:
                # An earlier stage failed validation: collect its errors
                # and send the user back to the earliest failing stage
                self.errors += self.stage_objs[i].errors
                if self.stage > i:
                    self.stage = i
        # Display appropriate stage object.
        template = os.path.dirname(self.wt.template) + "/" + self.filename \
            % self.stage
        obj = self.stage_objs[self.stage]
        if obj.template_as_file:
            obj.main(open(template, "rb"))
        else:
            encoding = self.wt.get_template_encoding()
            if encoding is None:
                obj.main(open(template, "rb").read())
            else:
                obj.main(unicode(open(template, "rb").read(), encoding))
class Stage(wt.TemplateCode):
    """Base class for one stage of a MultiForm.

    `form()` emits the hidden inputs that carry the stage number and all
    container values not owned by the current stage, so state survives
    the round-trip.  `update`/`check` are validation hooks, and the
    nested `header` template renders the accumulated error list.
    """

    # Form keys owned by this stage (same suffix conventions as
    # MultiForm.keys)
    keys = ()

    def form(self):
        """Return hidden <input> tags for stage number + carried state."""
        close = ""
        if self.outer.xhtml:
            close = " /"
        if self.outer.name is not None:
            # Session-backed form: only the stage counter is needed
            return '<input type="hidden" name="multiform_%s_stage" value="%d"%s>' % \
                (cgi.html_encode(self.outer.name), self.outer.stage + 1, close)
        # Session-less form: replay every container value that the
        # current stage does not itself submit
        s = ['<input type="hidden" name="multiform_stage" value="%d"%s>' % \
            (self.outer.stage + 1, close)]
        for key in self.outer.container.keys():
            if key not in self.outer.stage_objs[self.outer.stage].keys:
                # "*"-suffixed keys hold multiple values
                for value in (self.outer.container[key] if key.endswith("*")
                    else (self.outer.container[key],)):
                    s.append('<input type="hidden" name="%s" value="%s"%s>' % \
                        (cgi.html_encode(key), cgi.html_encode(value), close))
        return "".join(s)

    def update(self):
        """Hook: apply submitted values; override in subclasses."""
        pass

    def check(self):
        """Hook: validation; append messages to self.errors."""
        pass

    class header(wt.TemplateCode):
        # Template section that shows either the error list or the
        # "no errors" branch of the stage template.

        class errors(wt.TemplateCode):
            class error(wt.TemplateCode):
                def main(self, template):
                    # Four .outer hops: error -> errors -> header ->
                    # Stage -> MultiForm, which owns the error list
                    for self.error in self.outer.outer.outer.outer.errors:
                        self.process(template)

        class noerrors(wt.TemplateCode):
            pass

        def main(self, template):
            # header.outer is the Stage, .outer.outer the MultiForm
            if self.outer.outer.errors:
                self.process(template, "errors")
            else:
                self.process(template, "noerrors")
| {
"repo_name": "jribbens/jonpy",
"path": "jon/wt/multiform.py",
"copies": "1",
"size": "4565",
"license": "mit",
"hash": -7116683247324626000,
"line_mean": 26.8353658537,
"line_max": 79,
"alpha_frac": 0.5798466594,
"autogenerated": false,
"ratio": 3.5305491105955142,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.953076329488923,
"avg_score": 0.015926495021257037,
"num_lines": 164
} |
# $Id$
import re
import urllib
import dtuple
import canary.context
from canary.utils import DTable
class Journal (canary.context.Cacheable, DTable):
    """
    A simple structure containing exactly one row of the NLM MEDLINE
    journal list, stored locally in the db as "medline_journals".

    http://www.ncbi.nlm.nih.gov/entrez/citmatch_help.html#JournalLists
    """

    # Cache key prefixes: primary lookup by uid, alternate lookup by issn
    CACHE_KEY = 'journal'
    ALT_CACHE_KEY = 'issn'

    def __init__ (self, context=None, uid=-1):
        # If this instance was served from the cache it is already
        # populated; skip re-initialization (same guard as in load()).
        try:
            if self.journal_title:
                return
        except AttributeError:
            pass
        self.uid = uid
        self.journal_title = ''
        self.abbreviation = ''
        self.issn = ''
        self.eissn = ''
        self.iso_abbr = ''
        self.nlm_id = ''

    def no_dash (self):
        """Return the ISSN without the dash."""
        # Assumes the standard "XXXX-XXXX" ISSN layout -- TODO confirm
        return self.issn[0:4] + self.issn[5:]

    def load_from_issn (self, context, issn):
        """Populate this object from the db row matching `issn`.

        Checks the cache first; on a db hit, primes both the uid-keyed
        and issn-keyed cache entries.  NOTE(review): only uid,
        journal_title, abbreviation and nlm_id are selected, so
        eissn/iso_abbr keep their defaults.
        """
        if not issn:
            return
        if context.config.use_cache:
            j = context.cache_get('%s:%s' % (self.ALT_CACHE_KEY, issn))
            if j \
                and j.journal_title:
                # Cache hit: copy the cached object's fields onto self
                for att in ('uid', 'journal_title', 'abbreviation',
                    'issn', 'eissn', 'iso_abbr', 'nlm_id'):
                    self.set(att, getattr(j, att))
                return
        cursor = context.get_cursor()
        cursor.execute("""
            SELECT uid, journal_title, abbreviation, nlm_id
            FROM medline_journals
            WHERE issn = %s
            """, issn)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        if rows:
            row = rows[0]
            row = dtuple.DatabaseTuple(desc, row)
            for k, v in row.items():
                self.set(k, v)
            # The query filtered on issn, so record it explicitly
            self.issn = issn
            if context.config.use_cache:
                context.cache_set('%s:%s' % (self.CACHE_KEY, self.uid), self)
                context.cache_set('%s:%s' % (self.ALT_CACHE_KEY, self.issn), self)

    def load (self, context):
        """Populate this object from the db row matching self.uid."""
        if self.uid == -1:
            return
        # Is it already loaded?  Convenience check for client calls that
        # don't need to verify loads from the cache.
        if context.config.use_cache:
            try:
                if self.journal_title:
                    # Already loaded
                    return
            except AttributeError:
                # Not already loaded, so continue
                pass
        cursor = context.get_cursor()
        cursor.execute("""
            SELECT journal_title, abbreviation, issn, nlm_id
            FROM medline_journals
            WHERE uid = %s
            """, self.uid)
        fields = [d[0] for d in cursor.description]
        desc = dtuple.TupleDescriptor([[f] for f in fields])
        rows = cursor.fetchall()
        for row in rows:
            row = dtuple.DatabaseTuple(desc, row)
            for k, v in row.items():
                self.set(k, v)
        if context.config.use_cache:
            context.cache_set('%s:%s' % (self.ALT_CACHE_KEY, self.issn), self)
# XSLT stylesheet that reduces a MEDLINE citation document to selected
# MedlineCitation subfields (PMID, journal issue, title, pagination,
# abstract, affiliation, authors).  Only PMID has an explicit output
# template here; the other applied fields fall back to XSLT's built-in
# (text-copying) rules.
xslt = """<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
<xsl:output method='html'/>
<xsl:template match='/'>
<xsl:apply-templates select='//MedlineCitation'/>
</xsl:template>
<xsl:template match='MedlineCitation'>
<xsl:apply-templates select='PMID'/>
<xsl:apply-templates select='Article/Journal/JournalIssue'/>
<xsl:apply-templates select='MedlineJournalInfo/MedlineTA'/>
<xsl:apply-templates select='Article/ArticleTitle'/>
<xsl:apply-templates select='Article/Pagination/MedlinePgn'/>
<xsl:apply-templates select='Article/Abstract/AbstractText'/>
<xsl:apply-templates select='Article/Affiliation'/>
<xsl:apply-templates select='Article/AuthorList/Author'/>
</xsl:template>
<xsl:template match='PMID'>
<xsl:value-of select='.'/>
</xsl:template>
</xsl:stylesheet>
"""
class Pubmed:
    """Thin client for the NCBI Entrez E-utilities (esearch/elink/efetch).

    http://eutils.ncbi.nlm.nih.gov/entrez/query/static/eutils_help.html
    """

    def __init__ (self):
        self.tool = 'ycmi_canary_database'
        self.email = '[email protected]'
        self.count = 0
        self.query_key = 0
        self.web_env = ''
        self.results = ''
        # Precompiled patterns for pulling values out of E-utility XML
        self.re_query_key = re.compile(r'<QueryKey>(.*)</QueryKey>')
        self.re_web_env = re.compile(r'<WebEnv>(.*)</WebEnv>')
        self.re_count = re.compile(r'<Count>(.*)</Count>')
        # Used to strip the html/body wrapper and title from
        # retmode='html' efetch output
        self.re_body_tags = re.compile('</?(Html|Body)>')
        self.re_title_data = re.compile('<Title>.*</Title>\\n')

    def url_tool_info (self):
        """Return the '&tool=...&email=...' suffix NCBI asks clients to send."""
        return '&tool=%s&email=%s' % (self.tool, self.email)

    def esearch (self, query, use_history=True):
        """
        Simple esearch following definition at:

        http://eutils.ncbi.nlm.nih.gov/entrez/query/static/esearch_help.html

        Note: currently handles no options such as date range.

        Returns (count, query_key, web_env); query_key/web_env are only
        refreshed when use_history is true.

        (Docstring moved inside the def -- it previously sat *before*
        the def as a discarded bare string expression.)
        """
        url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed'
        if use_history:
            url = url + '&usehistory=y'
        else:
            url = url + '&usehistory=n'
        url = url + '&term=' + urllib.quote_plus(query)
        url = url + self.url_tool_info()
        data = urllib.urlopen(url).read()
        self.count = self.re_count.search(data).groups()[0]
        if use_history:
            self.query_key = self.re_query_key.search(data).groups()[0]
            self.web_env = self.re_web_env.search(data).groups()[0]
        return (self.count, self.query_key, self.web_env)

    def elink (self, pmid, use_history=True):
        """
        Simple elink following definition at:

        http://eutils.ncbi.nlm.nih.gov/entrez/query/static/elink_help.html

        Note: currently only handles cmd=neighbor with no options.
        Not yet implemented.
        """
        pass

    def efetch (self, query_key, webenv,
        retstart=0, retmax=10,
        retmode='text', rettype='citation',
        strip_body_tags=True):
        """
        Simple efetch following definition at:

        http://eutils.ncbi.nlm.nih.gov/entrez/query/static/efetch_help.html

        Returns the query result data in the specified format (also kept
        in self.results).  Note that retmode='html' will have surrounding
        <html> and <body> tags; strip_body_tags, if true, will remove
        these, leaving a <pre> formatted result layout.
        """
        url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed'
        url = url + '&WebEnv=%s' % (webenv)
        url = url + '&query_key=%s' % (query_key)
        url = url + '&retstart=%s' % (retstart)
        url = url + '&retmax=%s' % (retmax)
        url = url + '&retmode=%s' % (retmode)
        url = url + '&rettype=%s' % (rettype)
        url = url + self.url_tool_info()
        data = urllib.urlopen(url).read()
        if strip_body_tags:
            data = self.re_body_tags.sub('', data)
            data = self.re_title_data.sub('', data)
        self.results = data
        # BUGFIX: return the data as documented (previously None)
        return self.results
| {
"repo_name": "dchud/sentinel",
"path": "canary/pubmed.py",
"copies": "1",
"size": "7141",
"license": "mit",
"hash": 8656622201880145000,
"line_mean": 27.6787148594,
"line_max": 83,
"alpha_frac": 0.5558045092,
"autogenerated": false,
"ratio": 3.5386521308225967,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45944566400225967,
"avg_score": null,
"num_lines": null
} |
# $Id$
import struct, socket, sys, errno, os, select
import cgi, fakefile
# --- module-global logging configuration ---
log_level = 0  # verbosity threshold; 0 disables logging entirely
log_name = "/tmp/fcgi.log"  # log path; changeable via set_logging() before first write
log_file = None  # opened lazily by _log() on first message
log_lock = None  # allocated by Server.__init__ when threading is enabled
# --- constants from the FastCGI 1.0 specification ---
FCGI_LISTENSOCK_FILENO = 0
FCGI_VERSION_1 = 1
# record types
FCGI_BEGIN_REQUEST = 1
FCGI_ABORT_REQUEST = 2
FCGI_END_REQUEST = 3
FCGI_PARAMS = 4
FCGI_STDIN = 5
FCGI_STDOUT = 6
FCGI_STDERR = 7
FCGI_DATA = 8
FCGI_GET_VALUES = 9
FCGI_GET_VALUES_RESULT = 10
FCGI_UNKNOWN_TYPE = 11
# BEGIN_REQUEST flags
FCGI_KEEP_CONN = 1
# roles
FCGI_RESPONDER = 1
FCGI_AUTHORIZER = 2
FCGI_FILTER = 3
# END_REQUEST protocol status values
FCGI_REQUEST_COMPLETE = 0
FCGI_CANT_MPX_CONN = 1
FCGI_OVERLOADED = 2
FCGI_UNKNOWN_ROLE = 3
# 256-entry translation table: printable ASCII passes through, everything
# else becomes "." - used to sanitise data dumped into the log
_log_printable = "." * 32 + "".join(chr(c) for c in range(32, 127)) + "." * 129
def _log(level, message, data=None):
  """Append 'message' to the log file if 'level' is within log_level.

  'data', if given, is stringified, sanitised through _log_printable
  (non-printable bytes become '.') and appended in indented 70-character
  lines.  The file is opened lazily, line-buffered, under log_lock when
  threading is enabled.
  """
  global log_file
  if log_level >= level:
    import time
    if data:
      if not isinstance(data, str):
        data = str(data)
      # Py2 str.translate with a 256-char table: maps each byte
      data = data.translate(_log_printable)
      pos = 0
      while pos < len(data):
        message += "\n  " + data[pos:pos+70]
        pos += 70
    if log_lock:
      log_lock.acquire()
    try:
      if not log_file:
        # buffering=1: line-buffered append
        log_file = open(log_name, "a", 1)
      log_file.write("%s %s\n" % (time.strftime("%b %d %H:%M:%S"), message))
    finally:
      if log_lock:
        log_lock.release()
def set_logging(level, filename=None):
  """Set the log verbosity and, optionally, the log file name.

  The filename may only be changed before the first message has been
  written (i.e. before the log file has actually been opened).
  """
  global log_level, log_name
  if filename:
    if filename != log_name:
      if log_file:
        raise Exception("Cannot change the log filename after it's been opened")
      log_name = filename
  log_level = level
# We shouldn't need this function, we should be able to just use socket.makefile
# instead, but Solaris 2.7 appears to be so broken that stdio doesn't work when
# you set the buffer size of a stream to zero.
def _sockread(sock, length):
data = []
while length > 0:
newdata = sock.recv(length)
if not newdata:
raise EOFError("End-of-file reading socket")
data.append(newdata)
length -= len(newdata)
return "".join(data)
class Record(object):
  """One FastCGI record: 8-byte header plus content and padding.

  Construct with a socket to read and parse an incoming record, or with
  no argument to build an outgoing record (caller sets type, request_id
  and content_data, then calls encode()).
  """
  def __init__(self, insock=None):
    if insock:
      data = _sockread(insock, 8)
      # header layout "!BBHHBx": version, type, requestId, contentLength,
      # paddingLength, reserved byte
      (self.version, self.type, self.request_id, content_length,
        padding_length) = struct.unpack("!BBHHBx", data)
      self.content_data = _sockread(insock, content_length)
      _sockread(insock, padding_length)  # discard the padding bytes
    else:
      self.version = FCGI_VERSION_1
  def encode(self):
    # pad the content up to the next 8-byte boundary
    padding_length = -len(self.content_data) & 7
    return struct.pack("!BBHHBB", self.version, self.type, self.request_id,
      len(self.content_data), padding_length, 0) + self.content_data + \
      "\x00" * padding_length
class NameValueData(object):
  """FastCGI name-value pair stream (FCGI_PARAMS / FCGI_GET_VALUES bodies).

  Each length is encoded as one byte, or as four big-endian bytes with
  the top bit set when it exceeds 127.  Parsed pairs are kept in
  self.values as (name, value) tuples.
  """
  def __init__(self, data=""):
    self.values = []
    pos = 0
    while pos < len(data):
      if ord(data[pos]) & 128:
        nlen = struct.unpack("!I", data[pos:pos+4])[0] & 0x7fffffff
        pos += 4
      else:
        nlen = ord(data[pos])
        pos += 1
      if ord(data[pos]) & 128:
        vlen = struct.unpack("!I", data[pos:pos+4])[0] & 0x7fffffff
        pos += 4
      else:
        vlen = ord(data[pos])
        pos += 1
      if pos + nlen + vlen > len(data):
        raise ValueError("Unexpected end-of-data in NameValueRecord")
      self.values.append((data[pos:pos+nlen],
        data[pos+nlen:pos+nlen+vlen]))
      pos += nlen + vlen
  def encode_one(self, name, value):
    """Encode a single (name, value) pair."""
    lengths = []
    for item in (name, value):
      if len(item) > 127:
        # four-byte form with the sign bit set
        lengths.append(struct.pack("!I", len(item) | (-0x7fffffff-1)))
      else:
        lengths.append(chr(len(item)))
    return lengths[0] + lengths[1] + name + value
  def encode(self):
    """Encode every pair in self.values into one byte string."""
    parts = []
    for name, value in self.values:
      parts.append(self.encode_one(name, value))
    return "".join(parts)
class InputStream(fakefile.FakeInput):
  """One FastCGI input stream (stdin or fcgi_data) for a request.

  Chunks arrive via add_data(); an empty chunk marks end-of-stream.
  _read() blocks until data or EOF: on a semaphore when threaded,
  otherwise by pumping records through the owning connection.
  """
  def __init__(self, connection, streamname, threaded):
    fakefile.FakeInput.__init__(self)
    self.connection = connection
    self.streamname = streamname
    self.threaded = threaded
    self.data = []
    self.eof = 0
    if self.threaded:
      import threading
      self.sema = threading.Semaphore(0)
  def add_data(self, chunk):
    """Queue a chunk of stream data; empty chunk means end-of-stream."""
    if chunk:
      self.data.append(chunk)
    else:
      self.eof = 1
    if self.threaded:
      self.sema.release()
  def _read(self, nbytes=-1):
    """Return the next buffered chunk, or "" once the stream has ended."""
    while not (self.eof or self.data):
      if self.threaded:
        self.sema.acquire()
      else:
        self.connection.process_input(self.streamname)
    if self.data:
      return self.data.pop(0)
    return ""
class Connection(object):
def __init__(self, socket, handler_types, request_type, params,
threading_level):
self.socket = socket
self.handler_types = handler_types
self.request_type = request_type
self.fileno = self.socket.fileno()
self.params = params
self.threading_level = threading_level
if self.threading_level > 1:
import thread
self.socketlock = thread.allocate_lock()
else:
self.socketlock = None
def log(self, level, request_id, message, data=None):
if log_level >= level:
if request_id:
_log(level, "%3d/%3d %s" % (self.fileno, request_id, message), data)
else:
_log(level, "%3d %s" % (self.fileno, message), data)
def close(self):
if self.socketlock is not None:
self.socketlock.acquire()
try:
self.socket.close()
finally:
self.socketlock.release()
else:
self.socket.close()
def write(self, rec):
try:
if self.socketlock is not None:
self.socketlock.acquire()
try:
self.socket.sendall(rec.encode())
finally:
self.socketlock.release()
else:
self.socket.sendall(rec.encode())
except socket.error, x:
if x[0] == errno.EPIPE:
for req in self.requests.values():
req.aborted = 2
else:
raise
def run(self):
self.log(2, 0, "New connection running")
self.requests = {}
self.process_input(None)
def process_input(self, waitstream):
while 1:
try:
# this select *should* be pointless, however it works around a bug
# in OpenBSD whereby the read() does not get interrupted when
# another thread closes the socket (and it does no harm on other
# OSes)
select.select([self.socket], [], [])
rec = Record(self.socket)
except:
x = sys.exc_info()[1]
if isinstance(x, (EOFError, ValueError)) or \
(isinstance(x, socket.error) and x[0] == errno.EBADF):
self.log(2, 0, "EOF received on connection")
for req in self.requests.values():
req.aborted = 2
break
else:
raise
if rec.type == FCGI_GET_VALUES:
data = NameValueData(rec.content_data)
self.log(3, 0, "< FCGI_GET_VALUES", data.values)
reply = Record()
reply.type = FCGI_GET_VALUES_RESULT
reply.request_id = 0
reply_data = NameValueData()
for nameval in data.values:
if self.params and nameval[0] in self.params:
reply_data.values.append(nameval[0], str(self.params[nameval[0]]))
elif nameval[0] == "FCGI_MAX_CONNS":
if self.threading_level < 1:
reply_data.values.append(("FCGI_MAX_CONNS", "1"))
else:
reply_data.values.append(("FCGI_MAX_CONNS", "10"))
elif nameval[0] == "FCGI_MAX_REQS":
if self.threading_level < 1:
reply_data.values.append(("FCGI_MAX_REQS", "1"))
else:
reply_data.values.append(("FCGI_MAX_REQS", "10"))
elif nameval[0] == "FCGI_MPXS_CONNS":
if self.threading_level < 2:
reply_data.values.append(("FCGI_MPXS_CONNS", "0"))
else:
reply_data.values.append(("FCGI_MPXS_CONNS", "1"))
self.log(3, 0, "> FCGI_GET_VALUES_RESULT", reply_data.values)
reply.content_data = reply_data.encode()
self.write(reply)
elif rec.type == FCGI_BEGIN_REQUEST:
(role, flags) = struct.unpack("!HB", rec.content_data[:3])
handler_type = self.handler_types.get(role)
self.log(2, rec.request_id,
"< FCGI_BEGIN_REQUEST: role = %d, flags = %d" % (role, flags))
if not handler_type:
self.log(2, rec.request_id, "no handler for this role, rejecting")
reply = Record()
reply.type = FCGI_END_REQUEST
reply.request_id = rec.request_id
reply.content_data = struct.pack("!IBBBB",
0, FCGI_UNKNOWN_ROLE, 0, 0, 0)
self.log(3, rec.request_id, "> FCGI_END_REQUEST: FCGI_UNKNOWN_ROLE")
self.write(reply)
elif waitstream is not None:
self.log(2, rec.request_id, "already handling a request, rejecting")
reply = Record()
reply.type = FCGI_END_REQUEST
reply.request_id = rec.request_id
reply.content_data = struct.pack("!IBBBB",
0, FCGI_CANT_MPX_CONN, 0, 0, 0)
self.log(3, rec.request_id, "> FCGI_END_REQUEST: FCGI_CANT_MPX_CONN")
self.write(reply)
else:
req = self.request_type(handler_type, self, rec.request_id, flags,
self.threading_level)
self.requests[rec.request_id] = req
elif rec.type == FCGI_PARAMS:
req = self.requests.get(rec.request_id)
if req:
if rec.content_data:
data = NameValueData(rec.content_data)
self.log(3, rec.request_id, "< FCGI_PARAMS", data.values)
for nameval in data.values:
req.environ[nameval[0]] = nameval[1]
else:
self.log(3, rec.request_id, "< FCGI_PARAMS: <empty>")
if self.threading_level > 1:
self.log(2, rec.request_id, "starting request thread")
import thread
thread.start_new_thread(req.run, ())
else:
self.log(2, rec.request_id, "executing request")
req.run()
else:
self.log(2, rec.request_id, "< FCGI_PARAMS: unknown request_id",
rec.content_data)
elif rec.type == FCGI_ABORT_REQUEST:
req = self.requests.get(rec.request_id)
if req:
self.log(2, rec.request_id, "< FCGI_ABORT_REQUEST")
req.aborted = 1
else:
self.log(2, rec.request_id,
"< FCGI_ABORT_REQUEST: unknown request_id")
elif rec.type == FCGI_STDIN:
req = self.requests.get(rec.request_id)
if req:
if log_level >= 4:
self.log(4, rec.request_id, "< FCGI_STDIN", rec.content_data)
req.stdin.add_data(rec.content_data)
if waitstream == "stdin":
return
else:
self.log(2, rec.request_id, "< FCGI_STDIN: unknown request_id",
rec.content_data)
elif rec.type == FCGI_DATA:
req = self.requests.get(rec.request_id)
if req:
if log_level >= 4:
self.log(4, rec.request_id, "< FCGI_DATA", rec.content_data)
req.fcgi_data.add_data(rec.content_data)
if waitstream == "fcgi_data":
return
else:
self.log(2, rec.request_id, "< FCGI_DATA: unknown request_id",
rec.content_data)
else:
self.log(2, rec.request_id, "< unknown type %d" % rec.type)
reply = Record()
reply.type = FCGI_UNKNOWN_TYPE
reply.request_id = 0
reply.content_data = chr(rec.type) + "\x00" * 7
self.log(3, "> FCGI_UNKNOWN_TYPE")
self.write(reply)
class Request(cgi.Request):
  """A single FastCGI request, bridging cgi.Request onto the record protocol.

  Output written by the handler is chunked into FCGI_STDOUT/FCGI_STDERR
  records; input arrives via the stdin/fcgi_data InputStreams.
  """
  # request type used instead when the process turns out to be plain CGI
  _fcgi_fallback_type = cgi.CGIRequest
  def __init__(self, handler_type, connection, request_id, flags,
    threading_level):
    cgi.Request.__init__(self, handler_type)
    self.__connection = connection
    self.__request_id = request_id
    self.__flags = flags
    self.__threading_level = threading_level
    self.fcgi_data = InputStream(connection, "fcgi_data", threading_level > 1)
    self.stdin = InputStream(connection, "stdin", threading_level > 1)
    self.environ = {}
    # tracks whether FCGI_STDERR was ever written, so run() knows
    # whether that stream needs an explicit close record
    self._stderr_used = 0
  def log(self, level, message, data=None):
    """Log a message tagged with the connection fd and this request's id."""
    global log_file
    if log_level >= level:
      _log(level, "%3d/%3d %s" % (self.__connection.fileno,
        self.__request_id, message), data)
  def run(self):
    """Execute the handler, then finalise the FastCGI streams.

    self.aborted is maintained by Connection (1 = FCGI_ABORT_REQUEST
    received, 2 = connection gone); presumably initialised by
    cgi.Request - verify there.  When aborted < 2 the streams are closed
    with empty records and FCGI_END_REQUEST is sent.
    """
    try:
      self.log(2, "New request running")
      self._init()
      self.log(2, "Calling handler")
      try:
        handler = self._handler_type()
      except:
        self.traceback()
      else:
        try:
          handler.process(self)
        except:
          handler.traceback(self)
      self.log(2, "Handler finished")
      self.flush()
      if self.aborted < 2:
        try:
          # empty content_data closes the stream on the server side
          rec = Record()
          rec.type = FCGI_STDOUT
          rec.request_id = self.__request_id
          rec.content_data = ""
          self.log(2, "> FCGI_STDOUT: <close>")
          self.__connection.write(rec)
          if self._stderr_used:
            rec.type = FCGI_STDERR
            self.log(2, "> FCGI_STDERR: <close>")
            self.__connection.write(rec)
          rec.type = FCGI_END_REQUEST
          rec.content_data = struct.pack("!IBBBB", 0, FCGI_REQUEST_COMPLETE,
            0, 0, 0)
          self.log(2, "> FCGI_END_REQUEST")
          self.__connection.write(rec)
        except IOError, x:
          if x[0] == errno.EPIPE:
            self.log(2, "EPIPE during request finalisation")
          else:
            raise
    finally:
      # honour FCGI_KEEP_CONN: only close when the server did not ask
      # for the connection to be kept open
      if not self.__flags & FCGI_KEEP_CONN:
        self.__connection.close()
        self.log(2, "Closed connection")
      del self.__connection.requests[self.__request_id]
    self.log(2, "Request complete")
  def _write(self, s):
    """Send handler output as FCGI_STDOUT records."""
    if log_level >= 4:
      self.log(4, "> FCGI_STDOUT", s)
    self._recwrite(FCGI_STDOUT, s)
  def error(self, s):
    """Send error output as FCGI_STDERR records."""
    if log_level >= 4:
      self.log(4, "> FCGI_STDERR", s)
    self._recwrite(FCGI_STDERR, s)
    self._stderr_used = 1
  def _recwrite(self, type, s):
    """Write 's' as records of the given type, in <=65535-byte chunks."""
    if s:
      pos = 0
      while pos < len(s):
        if self.aborted:
          return
        rec = Record()
        rec.type = type
        rec.request_id = self.__request_id
        if pos == 0 and len(s) <= 65535:
          # (avoid copying in the common case of s <= 65535 bytes)
          rec.content_data = s
        else:
          rec.content_data = s[pos:pos+65535]
        pos += len(rec.content_data)
        try:
          self.__connection.write(rec)
        except IOError, x:
          if x[0] == errno.EPIPE:
            self.aborted = 2
            self.log(2, "Aborted due to EPIPE")
          else:
            raise
class GZipRequest(cgi.GZipMixIn, Request):
  """Request whose response body is gzip-compressed via cgi.GZipMixIn."""
  # gzip-capable fallback for the plain-CGI case
  _fcgi_fallback_type = cgi.GZipCGIRequest
class Server(object):
def __init__(self, handler_types, max_requests=0, params=None,
request_type=Request, threading_level=1):
global log_lock
self.handler_types = handler_types
self.max_requests = max_requests
self.params = params
self.request_type = request_type
self.log(2, "theading_level = %d" % threading_level)
if threading_level > 0:
try:
import thread
log_lock = thread.allocate_lock()
except ImportError, x:
threading_level = 0
self.log(2, "cannot import thread (%s), disabling threading" % str(x))
self.threading_level = threading_level
def log(self, level, message):
if log_level >= level:
_log(level, " %s" % message)
def exit(self):
self._sock.close()
def run(self):
self.log(1, "Server.run()")
if "FCGI_WEB_SERVER_ADDRS" in os.environ:
web_server_addrs = os.environ["FCGI_WEB_SERVER_ADDRS"].split(",")
else:
web_server_addrs = None
self.log(1, "web_server_addrs = %s" % repr(web_server_addrs))
self._sock = socket.fromfd(sys.stdin.fileno(), socket.AF_INET,
socket.SOCK_STREAM)
try:
self._sock.getpeername()
except socket.error, x:
if x[0] != errno.ENOTSOCK and x[0] != errno.ENOTCONN:
raise
if x[0] == errno.ENOTSOCK:
self.log(1, "stdin not socket - falling back to CGI")
self.request_type._fcgi_fallback_type(
self.handler_types[FCGI_RESPONDER]).process()
return
self._sock.setblocking(1)
while 1:
try:
# this select *should* be pointless, however it works around a bug
# in OpenBSD whereby the accept() does not get interrupted when
# another thread closes the socket (and it does no harm on other
# OSes)
select.select([self._sock], [], [])
(newsock, addr) = self._sock.accept()
except socket.error, x:
if x[0] == errno.EBADF:
break
raise
self.log(1, "accepted connection %d from %s" %
(newsock.fileno(), repr(addr)))
if web_server_addrs and (len(addr) != 2 or \
addr[0] not in web_server_addrs):
self.log(1, "not in web_server_addrs - rejected")
newsock.close()
continue
conn = Connection(newsock, self.handler_types, self.request_type,
self.params, self.threading_level)
del newsock
if self.threading_level > 0:
import thread
thread.start_new_thread(conn.run, ())
else:
conn.run()
if self.max_requests > 0:
self.max_requests -= 1
if self.max_requests <= 0:
self.log(1, "reached max_requests, exiting")
break
self._sock.close()
| {
"repo_name": "jribbens/jonpy",
"path": "jon/fcgi.py",
"copies": "1",
"size": "17501",
"license": "mit",
"hash": 558906104448596600,
"line_mean": 30.8779599271,
"line_max": 80,
"alpha_frac": 0.5793383235,
"autogenerated": false,
"ratio": 3.410836094328591,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4490174417828591,
"avg_score": null,
"num_lines": null
} |
# $Id $
############################################################################
import sys
from math import fabs
############################################################################
LatticeError="LATTICE_ERROR:"	#string exception tag (old-style 'raise LatticeError,msg')
true =(1>0)			#get true
false=(1<0)			#get false
eps=1.e-5			#precision limit for position calculations
debug=false			#enables diagnostic printout in joinDrifts()
#.........................................................
class Lattice:
    """The lattice is a list of elements. Each element is associated with one and only one accelerator node."""
    def __init__(self,name,base=0):
        """Create an empty lattice with a 'name' and a 'base' position."""
        self.name=name
        self.base=base	#the base position of the lattice
        self.elements=[]	#the element list
        #an empty lattice: BEGIN marker, a plain marker, END marker
        self.elements.append(Marker(0,0,"BEGIN "+name))
        self.elements.append(Marker(0))
        self.elements.append(Marker(0,0,"END "+name))
        #all elements get same offset
        self.updateBases()
    def __len__(self):
        """Return the lattice length in number of elements."""
        return len(self.elements)
    def __getitem__(self,index):
        """Get element with 'index' from lattice."""
        return self.elements[index]
    def getLength(self):
        """Return the lattice length in distance units."""
        return self.elements[-1].getEndPosition()
    def getName(self):
        """Return the lattice name."""
        return self.name
    def append(self,element):
        """Append an element at the end of the lattice.

        Any gap between the current lattice end and the element's start
        is filled with a Drift; the element is bracketed by markers and
        the 'END <name>' marker is re-placed behind it.
        """
        lattice_end=self.elements.pop()
        last=self.elements[-1]
        start_pos=element.getStartPosition()
        #space to fill up with drift space
        drift_len=start_pos-last.getPosition()
        if drift_len < -eps:
            #ooops negative length: severe error
            raise LatticeError,"length is negative when appending: "+element.getName()
        elif fabs(drift_len) < eps:
            #too short drift: ignore
            pass
        else:
            #add an upstream drift space (centered in the gap)
            drift_pos=last.getPosition()+drift_len*0.5
            self.elements.append(Drift(drift_pos,drift_len))
            self.elements.append(Marker(start_pos))
        #add the element
        self.elements.append(element)
        end_pos=element.getEndPosition()
        self.elements.append(Marker(end_pos))
        #place 'lattice END' marker
        self.elements.append(Marker(end_pos,0,lattice_end.getName()))
        #all elements get same offset
        self.updateBases()
    def appendTuple(self,tuple):
        """Append a tuple to the end of the lattice"""
        for el in tuple:
            self.append(el)
    def insert(self,element):
        """Insert a zero length element into the lattice. Lumped elements in the lattice will be sliced accordingly."""
        if element.getLength() != 0.0:
            raise LatticeError,"length must be zero when inserting: "+element.getName()
        #assemble a list of indexes of marker elements
        markers=[]
        for i in range(len(self)):
            if self.elements[i].type == "marker":
                markers.append(i)
        #search the markers that embrace the element
        before=markers[0]
        after=markers[-1]
        for m in markers[1:-1]:
            if self.elements[m].getPosition() <= element.getPosition():
                before=m
                continue
            after=m
            break
        #slice the element between the two markers
        #(split() returns a list that replaces the lumped element in place)
        between=after-1
        self.elements[between:after]=self.elements[between].split(element)
        #all elements get same offset
        self.updateBases()
    def clearMarkers(self):
        """Remove all marker elements from the lattice."""
        #keep only the BEGIN and END markers themselves
        lattice=[]
        begin=self.elements[0]
        end  =self.elements[-1]
        lattice.append(begin)
        for el in self.elements:
            if el.getType() == 'marker': continue
            lattice.append(el)
        lattice.append(end)
        self.elements=lattice
    def updateBases(self):
        """NOT FOR PUBLIC USE"""
        #all elements get same offset
        for el in self.elements:
            el.setBase(self.base)
    def joinDrifts(self):
        """Join neighboring drift spaces into a single one."""
        lattice=[]
        #pass 1: merge each directly adjacent pair of drifts
        end=self.elements[-1]
        ix=0
        while ix < len(self)-1:
            el=self.elements[ix]
            ix=ix+1
            if el.getType() == 'drift':
                elnext=self.elements[ix]
                if elnext.getType() == 'drift':
                    ix=ix+1
                    len1=el.getLength()
                    len2=elnext.getLength()
                    spos1=el.getStartPosition()
                    spos2=elnext.getStartPosition()
                    jlen=len1+len2
                    jpos=spos1+jlen*0.5
                    jspos=jpos-jlen*0.5
                    jdrift=Drift(jpos,jlen)
                    lattice.append(jdrift)
                    if debug: print "join drifts:",(spos1,len1),"+",(spos2,len2),"=",(jspos,jlen)
                else:
                    lattice.append(el)
            else:
                lattice.append(el)
        lattice.append(end)
        self.elements=lattice
        #pass 2
        #check if there are still touching drifts in the lattice
        #if so call this function recursively until all done
        ix=0
        while ix < len(self)-1:
            el=self.elements[ix]
            ix=ix+1
            if el.getType() == 'drift':
                elnext=self.elements[ix]
                if elnext.getType() == 'drift':
                    #repeat the joining process
                    self.joinDrifts()
                    return
                else:
                    pass
            else:
                pass
        return
    def cout(self):
        """Make a printed output of the lattice."""
        for count in range(len(self)):
            el=self.elements[count]
            abs_start_pos=el.getAbsPosition()-el.getLength()*0.5
            print "s=",abs_start_pos,"m\t",el.getName(),"\t",el.getType(),el.getPosition(),el.getLength()
        print "Totals: length of",self.getName(),"=",self.getLength(),"m with",len(self),"elements."
    def isConsistent(self):
        """Consistency check: each pair of neighbours must be separated by
        exactly half the sum of their lengths (within eps)."""
        for ix in range(len(self)-1):
            el=self[ix]
            next=self[ix+1]
            el_pos=el.getPosition()
            el_len=el.getLength()
            next_pos=next.getPosition()
            next_len=next.getLength()
            if fabs(el_pos+(el_len+next_len)*0.5-next_pos) > eps:
                raise LatticeError,"inconsistent distances between "+el.getName()+" and "+next.getName()
#.........................................................
class Element:
    """Common base class for all lattice elements.

    An element is described by its center position (relative to the
    lattice 'base' offset), its length and a name.
    """
    def __init__(self,name,position=0,len=0):
        """NOT FOR PUBLIC USE"""
        self.name=name
        self.position=position
        self.len=len
        self.base=0
    def getStartPosition(self):
        """Return the upstream start position of this element."""
        return self.position - self.len*0.5
    def getEndPosition(self):
        """Return the downstream end position of this element."""
        return self.position + self.len*0.5
    def getPosition(self):
        """Return the center position of this element."""
        return self.position
    def getAbsPosition(self):
        """Return the absolute center position (center + base offset)."""
        return self.position + self.base
    def getLength(self):
        """Return the length of the element in distance units."""
        return self.len
    def getName(self):
        """Return the name of the element."""
        return self.name
    def getBase(self):
        """Return the base for relative positions."""
        return self.base
    def getSlicePositions(self,cut_pos):
        """NOT FOR PUBLIC USE"""
        #positions/lengths of the two parts when cutting at cut_pos;
        #lengths below the precision limit collapse to exactly zero
        start=self.getStartPosition()
        end=self.getEndPosition()
        up_len=cut_pos-start
        if fabs(up_len) < eps:
            up_len=0
        dn_len=self.getLength()-up_len
        if fabs(dn_len) < eps:
            dn_len=0
        return (start+up_len*0.5, up_len, end-dn_len*0.5, dn_len)
    def setBase(self,base):
        """NOT FOR PUBLIC USE"""
        #set the base for relative positions
        self.base=base
    def setPosition(self,position):
        """Set the element position."""
        self.position=position
    def setLength(self,length):
        """Set the element length."""
        self.len=length
    def split(self,insert,CLASS):
        """NOT FOR PUBLIC USE"""
        #Cut this thick element of type CLASS at the center of 'insert'
        #and sandwich 'insert' (bracketed by markers) between the parts.
        #A degenerate (zero length) part is dropped entirely.
        cut_pos=insert.getPosition()
        (up_pos,up_len,dn_pos,dn_len)=self.getSlicePositions(cut_pos)
        upstream=CLASS(up_pos,up_len,self.getName())
        dnstream=CLASS(dn_pos,dn_len,self.getName()+"+")
        marker=Marker(cut_pos)
        if fabs(upstream.getLength()) < eps:
            return [insert,marker,self]
        if fabs(dnstream.getLength()) < eps:
            return [self,marker,insert]
        return [upstream,marker,insert,marker,dnstream]
#.........................................................
class Drift(Element):
    """A field-free drift space."""
    type = "drift"
    def __init__(self, position, len, name="---"):
        """Create a drift from its center position and length."""
        Element.__init__(self, name, position, len)
    def getType(self):
        """Return the element type string."""
        return self.type
    def split(self, insert):
        """NOT FOR PUBLIC USE"""
        # delegate to the generic slicer, producing Drift pieces
        return Element.split(self, insert, Drift)
#.........................................................
class Dipole(Element):
    """A bending dipole magnet."""
    type = "dipole"
    def __init__(self, position, len, name="DIP"):
        """Create a dipole from its center position and length."""
        Element.__init__(self, name, position, len)
    def getType(self):
        """Return the element type string."""
        return self.type
    def split(self, insert):
        """NOT FOR PUBLIC USE"""
        # delegate to the generic slicer, producing Dipole pieces
        return Element.split(self, insert, Dipole)
#.........................................................
class Quadrupole(Element):
    """A (normal) quadrupole magnet."""
    type = "quadrupole"
    def __init__(self, position, len, name="NQP"):
        """Create a quadrupole from its center position and length."""
        Element.__init__(self, name, position, len)
    def getType(self):
        """Return the element type string."""
        return self.type
    def split(self, insert):
        """NOT FOR PUBLIC USE"""
        # delegate to the generic slicer, producing Quadrupole pieces
        return Element.split(self, insert, Quadrupole)
#.........................................................
class Sextupole(Element):
    """A (normal) sextupole magnet."""
    type = "sextupole"
    def __init__(self, position, len, name="NSX"):
        """Create a sextupole from its center position and length."""
        Element.__init__(self, name, position, len)
    def getType(self):
        """Return the element type string."""
        return self.type
    def split(self, insert):
        """NOT FOR PUBLIC USE"""
        # delegate to the generic slicer, producing Sextupole pieces
        return Element.split(self, insert, Sextupole)
#.........................................................
class SkewQuad(Element):
    """The skew quadrupole element."""
    type="skew_quadrupole"
    def __init__(self,position,len,name="SQP"):
        """Create the element: center position and len(ght) are needed"""
        Element.__init__(self,name,position,len)
    def getType(self):
        """Return the element type."""
        #bug fix: getType() was missing here, unlike every other Element
        #subclass; Lattice.clearMarkers(), cout() and joinDrifts() call
        #getType() on every element, which raised AttributeError for
        #skew quadrupoles
        return self.type
    def split(self,insert):
        """NOT FOR PUBLIC USE"""
        #Split this element to place element 'insert' in between.
        return Element.split(self,insert,SkewQuad)
#.........................................................
class SkewSext(Element):
    """A skew sextupole magnet."""
    type = "skew_sextupole"
    def __init__(self, position, len, name="SSX"):
        """Create a skew sextupole from its center position and length."""
        Element.__init__(self, name, position, len)
    def getType(self):
        """Return the element type string."""
        return self.type
    def split(self, insert):
        """NOT FOR PUBLIC USE"""
        # delegate to the generic slicer, producing SkewSext pieces
        return Element.split(self, insert, SkewSext)
#.........................................................
class Octupole(Element):
    """An octupole magnet."""
    type = "octupole"
    def __init__(self, position, len, name="OCT"):
        """Create an octupole from its center position and length."""
        Element.__init__(self, name, position, len)
    def getType(self):
        """Return the element type string."""
        return self.type
    def split(self, insert):
        """NOT FOR PUBLIC USE"""
        # delegate to the generic slicer, producing Octupole pieces
        return Element.split(self, insert, Octupole)
#.........................................................
class ThinElement(Element):
    """Superclass of all thin (zero geometric length) elements.

    The stored length is reinterpreted as an *effective* length: the
    element itself reports length 0, and asTuple() wraps it between two
    drifts that together cover the effective length.
    """
    def __init__(self,name,position=0,len=0):
        """NOT FOR PUBLIC USE"""
        Element.__init__(self,name,position,len)
    def getStartPosition(self):
        """Return the upstream start position (= center for a thin element)."""
        return Element.getPosition(self)
    def getEndPosition(self):
        """Return the downstream end position (= center for a thin element)."""
        return Element.getPosition(self)
    def getLength(self):
        """Return the geometric length, which is always zero."""
        return 0
    def getEffLength(self):
        """Return the effective length of the element in distance units."""
        return Element.getLength(self)
    def getUpstreamDrift(self):
        """Return the upstream drift space (half the effective length)."""
        half=self.getEffLength()*0.5
        center=Element.getPosition(self)
        if fabs(half) < eps:
            return Marker(center)
        #drift of length 'half' ending exactly at the element center
        return Drift(center-half*0.5,half)
    def getDownstreamDrift(self):
        """Return the downstream drift space (half the effective length)."""
        half=self.getEffLength()*0.5
        center=Element.getPosition(self)
        if fabs(half) < eps:
            return Marker(center)
        #drift of length 'half' starting exactly at the element center
        return Drift(center+half*0.5,half)
    def asTuple(self):
        """Return the thin element as a tuple (drift,element,drift)"""
        return (self.getUpstreamDrift(),self,self.getDownstreamDrift())
    def split(self,insert):
        """NOT FOR PUBLIC USE"""
        #a thin element needs no slicing: just place 'insert' behind it
        return [self,insert]
#.........................................................
class Marker(ThinElement):
    """A zero-length position marker."""
    type = "marker"
    def __init__(self, position, len=0, name="***"):
        """Create a marker; 'len' is accepted but ignored (always zero)."""
        ThinElement.__init__(self, name, position)
    def getType(self):
        """Return the element type string."""
        return self.type
#.........................................................
class RFGap(ThinElement):
    """A radio frequency gap (thin, with an effective length)."""
    type = "rfgap"
    def __init__(self, position, len=0, name="RFG"):
        """Create an RF gap from its center position and effective length."""
        ThinElement.__init__(self, name, position, len)
    def getType(self):
        """Return the element type string."""
        return self.type
#.........................................................
class BCMonitor(ThinElement):
    """A beam current monitor (thin, with an effective length)."""
    type = "beamcurrentmonitor"
    def __init__(self, position, len=0, name="BCM"):
        """Create a current monitor from its center position and effective length."""
        ThinElement.__init__(self, name, position, len)
    def getType(self):
        """Return the element type string."""
        return self.type
#.........................................................
class HSteerer(ThinElement):
    """A horizontal steerer (thin)."""
    type = "hsteerer"
    def __init__(self, position, len=0, name="DCH"):
        """Create the steerer; 'len' is accepted but ignored (zero effective length)."""
        ThinElement.__init__(self, name, position)
    def getType(self):
        """Return the element type string."""
        return self.type
#.........................................................
class VSteerer(ThinElement):
    """A vertical steerer (thin)."""
    type = "vsteerer"
    def __init__(self, position, len=0, name="DCV"):
        """Create the steerer; 'len' is accepted but ignored (zero effective length)."""
        ThinElement.__init__(self, name, position)
    def getType(self):
        """Return the element type string."""
        return self.type
#.........................................................
class BPMonitor(ThinElement):
    """A beam position monitor (thin)."""
    type = "beampositionmonitor"
    def __init__(self, position, len=0, name="BPM"):
        """Create the monitor; 'len' is accepted but ignored (zero effective length)."""
        ThinElement.__init__(self, name, position)
    def getType(self):
        """Return the element type string."""
        return self.type
#.........................................................
class WScanner(ThinElement):
    """A wire scanner monitor (thin)."""
    type = "wirescanner"
    def __init__(self, position, len=0, name="WSM"):
        """Create the scanner; 'len' is accepted but ignored (zero effective length)."""
        ThinElement.__init__(self, name, position)
    def getType(self):
        """Return the element type string."""
        return self.type
#.........................................................
if __name__ == '__main__':
    #Self-test: build a small lattice of thick elements, then exercise
    #the thin-element machinery (asTuple drift/element/drift expansion).
    test="Thick Element Test"
    print test
    lattice=Lattice(test,40.)
    #..............................
    try:
        lattice.append(Dipole(0.5,0.3,"DIP/0"))
        lattice.append(Quadrupole(0.85,0.15,"QH/0"))
        #lattice.append(Quadrupole(1.1,0.15,"QV/3"))
        lattice.append(Quadrupole(1.1,0.15))
        lattice.append(Dipole(1.37,0.3,"DIP/1"))
        #lattice.append(BPMonitor(1.57))
        lattice.append(Drift(1.545,0.05))
        #..............................
        #lattice.insert(BPMonitor(0.0,0,">>>"))
        #lattice.insert(BPMonitor(0.5,0,">>>"))
        #lattice.insert(BPMonitor(1.1,0,">>>"))
        #lattice.insert(BPMonitor(1.1375,0,">>>"))
        #lattice.insert(BPMonitor(1.175,0,">>>"))
        #lattice.insert(BPMonitor(1.57,0,">>>"))
        #..............................
        #lattice.insert(VSteerer(0.0))
        #lattice.insert(HSteerer(0.0))
        #lattice.insert(VSteerer(lattice.getLength()))
        #lattice.insert(HSteerer(lattice.getLength()))
        #..............................
        #lattice.clearMarkers()
        lattice.cout()
        #..............................
        #thin element with a non-zero effective length (expands to drifts)
        test1="Thin Element Test"
        print test1
        lattice1=Lattice(test1,66)
        rfgap=RFGap(10,20)
        for part in rfgap.asTuple():
            start_pos=part.getStartPosition()
            name=part.getName()
            type=part.getType()
            position=part.getPosition()
            length=part.getLength()
            print start_pos,name,type,position,length
            lattice1.append(part)
        lattice1.clearMarkers()
        lattice1.cout()
        #..............................
        #thin element with zero effective length (expands to markers)
        lattice2=Lattice(test1,66)
        rfgap=RFGap(45.3,0)
        for part in rfgap.asTuple():
            start_pos=part.getStartPosition()
            name=part.getName()
            type=part.getType()
            position=part.getPosition()
            length=part.getLength()
            print start_pos,name,type,position,length
            lattice2.append(part)
        lattice2.clearMarkers()
        lattice2.cout()
        #..............................
    except LatticeError,message:
        print LatticeError,message
        sys.exit(-1)
| {
"repo_name": "luxiaohan/openxal-csns-luxh",
"path": "core/src/xal/sim/slg/Lattice.py",
"copies": "3",
"size": "17611",
"license": "bsd-3-clause",
"hash": 6203877887158807000,
"line_mean": 29.627826087,
"line_max": 113,
"alpha_frac": 0.636250071,
"autogenerated": false,
"ratio": 3.2183845029239766,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5354634573923976,
"avg_score": null,
"num_lines": null
} |
# $Id$
import sys
import traceback
from canary.concept import Concept
import canary.context
from canary.human_study import HumanStudy
from canary import study
from canary.utils import DTable
import dtuple
this_module = sys.modules[__name__]
def summary_set_from_concept (context, concept_id, concept_type):
    """
    For a given concept_id with a particular concept_type, return
    the set of summary/study pairs for that concept.

    Returns None when concept_type is unrecognized or the query fails.
    """
    # Map the concept type onto its table name; 'species' is already plural
    # in the schema, the other two pluralize with a trailing 's'.
    if concept_type == 'species':
        table_name = concept_type
    elif concept_type in ('exposure', 'outcome'):
        table_name = concept_type + 's'
    else:
        return None
    pairs = []
    try:
        cursor = context.get_cursor()
        # table_name is drawn from the whitelist above, so interpolating it
        # into the SQL is safe; the concept id is bound as a parameter.
        template = """
            SELECT summary_concepts.*, %s.study_id
            FROM %s, summary_concepts
            WHERE %s.uid = summary_concepts.study_concept_id
            AND %s.concept_id =
            """
        query = template % ((table_name,) * 4)
        cursor.execute(query + " %s ", int(concept_id))
        field_names = [d[0] for d in cursor.description]
        descriptor = dtuple.TupleDescriptor([[f] for f in field_names])
        for raw_row in cursor.fetchall():
            record = dtuple.DatabaseTuple(descriptor, raw_row)
            pairs.append((Summary(context, record['summary_id']),
                study.Study(context, record['study_id'])))
    except Exception as e:
        context.logger.error(e)
        return None
    return SummarySet(concept_type, concept_id, pairs)
def get_summaries_from_study (context, study):
    """
    For a given Study, return a list of all relevant Summary objects.

    One query per methodology; failures are logged and whatever was
    collected so far is returned.
    """
    summaries = []
    try:
        cursor = context.get_cursor()
        for methodology in study.methodologies:
            cursor.execute("""
                SELECT uid
                FROM summaries
                WHERE methodology_id = %s
                ORDER BY uid
                """, methodology.uid)
            summaries.extend([Summary(context, row[0])
                for row in cursor.fetchall()])
    except Exception as e:
        context.logger.error(e)
    return summaries
def get_study_concept (concept_type):
    """For a SummaryConcept, get the concept instance related to the
    Study itself from the appropriate table (which we must determine
    dynamically)."""
    # NOTE(review): this module-level function references 'self' and
    # 'context', neither of which is in scope here -- calling it raises
    # NameError.  It reads like a SummaryConcept method that was moved out
    # of the class without being adapted; the 'concept_type' parameter is
    # also unused.  Needs a caller-aware fix.
    # Figure out which table to use
    table_name = self.CONCEPT_TYPES.get(self.concept_type, '')
    if not table_name:
        return None
    # First, get the related study concept
    this_module = sys.modules[__name__]
    # Tricky: getattr should return one of (Exposure, Outcome, Species),
    # then the parameters passed in should instantiate one of those types
    study_concept = getattr(this_module,
        table_name.capitalize())(context, self.study_concept_id)
    return study_concept
class SummaryConcept (DTable):
    """
    Links a Summary to one of its Study's concepts (exposure, outcome, or
    species): an explicit statement that the concept is part of what the
    summary summarizes.
    """

    # Single-letter type codes mapped to the Summary attribute lists they
    # populate.
    CONCEPT_TYPES = {'e': 'exposures', 'o': 'outcomes', 's': 'species'}

    def __init__ (self, context=None, uid=-1):
        self.uid = uid
        self.study_concept_id = -1
        self.summary_id = -1
        self.concept_type = ''
class Summary (canary.context.Cacheable, DTable):
    """
    A specific summary of the relationships between exposures, outcomes,
    species, and human references for a single Study.

    These are designed to be aggregated into UberSummaries (not yet defined!)
    for a particular Concept across the whole database.
    """

    # Bookkeeping used by canary.context.Cacheable.
    TABLE_NAME = 'summaries'
    CACHE_KEY = 'summary'
    CACHE_CHECK_KEY = 'methodology_id'

    def __init__ (self, context=None, uid=-1):
        # The cache may hand back an already-populated instance; if the
        # cache-check attribute is present, skip re-initialization.
        try:
            if getattr(self, self.CACHE_CHECK_KEY):
                return
        except AttributeError:
            pass
        self.uid = uid
        # Note: the methodology will reference a study_id, so don't store
        # study_id again
        self.methodology_id = 0
        self.public_notes = ''
        # Positive findings
        self.has_susceptibility = False
        self.has_latency = False
        self.has_exposure_risk = False
        self.has_warning = False
        # Negative findings; store separately for easy query
        self.hasnt_susceptibility = False
        self.hasnt_latency = False
        self.hasnt_exposure_risk = False
        self.hasnt_warning = False
        # Note: each of the following references a study_id already also
        self.exposures = []
        self.outcomes = []
        self.species = []
        # HumanStudy references (bare uids; see get_human_ref)
        self.human_refs = []
    def get_human_ref (self, context, ref_id):
        """Get a particular human reference."""
        # human_refs holds bare ids; instantiate a HumanStudy on demand.
        for ref in self.human_refs:
            if ref == ref_id:
                return HumanStudy(context, ref_id)
        return None
    def get_concept (self, context, concept_type, ref_id):
        """Get a particular exp/out/spec by id."""
        # concept_type names one of the list attributes set up in __init__.
        if not concept_type in ('exposures', 'outcomes', 'species'):
            return None
        for c in getattr(self, concept_type):
            if c.study_concept_id == ref_id:
                return c
        return None
    def load (self, context):
        # Loads this summary's own row, its summary_concepts rows, and its
        # human reference ids, keyed on self.uid; no-op for uid == -1.
        if self.uid == -1:
            return
        # Is it already loaded?  Convenience check for client calls
        # don't need to verify loads from the cache.
        if context.config.use_cache:
            try:
                if getattr(self, self.CACHE_CHECK_KEY):
                    # Already loaded
                    return
            except AttributeError:
                # Note already loaded, so continue
                pass
        try:
            cursor = context.get_cursor()
            # Summary, load thyself
            cursor.execute("""
                SELECT *
                FROM summaries
                WHERE uid = %s
                """, self.uid)
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            rows = cursor.fetchall()
            if not rows:
                print 'Tried to load summary with id', self.uid
                print traceback.print_exc()
                raise ValueError
            row = dtuple.DatabaseTuple(desc, rows[0])
            # DTable.set copies every column onto the instance.
            for f in fields:
                self.set(f, row[f])
            # ...and thy concepts
            cursor.execute("""
                SELECT *
                FROM summary_concepts
                WHERE summary_id = %s
                """, self.uid)
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            rows = cursor.fetchall()
            for row in rows:
                row = dtuple.DatabaseTuple(desc, row)
                summary_concept = SummaryConcept(uid=row['uid'])
                summary_concept.summary_id = self.uid
                summary_concept.concept_type = row['concept_type']
                summary_concept.study_concept_id = row['study_concept_id']
                # Route each concept into self.exposures/outcomes/species.
                getattr(self,
                    summary_concept.CONCEPT_TYPES[row['concept_type']]).append(summary_concept)
            # ...and thy references
            cursor.execute("""
                SELECT human_study_id
                FROM summary_human_refs
                WHERE summary_id = %s
                ORDER by uid
                """, self.uid)
            fields = [d[0] for d in cursor.description]
            desc = dtuple.TupleDescriptor([[f] for f in fields])
            rows = cursor.fetchall()
            for row in rows:
                self.human_refs.append(row[0])
            self.cache_set(context)
        #except ValueError:
        #    raise ValueError
        # NOTE(review): bare except silently prints and swallows everything,
        # including the ValueError raised above for a missing row.
        except:
            print traceback.print_exc()
    def save (self, context):
        # Insert (uid == -1) or update this summary's own row, then rewrite
        # the child tables wholesale (delete-then-reinsert), then re-cache.
        cursor = context.get_cursor()
        try:
            if self.uid == -1:
                cursor.execute("""
                    INSERT INTO summaries
                    (methodology_id, public_notes, has_susceptibility,
                    has_latency, has_exposure_risk, has_warning,
                    hasnt_susceptibility, hasnt_latency,
                    hasnt_exposure_risk, hasnt_warning)
                    VALUES (%s, %s, %s,
                        %s, %s, %s,
                        %s, %s,
                        %s, %s)
                    """, (self.methodology_id, self.public_notes, int(self.has_susceptibility),
                    int(self.has_latency), int(self.has_exposure_risk), int(self.has_warning),
                    int(self.hasnt_susceptibility), int(self.hasnt_latency),
                    int(self.hasnt_exposure_risk), int(self.hasnt_warning)
                    ))
                self.uid = self.get_new_uid(context)
                context.logger.info('Summary created with uid %s', self.uid)
            else:
                cursor.execute("""
                    UPDATE summaries
                    SET methodology_id = %s, public_notes = %s, has_susceptibility = %s,
                        has_latency = %s, has_exposure_risk = %s, has_warning = %s,
                        hasnt_susceptibility = %s, hasnt_latency = %s,
                        hasnt_exposure_risk = %s, hasnt_warning = %s
                    WHERE uid = %s
                    """, (self.methodology_id, self.public_notes, int(self.has_susceptibility),
                    int(self.has_latency), int(self.has_exposure_risk), int(self.has_warning),
                    int(self.hasnt_susceptibility), int(self.hasnt_latency),
                    int(self.hasnt_exposure_risk), int(self.hasnt_warning),
                    self.uid))
                context.logger.info('Summary %s updated', self.uid)
            # Update summary_concepts
            cursor.execute("""
                DELETE FROM summary_concepts
                WHERE summary_id = %s
                """, self.uid)
            for key, table in SummaryConcept.CONCEPT_TYPES.items():
                for concept in getattr(self, table):
                    cursor.execute("""
                        INSERT INTO summary_concepts
                        (summary_id, concept_type, study_concept_id)
                        VALUES (%s, %s, %s)
                        """, (self.uid, key, concept.study_concept_id))
            # Update summary_human_refs
            cursor.execute("""
                DELETE FROM summary_human_refs
                WHERE summary_id = %s
                """, self.uid)
            for id in self.human_refs:
                cursor.execute("""
                    INSERT INTO summary_human_refs
                    (summary_id, human_study_id)
                    VALUES (%s, %s)
                    """, (self.uid, id))
            self.cache_set(context)
        except Exception, e:
            print traceback.print_exc()
            context.logger.error(e)
    def delete (self, context):
        # Remove this summary's child rows, then the summary itself, then
        # evict it from the cache; errors are logged, not raised.
        cursor = context.get_cursor()
        try:
            # Get rid of all human studies referenced from this study
            cursor.execute("""
                DELETE FROM summary_human_refs
                WHERE summary_id = %s
                """, self.uid)
            cursor.execute("""
                DELETE FROM summary_concepts
                WHERE summary_id = %s
                """, self.uid)
            cursor.execute("""
                DELETE FROM summaries
                WHERE uid = %s
                """, self.uid)
            self.cache_delete(context)
        except Exception, e:
            context.logger.error(e)
class SummaryRow:
    """
    One user-visible row of a SummarySet table: a plain attribute
    container populated by the rendering code for convenient access.
    """
    pass
class SummarySet:
    """
    A set of studies and summaries pertaining to a particular pair of species,
    exposure, or outcome, in these combinations:
        exposure -> species
        outcome -> species
        species -> exposure
    """

    # Evidence dimensions tracked per concept; each contributes a
    # has_<level> / hasnt_<level> counter attribute on the instance.
    LEVELS = ['susceptibility', 'latency', 'exposure_risk', 'warning']

    def __init__ (self, concept_type, concept_id, summary_pairs=None):
        """
        summary_pairs are (a_summary, its_study) tuples.

        The default is None rather than [] to avoid the shared
        mutable-default-argument trap: every instance gets its own list.
        """
        self.concept_type = concept_type
        self.concept_id = concept_id
        if summary_pairs is None:
            summary_pairs = []
        self.summary_pairs = summary_pairs
        self.study_concepts = {'exposures':{}, 'outcomes':{},
            'species':{}}
        self.concepts = {'exposures':{}, 'outcomes':{}, 'species':{}}
        self.scores = {}
        self.rows = []
        for level in self.LEVELS:
            setattr(self, 'has_%s' % level, 0)
            setattr(self, 'hasnt_%s' % level, 0)

    def set_scores (self, summary, meth):
        # Placeholder kept for interface compatibility (the original body
        # was an empty try/except); scoring happens inline in summarize().
        pass

    def summarize (self, context):
        """
        Generate summary rows suitable for rendering in the UI.

        For every (summary, study) pair, score each related concept at the
        methodology's evidence level, keeping the maximum level seen per
        has_/hasnt_ flag, and collect the contributing record ids.
        Returns self.scores.
        """
        self.study_concept = Concept(context, self.concept_id)
        for summary, st in self.summary_pairs:
            meth = study.Methodology()
            meth.uid = summary.methodology_id
            meth.load(context)
            level = meth.evidence_level()
            for attr, concept_type in (
                ('exposures', 'Exposure'),
                ('outcomes', 'Outcome'),
                ('species', 'Species')):
                for con in getattr(summary, attr):
                    concept = getattr(study, concept_type)()
                    concept.uid = con.study_concept_id
                    concept.load(context)
                    score_key = '%s:%s' % (concept_type, concept.term)
                    if score_key not in self.scores:
                        self.scores[score_key] = {}
                    for l in self.LEVELS:
                        store_record_id = False
                        has_level = 'has_%s' % l
                        if getattr(summary, has_level):
                            store_record_id = True
                            # Keep the strongest evidence level seen so far.
                            try:
                                self.scores[score_key][has_level] = \
                                    max(self.scores[score_key][has_level], level)
                            except KeyError:
                                self.scores[score_key][has_level] = level
                        hasnt_level = 'hasnt_%s' % l
                        if getattr(summary, hasnt_level):
                            store_record_id = True
                            try:
                                self.scores[score_key][hasnt_level] = \
                                    max(self.scores[score_key][hasnt_level], level)
                            except KeyError:
                                self.scores[score_key][hasnt_level] = level
                        if store_record_id:
                            # Save a list of record ids for easy searching later
                            record_set_name = '%s_recs' % l
                            try:
                                self.scores[score_key][record_set_name].append(st.record_id)
                            except KeyError:
                                self.scores[score_key][record_set_name] = [st.record_id]
        return self.scores
| {
"repo_name": "dchud/sentinel",
"path": "canary/summary.py",
"copies": "1",
"size": "15704",
"license": "mit",
"hash": -2402277976975921000,
"line_mean": 34.5294117647,
"line_max": 98,
"alpha_frac": 0.5038206826,
"autogenerated": false,
"ratio": 4.4550354609929075,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5458856143592907,
"avg_score": null,
"num_lines": null
} |
# $Id$
import sys,os
import tempfile
from shutil import rmtree
import pkg_resources
import helpers as _helpers
import re
class Compression(object):
    """Base class for the compression helpers; pulls the tarfile module
    into the module namespace so subclasses can build tar archives."""

    def __init__(self):
        self.log = _helpers.get_logger(__name__)
        try:
            global tarfile
            import tarfile
        except ImportError as e:
            self.log.warn(
                "tarfile module missing, gzip compression not available"
            )
            return None
class GzipCompression(Compression):
    """Mixin that pulls the gzip module into the module namespace,
    warning (without raising) when it is unavailable."""

    def __init__(self):
        self.log = _helpers.get_logger(__name__)
        Compression.__init__(self)
        try:
            global gzip
            import gzip
        except ImportError as e:
            self.log.warn(
                "gzip module missing, gzip compression not available"
            )
            return None
class BZ2Compression(Compression):
    """Mixin that pulls the bz2 module into the module namespace,
    warning (without raising) when it is unavailable."""

    def __init__(self):
        # Compression.__init__ sets self.log before we use it below.
        Compression.__init__(self)
        try:
            global bz2
            import bz2
            from bz2 import BZ2File
        except ImportError as e:
            self.log.warn(
                'bz2 module missing, bzip2 compression not available'
            )
            return None
class IOCompressionStream(Compression):
    """Base class for open/close-style compressed output streams.

    Validates the requested output path (it must not already exist) and
    ensures its parent directory exists; subclasses provide the actual
    open_stream()/close_stream() implementations.
    """
    def __init__(self, **kwargs):
        self.log = _helpers.get_logger(__name__)
        Compression.__init__(self)
        self.stream = None
        self.output_path = kwargs.get('output_path', None)
        # NOTE(review): assert is stripped under `python -O`; this
        # validation would be safer as an explicit raise.
        assert not os.path.exists(self.output_path), \
            '%s already exists!' % self.output_path
        _helpers.ensure_dir(os.path.dirname(self.output_path))
    def open_stream(self):
        """
        Open a compressed stream for writing. Must be subclassed.
        """
        # Bug fix: the original raised a plain string, which is itself a
        # TypeError in Python >= 2.6; NotImplementedError is the correct
        # abstract-method signal.
        raise NotImplementedError(
            "compression.IOCompressionStream.open_stream must be subclassed.")
    def close_stream(self):
        """
        Close a compression stream. Must be subclassed.
        """
        raise NotImplementedError(
            "compression.IOCompressionStream.close_stream must be subclassed.")
class GzipIOCompressionStream(IOCompressionStream,GzipCompression):
    """Gzip-backed IO compression stream: open_stream()/close_stream()
    write through a gzip file handle at self.output_path."""
    def __init__(self, **kwargs):
        self.log = _helpers.get_logger(__name__)
        IOCompressionStream.__init__(self, **kwargs)
        GzipCompression.__init__(self)
    def open_stream(self):
        """
        Open a Gzip compression stream for writing. Returns an IO file
        handle object.
        """
        self.stream = gzip.open(self.output_path, 'w')
        self.log.debug('%s gzip io stream opened for writing' % self.stream)
        return self.stream
    def close_stream(self):
        """
        Close a Gzip compression stream. Returns True.
        """
        # FIX ME: not sure what to catch here
        self.stream.close()
        self.log.debug('%s gzip io stream closed.' % self.stream)
        return True
class BZ2IOCompressionStream(IOCompressionStream,BZ2Compression):
    """Bzip2-backed IO compression stream: open_stream()/close_stream()
    write through a bz2.BZ2File at self.output_path."""
    def __init__(self, **kwargs):
        self.log = _helpers.get_logger(__name__)
        IOCompressionStream.__init__(self, **kwargs)
        BZ2Compression.__init__(self)
    def open_stream(self):
        """
        Open a Bzip2 compression stream for writing. Returns an IO file
        handle object.
        """
        self.stream = bz2.BZ2File(self.output_path, 'w')
        self.log.debug(
            '%s bzip2 io stream opened for writing' % self.output_path
        )
        return self.stream
    def close_stream(self):
        """
        Close a Bzip2 compression stream. Returns True.
        """
        # FIX ME: not sure what to catch here
        self.stream.close()
        self.log.debug('%s bzip2 io stream closed.' % self.output_path)
        return True
class FileCompression(Compression):
    """Template base for one-shot file/directory (de)compression.

    Subclasses override _compress_path/_decompress_path; compress() and
    decompress() wrap them with pre/post hooks and optional removal of
    the source path."""
    def __init__(self, **kwargs):
        Compression.__init__(self)
        self.log = _helpers.get_logger(__name__)
        self.source_path = kwargs.get('source_path', None)
        self.dest_dir = kwargs.get(
            'dest_dir', '%s/' % os.path.dirname(self.source_path)
        )
        self.compressed_path = None
        self.remove_source = kwargs.get('remove_source', False)
        self.compress_success = None
        self.decompress_success = None
        # NOTE(review): compressed_path is assigned None twice (above and
        # here); the duplicate is harmless but redundant.
        self.compressed_path = None
        self.decompressed_path = None
        # validations
        # NOTE(review): the '%s missing' message below never fills its
        # placeholder; also, asserts vanish under `python -O`.
        assert self.source_path, '%s missing'
        assert isinstance(self.remove_source, bool), \
            "remove_source must be True/False"
        assert os.path.exists(self.source_path), \
            '%s does not exist, skipping compression' % self.source_path
        # real paths please
        self.source_path = os.path.realpath(self.source_path)
        self.dest_dir = os.path.realpath(self.dest_dir)
        _helpers.ensure_dir(self.dest_dir)
    def compress(self):
        """
        Call all methods to perform compression.
        Returns the compressed path on success, else None.
        """
        self._pre_compress()
        self._compress_path()
        self._post_compress()
        if self.compress_success:
            return self.compressed_path
        else:
            return None
    def decompress(self):
        """
        Call all methods to perform decompression.
        Returns the decompressed path on success, else None.
        """
        self._pre_decompress()
        self._decompress_path()
        self._post_decompress()
        if self.decompress_success:
            return self.decompressed_path
        else:
            return None
    def _pre_compress(self):
        """
        This method is run before compression. Hook for subclasses.
        """
        pass
    def _post_compress(self):
        """
        This method is run after compression; removes the source when
        remove_source was requested.
        """
        if self.remove_source:
            self._remove_source_path()
    def _pre_decompress(self):
        """
        This method is run before decompression. Hook for subclasses.
        """
        pass
    def _post_decompress(self):
        """
        This method is run after decompression; removes the source when
        remove_source was requested.
        """
        if self.remove_source:
            self._remove_source_path()
    def _compress_path(self):
        """
        Compress directories or files. Must be subclassed.
        """
        self.log.warn('_compress_path must be subclassed')
    def _decompress_path(self):
        """
        De-compress directories or files. Must be subclassed.
        """
        pass
    def _remove_source_path(self):
        # FIX ME: need better checks here... once we have a config to check
        # only delete if the file exists within the holland path or something?
        assert self.dest_dir != '/', 'trying to remove / (root)?'
        #try:
        #    rmtree(self.source_path)
        #    self.log.info('removed path %s' % self.source_path)
        #except IOError, e:
        #    self.log.error('failed to remove %s: %s' % (self.source_path, e))
        self.log.warn(
            'FIX ME -> compression.Compression._remove_source_path need ' +\
            'to properly write this method.'
        )
        if os.path.isfile(self.source_path):
            os.remove(self.source_path)
        elif os.path.isdir(self.source_path):
            rmtree(self.source_path)
        self.log.debug('removed path %s' % self.source_path)
class GzipFileCompression(FileCompression,GzipCompression):
    """Gzip (and tar+gzip) compression/decompression of files and
    directories."""
    def __init__(self, **kwargs):
        FileCompression.__init__(self, **kwargs)
        GzipCompression.__init__(self)
        self.log = _helpers.get_logger(__name__)
    def _compress_path(self):
        """
        Compress directories or files using Gzip/Zlib libraries.

        A regular file <name> becomes <dest_dir>/<name>.gz; a directory
        becomes <dest_dir>/<name>.tar.gz.  Sets compress_success and
        compressed_path on success.
        """
        if os.path.isfile(self.source_path):
            self.compressed_path = os.path.join(
                self.dest_dir, "%s.gz" % os.path.basename(self.source_path)
            )
            try:
                # Binary modes: the source may hold arbitrary bytes.
                f_in = open(self.source_path, "rb")
                f_out = gzip.open(self.compressed_path, "wb")
                f_out.write(f_in.read())
                f_in.close()
                f_out.close()
                self.log.debug(
                    "%s gzip'd as %s" % ( self.source_path,
                                          self.compressed_path )
                )
                self.compress_success = True
            except IOError:
                self.log.debug("failed to gzip %s" % self.source_path)
        elif os.path.isdir(self.source_path):
            self.compressed_path = os.path.join(
                self.dest_dir, "%s.tar.gz" % \
                os.path.basename(self.source_path)
            )
            try:
                t = tarfile.open(name=self.compressed_path, mode = 'w:gz')
                t.add(self.source_path)
                t.close()
                self.log.debug(
                    "%s gzip'd as %s" % ( self.source_path,
                                          self.compressed_path )
                )
                self.compress_success = True
            except IOError:
                self.log.debug("failed to gzip %s" % self.source_path)
        else:
            self.log.warn(
                '%s is not a regular file/directory. ignoring compression' %\
                self.source_path
            )
    def _decompress_path(self):
        """
        De-compress directories or files using Gzip/Zlib libraries.

        Strips a trailing .gz/.gzip (or .tar.gz) suffix to form the
        target path; tar payloads are additionally unpacked and the
        intermediate tar file removed.  Sets decompress_success and
        decompressed_path on success.
        """
        self.decompressed_path = os.path.join(
            self.dest_dir, os.path.basename(self.source_path)
        )
        # Bug fix: str.endswith() takes literal suffixes, not regexes --
        # the old '\.gz'-style patterns (with a backslash) never matched,
        # and the third branch repeated '.gz' instead of '.gzip'.
        if self.decompressed_path.endswith('.tar.gz'):
            self.decompressed_path = self.decompressed_path.split('.gz')[0]
        elif self.decompressed_path.endswith('.gz'):
            self.decompressed_path = self.decompressed_path.split('.gz')[0]
        elif self.decompressed_path.endswith('.gzip'):
            self.decompressed_path = self.decompressed_path.split('.gzip')[0]
        self.decompressed_path = _helpers.protected_path(
            self.decompressed_path
        )
        try:
            f_in = gzip.open(self.source_path, "rb")
            f_out = open(self.decompressed_path, "wb")
            f_out.write(f_in.read())
            f_in.close()
            f_out.close()
            # If the payload is itself a tar archive, unpack it and drop
            # the intermediate tar file.
            if tarfile.is_tarfile(self.decompressed_path):
                tar_file = self.decompressed_path
                self.decompressed_path = self.decompressed_path.split('.tar')[0]
                self.decompressed_path = _helpers.protected_path(
                    self.decompressed_path
                )
                try:
                    t = tarfile.open(name=tar_file, mode = 'r:')
                    t.extractall(self.decompressed_path)
                    t.close()
                    os.remove(tar_file)
                except IOError as e:
                    self.log.error(
                        "failed to untar %s (%s)" %\
                        (self.source_path, e)
                    )
            self.log.debug(
                "%s gunzip'd as %s" % ( self.source_path,
                                        self.decompressed_path )
            )
            self.decompress_success = True
        except IOError as e:
            self.log.error("failed to gunzip %s (%s)" % (self.source_path, e))
class BZ2FileCompression(FileCompression,BZ2Compression):
    """Bzip2 (and tar+bzip2) compression/decompression of files and
    directories."""
    def __init__(self, **kwargs):
        FileCompression.__init__(self, **kwargs)
        BZ2Compression.__init__(self)
        self.log = _helpers.get_logger(__name__)
    def _compress_path(self):
        """
        Compress directories or files using bz2(Bzip2) libraries.

        A regular file <name> becomes <dest_dir>/<name>.bz2; a directory
        becomes <dest_dir>/<name>.tar.bz2.  Sets compress_success and
        compressed_path on success.
        """
        if os.path.isfile(self.source_path):
            self.compressed_path = os.path.join(
                self.dest_dir, "%s.bz2" % os.path.basename(self.source_path)
            )
            try:
                # Binary modes: the source may hold arbitrary bytes.
                f_in = open(self.source_path, "rb")
                f_out = bz2.BZ2File(self.compressed_path, "w")
                f_out.write(f_in.read())
                f_in.close()
                f_out.close()
                self.log.debug(
                    "%s bzip2'd as %s" % ( self.source_path,
                                           self.compressed_path )
                )
                self.compress_success = True
            except IOError:
                self.log.error("failed to bzip2 %s" % self.source_path)
        elif os.path.isdir(self.source_path):
            self.compressed_path = os.path.join(
                self.dest_dir, "%s.tar.bz2" % \
                os.path.basename(self.source_path)
            )
            try:
                t = tarfile.open(name=self.compressed_path, mode = 'w:bz2')
                t.add(self.source_path)
                t.close()
                self.log.debug(
                    "%s bzip2'd as %s" % ( self.source_path,
                                           self.compressed_path )
                )
                self.compress_success = True
            except IOError:
                self.log.error("failed to bzip2 %s" % self.source_path)
        else:
            self.log.warn(
                '%s is not a regular file/directory. ignoring compression' %\
                self.source_path
            )
    def _decompress_path(self):
        """
        De-compress directories or files using bz2(Bzip2) libraries.

        Strips a trailing .bz2/.bzip2 suffix to form the target path; tar
        payloads are additionally unpacked and the intermediate tar file
        removed.  Sets decompress_success and decompressed_path on success.
        """
        # Bug fix: the original appended an extra '.bz2' to the target name
        # and used regex-style '\.bz2' suffixes that str.endswith() never
        # matched; mirror the gzip implementation instead.
        self.decompressed_path = os.path.join(
            self.dest_dir, os.path.basename(self.source_path)
        )
        if self.decompressed_path.endswith('.bz2'):
            self.decompressed_path = self.decompressed_path.split('.bz2')[0]
        elif self.decompressed_path.endswith('.bzip2'):
            self.decompressed_path = self.decompressed_path.split('.bzip2')[0]
        self.decompressed_path = _helpers.protected_path(
            self.decompressed_path
        )
        try:
            f_in = bz2.BZ2File(self.source_path, "r")
            f_out = open(self.decompressed_path, "wb")
            f_out.write(f_in.read())
            f_in.close()
            f_out.close()
            # If the payload is itself a tar archive, unpack it and drop
            # the intermediate tar file.
            if tarfile.is_tarfile(self.decompressed_path):
                tar_file = self.decompressed_path
                self.decompressed_path = self.decompressed_path.split('.tar')[0]
                self.decompressed_path = _helpers.protected_path(
                    self.decompressed_path
                )
                try:
                    t = tarfile.open(name=tar_file, mode = 'r:')
                    t.extractall(self.decompressed_path)
                    t.close()
                    os.remove(tar_file)
                except IOError as e:
                    self.log.error(
                        "failed to untar %s (%s)" %\
                        (self.source_path, e)
                    )
            self.log.debug(
                "%s bunzip'd as %s" % ( self.source_path,
                                        self.decompressed_path )
            )
            self.decompress_success = True
        except IOError as e:
            self.log.error("failed to bunzip %s (%s)" % (self.source_path, e))
| {
"repo_name": "m00dawg/holland",
"path": "holland/legacy/compression.py",
"copies": "1",
"size": "15787",
"license": "bsd-3-clause",
"hash": 1919232541384582400,
"line_mean": 33.3217391304,
"line_max": 83,
"alpha_frac": 0.4937606892,
"autogenerated": false,
"ratio": 4.320470717022441,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.531423140622244,
"avg_score": null,
"num_lines": null
} |
# $Id$
import sys, re, os, Cookie, errno
try:
import cStringIO as StringIO
except ImportError:
import StringIO
"""Object-oriented CGI interface."""
class Error(Exception):
    """Base class for all exceptions raised by this module."""
class SequencingError(Error):
    """Raised when functions are called out of order.

    For example, when a function that alters the output headers is called
    after the headers have already been sent.
    """
_url_encre = re.compile(r"[^A-Za-z0-9_.!~*()-]") # RFC 2396 section 2.3
# Matches one %XX hex escape for url_decode.
_url_decre = re.compile(r"%([0-9A-Fa-f]{2})")
_html_encre = re.compile("[&<>\"'+]")
# '+' is encoded because it is special in UTF-7, which the browser may select
# automatically if the content-type header does not specify the character
# encoding. This is paranoia and is not bulletproof, but it does no harm. See
# section 4 of www.microsoft.com/technet/security/news/csoverv.mspx
# Bug fix: the entity replacement strings had been mangled (each character
# mapped to itself, and the double-quote entry was broken syntax); restore
# the HTML entity encodings html_encode() is documented to produce.
_html_encodes = { "&": "&amp;", "<": "&lt;", ">": "&gt;", "\"": "&quot;",
                  "'": "&#39;", "+": "&#43;" }
def html_encode(raw):
    """Return the string parameter HTML-encoded.

    Specifically, the following characters are encoded as entities:
        & < > " ' +
    """
    if not isinstance(raw, (str, unicode)):
        raw = str(raw)
    def _entity(match):
        return _html_encodes[match.group(0)]
    return re.sub(_html_encre, _entity, raw)
def url_encode(raw):
    """Return the string parameter URL-encoded.

    Every character outside the RFC 2396 unreserved set becomes %XX.
    """
    if not isinstance(raw, (str, unicode)):
        raw = str(raw)
    def _escape(match):
        return "%%%02X" % ord(match.group(0))
    return re.sub(_url_encre, _escape, raw)
def url_decode(enc):
    """Return the string parameter URL-decoded (including '+' -> ' ')."""
    def _unhex(match):
        return chr(int(match.group(1), 16))
    return re.sub(_url_decre, _unhex, enc.replace("+", " "))
__UNDEF__ = []  # unique sentinel: identity test tells "argument omitted" from any real value
def traceback(req, html=0):
    # Report the current exception for the given request.
    # NOTE(review): this function shadows the stdlib 'traceback' module name.
    exc = sys.exc_info()
    import cgitb
    if html:
        # Try to replace whatever was queued with a 500 status and a cgitb
        # HTML page; if headers/output were already sent, just append.
        try:
            req.clear_headers()
            req.set_header("Status", "500 Internal Server Error")
            req.set_header("Content-Type", "text/html; charset=iso-8859-1")
            req.clear_output()
        except SequencingError:
            pass
        cgitb.Hook(file=req)(*exc)
    # Always record a plain-text traceback via the request's error log.
    s = StringIO.StringIO()
    cgitb.Hook(file=s, format="text")(*exc)
    req.error(s.getvalue())
class Request(object):
"""All the information about a CGI-style request, including how to respond."""
"""Headers are buffered in a list before being sent. They are either sent
on request, or when the first part of the body is sent. If requested, the
body output can be buffered as well."""
def __init__(self, handler_type):
"""Create a Request object which uses handler_type as its handler."""
"""An object of type handler_type, which should be a subclass of
Handler, will be used to handle requests."""
self._handler_type = handler_type
    def _init(self):
        """Reset all per-request state before handling a new request."""
        self._doneHeaders = 0
        self._headers = []
        self._bufferOutput = 1
        self._output = StringIO.StringIO()
        self._pos = 0
        self.closed = 0
        self._encoding = self._inputencoding = self._form_encoding = None
        # Deleting params re-arms the lazy CGI parsing in __getattr__.
        try:
            del self.params
        except AttributeError:
            pass
        self.cookies = Cookie.SimpleCookie()
        if "HTTP_COOKIE" in self.environ:
            self.cookies.load(self.environ["HTTP_COOKIE"])
        self.aborted = 0
        # Default header; callers may replace it via set_header().
        self.set_header("Content-Type", "text/html; charset=iso-8859-1")
    def __getattr__(self, name):
        # Lazily parse the CGI data the first time .params is accessed;
        # _init() deletes self.params so each request re-parses.
        if name == "params":
            self.params = {}
            self._read_cgi_data(self.environ, self.stdin)
            return self.__dict__["params"]
        raise AttributeError("%s instance has no attribute %s" %
            (self.__class__.__name__, repr(name)))
def close(self):
"""Closes the output stream."""
if not self.closed:
self.flush()
self._close()
self.closed = 1
    def _check_open(self):
        """Raise ValueError if this request's output stream is closed."""
        if self.closed:
            raise ValueError("I/O operation on closed file")
    def output_headers(self):
        """Output the list of headers.

        Raises SequencingError if the headers were already sent."""
        self._check_open()
        if self._doneHeaders:
            raise SequencingError("output_headers() called twice")
        # CRLF after each header plus a blank line terminating the block.
        self._write("".join(["%s: %s\r\n" % pair for pair in self._headers])
            + "\r\n")
        self._doneHeaders = 1
    def clear_headers(self):
        """Clear the list of headers.

        Raises SequencingError if the headers were already sent."""
        self._check_open()
        if self._doneHeaders:
            raise SequencingError("cannot clear_headers() after output_headers()")
        self._headers = []
    def add_header(self, hdr, val):
        """Add a header to the list of headers.

        Raises SequencingError if the headers were already sent."""
        self._check_open()
        if self._doneHeaders:
            raise SequencingError(
                "cannot add_header(%s) after output_headers()" % repr(hdr))
        self._headers.append((hdr, val))
    def set_header(self, hdr, val):
        """Add a header to the list of headers, replacing any existing values."""
        self._check_open()
        if self._doneHeaders:
            raise SequencingError(
                "cannot set_header(%s) after output_headers()" % repr(hdr))
        # Replace = remove all existing values, then append the new one.
        self.del_header(hdr)
        self._headers.append((hdr, val))
def get_header(self, hdr, index=0):
"""Retrieve a header from the list of headers."""
i = 0
hdr = hdr.lower()
for pair in self._headers:
if pair[0].lower() == hdr:
if i == index:
return pair[1]
i += 1
return None
def del_header(self, hdr):
"""Removes all values for a header from the list of headers."""
self._check_open()
if self._doneHeaders:
raise SequencingError(
"cannot del_header(%s) after output_headers()" % repr(hdr))
hdr = hdr.lower()
while 1:
for s in self._headers:
if s[0].lower() == hdr:
self._headers.remove(s)
break
else:
break
    def append_header_value(self, hdr, val):
        """Adds a value to a header that contains a comma-separated list of values.
        If the value is already in the list, it is not added again. If the header
        does not exist, it is created containing the single value specified.
        Returns the header's resulting value string."""
        current = self.get_header(hdr)
        if not current:
            self.set_header(hdr, val)
            return val
        # Case-insensitive membership test over the comma-separated list.
        if val.lower() in [x.strip().lower() for x in current.split(",")]:
            return current
        val = current + ", " + val
        self.set_header(hdr, val)
        return val
    def set_buffering(self, f):
        """Specifies whether or not body output is buffered."""
        self._check_open()
        # Turning buffering off flushes anything already buffered.
        if self._output.tell() > 0 and not f:
            self.flush()
        self._bufferOutput = f
    def set_encoding(self, encoding, inputencoding=__UNDEF__):
        """Set the output (and optionally input) character encoding."""
        self._encoding = encoding
        # __UNDEF__ is a sentinel: passing inputencoding=None is meaningful,
        # so only overwrite it when the caller actually supplied a value.
        if inputencoding is not __UNDEF__:
            self._inputencoding = inputencoding
    def get_encoding(self):
        """Return the current output encoding (None when not set)."""
        return self._encoding
    def set_form_encoding(self, encoding):
        """Set the encoding used to decode submitted form data."""
        self._form_encoding = encoding
    def get_form_encoding(self):
        """Return the encoding used to decode submitted form data."""
        return self._form_encoding
    def flush(self):
        """Flushes the body output (sending headers first if needed)."""
        self._check_open()
        if not self._doneHeaders:
            self.output_headers()
        self._write(self._output.getvalue())
        # Account for the bytes just sent, then empty the buffer.
        self._pos += self._output.tell()
        self._output.seek(0, 0)
        self._output.truncate()
        self._flush()
    def clear_output(self):
        """Discards the contents of the body output buffer.

        Only meaningful while buffering; already-sent data cannot be
        recalled."""
        self._check_open()
        if not self._bufferOutput:
            raise SequencingError("cannot clear output when not buffering")
        self._output.seek(0, 0)
        self._output.truncate()
def error(self, s):
"""Records an error message from the program."""
"""The output is logged or otherwise stored on the server. It does not
go to the client.
Must be overridden by the sub-class."""
raise NotImplementedError("error must be overridden")
def _write(self, s):
"""Sends some data to the client."""
"""Must be overridden by the sub-class."""
raise NotImplementedError("_write must be overridden")
def _flush(self):
"""Flushes data to the client."""
"""May be overridden by the sub-class."""
pass
def _close(self):
"""Closes the output stream."""
"""May be overridden by the sub-class."""
pass
    def write(self, s):
        """Sends some data to the client (buffered or direct)."""
        self._check_open()
        if self._encoding:
            # Decode byte strings with the declared input encoding (if any),
            # then encode everything to the configured output encoding.
            if not isinstance(s, unicode):
                if self._inputencoding:
                    s = unicode(s, self._inputencoding)
                else:
                    s = unicode(s)
            s = s.encode(self._encoding)
        else:
            s = str(s)
        if self._bufferOutput:
            self._output.write(s)
        else:
            # Unbuffered: headers must go out before the first body byte.
            if not self._doneHeaders:
                self.output_headers()
            self._pos += len(s)
            self._write(s)
    def tell(self):
        """Return the current output position: bytes sent plus bytes buffered."""
        return self._pos + self._output.tell()
    def seek(self, offset, whence=0):
        """Seek within the body output (0=absolute, 1=relative, 2=from end).

        Cannot seek back into data that has already been sent."""
        self._check_open()
        currentpos = self._pos + self._output.tell()
        currentlen = self._pos + len(self._output.getvalue())
        if whence == 0:
            newpos = offset
        elif whence == 1:
            newpos = currentpos + offset
        elif whence == 2:
            newpos = currentlen + offset
        else:
            raise ValueError("Bad 'whence' argument to seek()")
        if newpos == currentpos:
            return
        elif newpos < self._pos:
            raise ValueError("Cannot seek backwards into already-sent data")
        elif newpos <= currentlen:
            self._output.seek(newpos - self._pos)
        else:
            if self._bufferOutput:
                # Buffered: StringIO permits seeking past the end.
                self._output.seek(newpos - self._pos)
            else:
                # Unbuffered: emit NUL padding to advance the position.
                self._write("\0" * (newpos - self._pos))
    def _mergevars(self, encoded):
        """Parse variable-value pairs from a URL-encoded string."""
        """Extract the variable-value pairs from the URL-encoded input string and
        merge them into the output dictionary. Variable-value pairs are separated
        from each other by the '&' character. Missing values are allowed.
        If the variable name ends with a '*' character, then the value that is
        placed in the dictionary will be a list. This is useful for multiple-value
        fields."""
        for pair in encoded.split("&"):
            if pair == "":
                continue
            nameval = pair.split("=", 1)
            name = url_decode(nameval[0])
            if self._form_encoding:
                name = name.decode(self._form_encoding)
            if len(nameval) > 1:
                val = url_decode(nameval[1])
                if self._form_encoding:
                    val = val.decode(self._form_encoding)
            else:
                val = None
            # '!'-suffixed names are MIME-entity-only (see _mergemime);
            # they are deliberately ignored in URL-encoded data.
            if name.endswith("!") or name.endswith("!*"):
                continue
            if name.endswith("*"):
                # '*' names accumulate into a list of values.
                if name in self.params:
                    self.params[name].append(val)
                else:
                    self.params[name] = [val]
            else:
                self.params[name] = val
  def _mergemime(self, contenttype, encoded):
    """Parses variable-value pairs from a MIME-encoded input stream."""
    """Extract the variable-value pairs from the MIME-encoded input file and
    merge them into the output dictionary.
    If the variable name ends with a '*' character, then the value that is
    placed in the dictionary will be a list. This is useful for multiple-value
    fields. If the variable name ends with a '!' character (before the '*' if
    present) then the value will be a mime.Entity object."""
    # 'mime' is a sibling module of this package.
    import mime
    # Re-attach the Content-Type so the multipart boundary is parseable.
    headers = "Content-Type: %s\n" % contenttype
    for entity in mime.Entity(encoded.read(), mime=1, headers=headers).entities:
      # Only form-data parts carry form fields.
      if not entity.content_disposition:
        continue
      if entity.content_disposition[0] != 'form-data':
        continue
      name = entity.content_disposition[1].get("name")
      if self._form_encoding:
        name = mime.decodeword(name)
      if name[-1:] == "!" or name[-2:] == "!*":
        # '!' suffix: caller wants the raw mime.Entity (e.g. uploads).
        value = entity
      else:
        value = entity.body
        if self._form_encoding:
          value = value.decode(self._form_encoding)
      if name[-1:] == "*":
        # Multi-value field: accumulate into a list.
        self.params.setdefault(name, []).append(value)
      else:
        self.params[name] = value
  def _read_cgi_data(self, environ, inf):
    """Read input data from the client and set up the object attributes.

    Query-string parameters are merged first; for POST requests the body
    is then merged on top (multipart or URL-encoded).
    """
    if "QUERY_STRING" in environ:
      self._mergevars(environ["QUERY_STRING"])
    if environ.get("REQUEST_METHOD") == "POST":
      if environ.get("CONTENT_TYPE", "").startswith("multipart/form-data"):
        self._mergemime(environ["CONTENT_TYPE"], inf)
      else:
        # If CONTENT_LENGTH is absent, read(-1) reads to EOF.
        self._mergevars(inf.read(int(environ.get("CONTENT_LENGTH", "-1"))))
  def traceback(self):
    """Log the current exception and send a generic 500 error page.

    Uses the module-level traceback() helper (defined elsewhere in this
    file) for server-side logging; the client only sees a generic page.
    """
    traceback(self)
    try:
      self.clear_headers()
      self.set_header("Status", "500 Internal Server Error")
      self.set_header("Content-Type", "text/html; charset=iso-8859-1")
      self.clear_output()
    except SequencingError:
      # Headers or body already sent - too late to replace them.
      pass
    self.write("""\
<html><head><title>Error</title></head>
<body><h1>Error</h1>
<p>Sorry, an error occurred. Please try again later.</p>
</body></html>""")
class GZipMixIn(object):
  """Mix-in for Request classes that transparently gzip-compresses output.

  Compression is negotiated from the client's Accept-Encoding header at
  the time headers are emitted; until then output passes through
  unchanged.  The gzip member framing (header, deflate body, CRC32 +
  length trailer) is produced by hand around a raw zlib stream.
  """
  def _init(self, *args, **kwargs):
    self._gzip = None           # active zlib compressor, or None
    self._gzip_level = 6        # requested compression level (0 = off)
    super(GZipMixIn, self)._init(*args, **kwargs)
  def _close(self):
    parent = super(GZipMixIn, self)
    if self._gzip:
      import struct
      # Flush the deflate stream, then append the gzip trailer:
      # little-endian CRC32 and uncompressed length.
      parent._write(self._gzip.flush(self._gzip_zlib.Z_FINISH))
      parent._write(
        struct.pack("<II", self._gzip_crc & 0xffffffff, self._gzip_length))
      parent._flush()
      self._gzip = None
    parent._close()
  def gzip_level(self, level=6):
    """Enable/disable gzip output compression."""
    if self._gzip_level == level:
      return
    # Compression choice is baked into the headers; cannot change after.
    if self._doneHeaders:
      raise SequencingError("Cannot adjust compression - headers already sent")
    self._gzip_level = level
  def _write(self, s):
    parent = super(GZipMixIn, self)
    if not self._gzip:
      parent._write(s)
      return
    # Track CRC and uncompressed length for the gzip trailer.
    self._gzip_crc = self._gzip_zlib.crc32(s, self._gzip_crc)
    self._gzip_length += len(s)
    parent._write(self._gzip.compress(s))
  def output_headers(self):
    parent = super(GZipMixIn, self)
    if self._gzip_level == 0:
      parent.output_headers()
      return
    self.append_header_value("Vary", "Accept-Encoding")
    gzip_ok = 0
    # Parse Accept-Encoding: comma-separated encodings, each optionally
    # followed by ';q=value'; accept gzip unless q is exactly "0".
    if "HTTP_ACCEPT_ENCODING" in self.environ:
      encodings = [[a.strip() for a in x.split(";", 1)]
        for x in self.environ["HTTP_ACCEPT_ENCODING"].split(",")]
      for encoding in encodings:
        if encoding[0].lower() == "gzip":
          if len(encoding) == 1:
            gzip_ok = 1
            break
          else:
            q = [x.strip() for x in encoding[1].split("=")]
            if len(q) == 2 and q[0].lower() == "q" and q[1] != "0":
              gzip_ok = 1
              break
    if gzip_ok:
      try:
        import zlib
        self.append_header_value("Content-Encoding", "gzip")
        # Length of the compressed stream is unknown up front.
        self.del_header("Content-Length")
        parent.output_headers()
        # wbits=-15 produces a raw deflate stream (no zlib wrapper),
        # as required inside a gzip member.
        self._gzip = zlib.compressobj(self._gzip_level, 8, -15)
        self._gzip_zlib = zlib
        self._gzip_crc = self._gzip_length = 0
        # Standard 10-byte gzip header (magic, deflate, no flags).
        parent._write("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03")
        return
      except ImportError:
        pass
    parent.output_headers()
  def _flush(self):
    parent = super(GZipMixIn, self)
    if self._gzip:
      # Z_SYNC_FLUSH emits all pending output without ending the stream.
      parent._write(self._gzip.flush(self._gzip_zlib.Z_SYNC_FLUSH))
    parent._flush()
class CGIRequest(Request):
  """An implementation of Request which uses the standard CGI interface."""
  def _init(self):
    # Capture the process-wide CGI streams/environment.
    self.__out = sys.stdout
    self.__err = sys.stderr
    self.environ = os.environ
    self.stdin = sys.stdin
    super(CGIRequest, self)._init()
  def process(self):
    """Read the CGI input and create and run a handler to handle the request."""
    self._init()
    try:
      handler = self._handler_type()
    except:
      # Handler construction failed: report via the request itself.
      self.traceback()
    else:
      try:
        handler.process(self)
      except:
        # Let the handler render its own error report.
        handler.traceback(self)
    self.close()
  def error(self, s):
    # Server-side error log goes to stderr (picked up by the web server).
    self.__err.write(s)
  def _close(self):
    self.__out.close()
  def _write(self, s):
    if not self.aborted:
      try:
        self.__out.write(s)
      except IOError, x:
        # Ignore EPIPE, caused by the browser having gone away
        if x[0] != errno.EPIPE:
          raise
        # Remember the abort so later writes become no-ops.
        self.aborted = 1
  def _flush(self):
    if not self.aborted:
      try:
        self.__out.flush()
      except IOError, x:
        # Ignore EPIPE, caused by the browser having gone away
        if x[0] != errno.EPIPE:
          raise
        self.aborted = 1
class GZipCGIRequest(GZipMixIn, CGIRequest):
  """CGIRequest variant that transparently gzip-compresses its output."""
class Handler(object):
  """Base class for objects that service a single request."""

  def process(self, req):
    """Service the request described by the Request object 'req'.

    Sub-classes must override this method.
    """
    raise NotImplementedError("handler process function must be overridden")

  def traceback(self, req):
    """Report an unhandled exception; 'req' is the active Request."""
    req.traceback()
class DebugHandlerMixIn(object):
  """Mix-in that renders tracebacks to the client instead of hiding them."""
  def traceback(self, req):
    """Display a traceback, req is a Request object."""
    # Module-level traceback() helper (defined elsewhere in this file),
    # asked to produce HTML output for the browser.
    traceback(req, html=1)
class DebugHandler(DebugHandlerMixIn, Handler):
  """Handler that reports uncaught exceptions as HTML tracebacks."""
| {
"repo_name": "jribbens/jonpy",
"path": "jon/cgi.py",
"copies": "1",
"size": "16681",
"license": "mit",
"hash": -3223970739280333300,
"line_mean": 29.5512820513,
"line_max": 80,
"alpha_frac": 0.6169294407,
"autogenerated": false,
"ratio": 3.6872236958443856,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48041531365443857,
"avg_score": null,
"num_lines": null
} |
# $Id$
import time, hmac, Cookie, re, random, os, errno, fcntl
try:
import hashlib
sha = hashlib.sha1
shanew = hashlib.sha1
except ImportError:
import sha
shanew = sha.new
try:
import cPickle as pickle
except ImportError:
import pickle
class Error(Exception):
  """Base exception for session-handling failures."""
class Session(dict):
  """A dict-like web session keyed by an 8-hex-digit id plus HMAC hash.

  The session id may arrive via an explicit 'sid' argument, a cookie, or
  a path component (url-based sessions).  The id is authenticated with
  an HMAC of the server secret; on any mismatch the session is treated
  as absent.  Storage is delegated to subclasses via _load()/save().
  """
  def _make_hash(self, sid, secret):
    """Create a hash for 'sid'
    This function may be overridden by subclasses."""
    # First 8 hex digits of HMAC-SHA1(secret, sid).
    return hmac.new(secret, sid, sha).hexdigest()[:8]
  def _create(self, secret):
    """Create a new session ID and, optionally hash
    This function must insert the new session ID (which must be 8 hexadecimal
    characters) into self["id"].
    It may optionally insert the hash into self["hash"]. If it doesn't, then
    _make_hash will automatically be called later.
    This function may be overridden by subclasses.
    """
    # NOTE(review): time/random/UNIQUE_ID is not cryptographically
    # strong randomness; the HMAC hash is what prevents forgery.
    rnd = str(time.time()) + str(random.random()) + \
      str(self._req.environ.get("UNIQUE_ID"))
    self["id"] = shanew(rnd).hexdigest()[:8]
  def _load(self):
    """Load the session dictionary from somewhere
    This function may be overridden by subclasses.
    It should return 1 if the load was successful, or 0 if the session could
    not be found. Any other type of error should raise an exception as usual."""
    return 1
  def save(self):
    """Save the session dictionary to somewhere
    This function may be overridden by subclasses."""
    pass
  def tidy():
    # Subclasses override this to expire old sessions.
    pass
  tidy = staticmethod(tidy)
  def __init__(self, req, secret, cookie="jonsid", url=0, root="",
      referer=None, sid=None, shash=None, secure=0, domain=None, create=True,
      samesite=None):
    dict.__init__(self)
    self["id"] = None
    self._req = req
    self._secret = secret
    self.cookie = cookie
    self.url = url
    self.root = root
    self.secure = secure
    self.domain = domain
    self.samesite = samesite
    self.relocated = 0
    self.new = 0
    # try and determine existing session id
    # Sources are checked in order sid -> cookie -> url path; a later
    # source overrides an earlier one, and any hash mismatch clears it.
    if sid is not None:
      self["id"] = sid
      if shash is None:
        self["hash"] = self._make_hash(self["id"], self._secret)
      else:
        self["hash"] = shash
        if self["hash"] != self._make_hash(self["id"], self._secret):
          self["id"] = None
    if self.cookie and self.cookie in self._req.cookies:
      # Cookie value is id (8 chars) followed by the hash.
      self["id"] = self._req.cookies[self.cookie].value[:8]
      self["hash"] = self._req.cookies[self.cookie].value[8:]
      if self["hash"] != self._make_hash(self["id"], self._secret):
        self["id"] = None
    if self.url:
      # Apache prefixes "REDIRECT_" once per internal redirect, hence
      # the "REDIRECT_" * i probing.
      for i in range(1, 4):
        self._requrl = self._req.environ.get("REDIRECT_" * i + "SESSION")
        if self._requrl:
          break
      if self._requrl and self["id"] is None:
        self["id"] = self._requrl[:8]
        self["hash"] = self._requrl[8:]
        if self["hash"] != self._make_hash(self["id"], self._secret):
          self["id"] = None
    # check the session
    if referer:
      # Reject sessions arriving from a foreign referer.
      if "HTTP_REFERER" in self._req.environ:
        if self._req.environ["HTTP_REFERER"].find(referer) == -1:
          self["id"] = None
    # try and load the session
    if self["id"] is not None:
      if not self._load():
        self["id"] = None
    # if no session was available and loaded, create a new one
    if create and self["id"] is None:
      self.create()
  def create(self):
    """Create a fresh session and advertise it via cookie and/or URL."""
    if "hash" in self:
      del self["hash"]
    self.created = time.time()
    self.new = 1
    self._create(self._secret)
    if "hash" not in self:
      self["hash"] = self._make_hash(self["id"], self._secret)
    if self.cookie:
      c = Cookie.SimpleCookie()
      c[self.cookie] = self["id"] + self["hash"]
      c[self.cookie]["path"] = self.root + "/"
      if self.secure:
        c[self.cookie]["secure"] = 1
      if self.domain:
        c[self.cookie]["domain"] = self.domain
      # SameSite is appended manually: Python 2's Cookie module does
      # not know the attribute.
      self._req.add_header(
        "Set-Cookie",
        c[self.cookie].OutputString() +
        (("; SameSite=" + self.samesite) if self.samesite else "")
      )
    # if using url-based sessions, redirect if necessary
    if self.url:
      requrl = self._requrl
      if not requrl or self["id"] != requrl[:8] or self["hash"] != requrl[8:]:
        requrl = self._req.environ["REQUEST_URI"][len(self.root):]
        # Strip any stale 16-hex-char session component from the path.
        requrl = re.sub("^/[A-Fa-f0-9]{16}/", "/", requrl)
        self._req.add_header("Location", "http://" +
          self._req.environ["SERVER_NAME"] + self.root + "/" + self["id"] +
          self["hash"] + requrl)
        self.relocated = 1
      # surl is the session-prefixed root for building internal links.
      self.surl = self.root + "/" + self["id"] + self["hash"] + "/"
class MemorySession(Session):
  """Session stored in a per-process, in-memory dictionary.

  Sessions are shared by all MemorySession users in the same process
  but are lost when the process exits.
  """
  _sessions = {}
  def _create(self, secret):
    # Loop until we generate an id that is not already in use.
    while 1:
      Session._create(self, secret)
      if self["id"] in self._sessions:
        continue
      self._sessions[self["id"]] = {"created": self.created,
        "updated": self.created, "data": {}}
      break
  def _load(self):
    """Populate self from the store; return 1 on success, 0 if unknown."""
    try:
      sess = self._sessions[self["id"]]
    except KeyError:
      return 0
    self.created = sess["created"]
    self.update(sess["data"])
    return 1
  def save(self):
    """Write the current dictionary contents back to the session store."""
    sess = self._sessions[self["id"]]
    sess["updated"] = time.time()
    sess["data"] = self.copy()
  def tidy(cls, max_idle=0, max_age=0):
    """Discard sessions older than max_age or idle longer than max_idle.

    Fix: the original iterated over the dictionary keys and then indexed
    the key *strings* as if they were per-session records (k["created"],
    raising TypeError), and also deleted entries from the dictionary
    while iterating over it.  Iterate over a snapshot of the items and
    inspect the per-session record instead.
    """
    now = time.time()
    for sid, sess in list(cls._sessions.items()):
      if (max_age and sess["created"] < now - max_age) or \
          (max_idle and sess["updated"] < now - max_idle):
        del cls._sessions[sid]
  tidy = classmethod(tidy)
class FileSession(Session):
  """Session stored as pickle files under a per-user temp directory.

  Each session lives at <basedir>/<id[:2]>/<id[2:]> and contains the
  creation timestamp on the first line followed by a pickled dict.
  Files are protected with fcntl locks while being read or written.
  """
  def _create(self, secret):
    # Loop until we create a session file that did not already exist
    # (O_EXCL makes the existence check atomic).
    while 1:
      Session._create(self, secret)
      try:
        os.lstat("%s/%s" % (self.basedir, self["id"][:2]))
      except OSError, x:
        # Create the two-character fan-out directory on demand.
        # NOTE(review): OSErrors other than ENOENT are silently
        # swallowed here and surface at os.open below - confirm.
        if x[0] == errno.ENOENT:
          os.mkdir("%s/%s" % (self.basedir, self["id"][:2]), 0700)
      try:
        fd = os.open("%s/%s/%s" % (self.basedir, self["id"][:2],
          self["id"][2:]), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0700)
      except OSError, x:
        if x[0] != errno.EEXIST:
          raise
        # Id collision: try a new id.
        continue
      f = os.fdopen(fd, "wb")
      # First line is the creation time; rest is the pickled data dict.
      f.write("%d\n" % self.created)
      pickle.dump({}, f, 1)
      f.flush()
      break
  def _load(self):
    """Load this session's file; return 1 on success, 0 if not found."""
    try:
      f = open("%s/%s/%s" % (self.basedir, self["id"][:2], self["id"][2:]),
        "r+b")
    except IOError, x:
      if x[0] != errno.ENOENT:
        raise
      return 0
    fcntl.lockf(f.fileno(), fcntl.LOCK_EX)
    self.created = int(f.readline().strip())
    self.update(pickle.load(f))
    return 1
  def save(self):
    """Rewrite this session's file with the current dict contents."""
    f = open("%s/%s/%s" % (self.basedir, self["id"][:2], self["id"][2:]), "r+b")
    fcntl.lockf(f.fileno(), fcntl.LOCK_EX)
    f.write("%d\n" % self.created)
    pickle.dump(self.copy(), f, 1)
    f.flush()
    # Drop any leftover bytes from a previously-longer pickle.
    f.truncate()
  def tidy(cls, max_idle=0, max_age=0, basedir=None):
    """Delete session files that are too old or too long idle."""
    if not max_idle and not max_age:
      return
    basedir = cls._find_basedir(basedir)
    now = time.time()
    # Walk the two-level fan-out layout; ignore foreign files.
    for d in os.listdir(basedir):
      if len(d) != 2 or not d.isalnum():
        continue
      for f in os.listdir("%s/%s" % (basedir, d)):
        if len(f) != 6 or not f.isalnum():
          continue
        p = "%s/%s/%s" % (basedir, d, f)
        # mtime approximates last update; first line is creation time.
        if (max_idle and os.lstat(p).st_mtime < now - max_idle) or \
            (max_age and int(open(p, "rb").readline().strip()) < now - max_age):
          os.remove(p)
  tidy = classmethod(tidy)
  def _find_basedir(basedir):
    """Locate (and create if needed) the per-user session directory."""
    if basedir is None:
      basedir = os.environ.get("TMPDIR", "/tmp")
    while basedir[-1] == "/":
      basedir = basedir[:-1]
    basedir = "%s/jon-sessions-%d" % (basedir, os.getuid())
    try:
      st = os.lstat(basedir)
      # st[4] is st_uid: refuse a directory owned by someone else
      # (a hostile pre-created directory could leak session data).
      if st[4] != os.getuid():
        raise Error("Sessions basedir is not owned by user %d" % os.getuid())
    except OSError, x:
      if x[0] == errno.ENOENT:
        os.mkdir(basedir, 0700)
    return basedir
  _find_basedir = staticmethod(_find_basedir)
  def __init__(self, req, secret, basedir=None, **kwargs):
    self.basedir = self._find_basedir(basedir)
    Session.__init__(self, req, secret, **kwargs)
class GenericSQLSession(Session):
  """Session stored in a SQL table via a DB API cursor-like object.

  Expects a table with columns ID, hash, created, updated, data; 'data'
  holds the pickled session dict.  'dbc' is the cursor used for all
  statements.
  """
  def _create(self, secret):
    # Insert a new row; a duplicate-ID race loses the INSERT with an
    # IntegrityError and we retry with a fresh id.
    while 1:
      Session._create(self, secret)
      self["hash"] = self._make_hash(self["id"], secret)
      try:
        self.dbc.execute("INSERT INTO %s (ID,hash,created,updated,data)"
          " VALUES (%%s,%%s,%%s,%%s,%%s)" % (self.table,),
          (self["id"], self["hash"], int(self.created), int(self.created),
          pickle.dumps({}, 1)))
        self.dbc.execute("COMMIT")
      except self.dbc.IntegrityError:
        pass
      else:
        break
  def _load(self):
    """Load this session's row; return 1 on success, 0 if not found."""
    self.dbc.execute("SELECT created,data FROM %s WHERE ID=%%s" % (self.table,),
      (self["id"],))
    if self.dbc.rowcount == 0:
      return 0
    row = self.dbc.fetchone()
    self.created = row[0]
    self.update(pickle.loads(row[1]))
    return 1
  def save(self):
    """Write the current dict contents back to this session's row."""
    self.dbc.execute("UPDATE %s SET updated=%%s,data=%%s"
      " WHERE ID=%%s" % (self.table,), (int(time.time()),
      pickle.dumps(self.copy(), 1), self["id"]))
    self.dbc.execute("COMMIT")
  def tidy(dbc, table="sessions", max_idle=0, max_age=0):
    """Delete rows that are too old or too long idle."""
    now = time.time()
    if max_idle:
      dbc.execute("DELETE FROM %s WHERE updated < %%s" % (table,),
        (now - max_idle,))
    if max_age:
      dbc.execute("DELETE FROM %s WHERE created < %%s" % (table,),
        (now - max_age,))
    if max_idle or max_age:
      dbc.execute("COMMIT")
  tidy = staticmethod(tidy)
  def __init__(self, req, secret, dbc, table="sessions", **kwargs):
    self.dbc = dbc
    self.table = table
    Session.__init__(self, req, secret, **kwargs)
class MySQLSession(GenericSQLSession):
  """MySQL-specific session storage.

  Differs from GenericSQLSession in that the ID column is numeric (the
  hex id converted via long(id, 16)) and uniqueness is enforced with
  LOCK TABLES instead of relying on IntegrityError retries.
  """
  def _create(self, secret):
    # Hold a table lock while probing for a free id and inserting.
    self.dbc.execute("LOCK TABLES %s WRITE" % (self.table,))
    while 1:
      Session._create(self, secret)
      self.dbc.execute("SELECT 1 FROM %s WHERE ID=%%s" % (self.table,),
        (long(self["id"], 16),))
      if self.dbc.rowcount == 0:
        break
    self["hash"] = self._make_hash(self["id"], secret)
    self.dbc.execute("INSERT INTO %s (ID,hash,created,updated,data) VALUES " \
      "(%%s,%%s,%%s,%%s,%%s)" % (self.table,),
      (long(self["id"], 16), self["hash"], int(self.created),
      int(self.created), pickle.dumps({}, 1)))
    self.dbc.execute("UNLOCK TABLES")
  def _load(self):
    """Load this session's row; return 1 on success, 0 if not found."""
    self.dbc.execute("SELECT created,data FROM %s WHERE ID=%%s" % (self.table,),
      (long(self["id"], 16),))
    if self.dbc.rowcount == 0:
      return 0
    row = self.dbc.fetchone()
    self.created = row[0]
    self.update(pickle.loads(row[1]))
    return 1
  def save(self):
    # NOTE(review): unlike GenericSQLSession.save, no explicit COMMIT is
    # issued here - presumably autocommit or caller-managed transactions
    # are assumed; confirm before relying on durability.
    self.dbc.execute("UPDATE %s SET updated=%%s,data=%%s"
      " WHERE ID=%%s" % (self.table,), (int(time.time()),
      pickle.dumps(self.copy(), 1), long(self["id"], 16)))
SQLSession = MySQLSession # backwards compatibility name
| {
"repo_name": "jribbens/jonpy",
"path": "jon/session.py",
"copies": "1",
"size": "10737",
"license": "mit",
"hash": -2936696600022355000,
"line_mean": 29.0756302521,
"line_max": 80,
"alpha_frac": 0.5734376455,
"autogenerated": false,
"ratio": 3.331368290412659,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4404805935912659,
"avg_score": null,
"num_lines": null
} |
# $Id$
import traceback
import canary.context
from canary.utils import DTable
import dtuple
def find_references (context, token=''):
    """Return HumanStudy objects whose reference begins with 'token'.

    An empty token returns every study (delegates to get_studies).
    """
    if not token:
        return get_studies(context)
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT uid
        FROM human_studies
        WHERE reference LIKE %s
        ORDER BY reference
        """, '%s%s' % (token, '%'))
    fields = [d[0] for d in cursor.description]
    desc = dtuple.TupleDescriptor([[f] for f in fields])
    matches = []
    for raw_row in cursor.fetchall():
        record = dtuple.DatabaseTuple(desc, raw_row)
        matches.append(HumanStudy(context, record['uid']))
    return matches
def get_studies (context):
    """Return every HumanStudy, ordered by reference text."""
    cursor = context.get_cursor()
    cursor.execute("""
        SELECT uid
        FROM human_studies
        ORDER BY reference
        """)
    fields = [d[0] for d in cursor.description]
    desc = dtuple.TupleDescriptor([[f] for f in fields])
    results = []
    for raw_row in cursor.fetchall():
        record = dtuple.DatabaseTuple(desc, raw_row)
        results.append(HumanStudy(context, record['uid']))
    return results
class HumanStudy (canary.context.Cacheable, DTable):
CACHE_KEY = 'human_study'
CACHE_CHECK_KEY = 'reference'
def __init__ (self, context=None, uid=-1):
try:
if getattr(self, self.CACHE_CHECK_KEY):
return
except AttributeError:
pass
self.uid = uid
self.reference = ''
self.comments = ''
def load (self, context):
if self.uid == -1:
return
# Is it already loaded? Convenience check for client calls
# don't need to verify loads from the cache.
if context.config.use_cache:
try:
if getattr(self, self.CACHE_CHECK_KEY):
# Already loaded
return
except AttributeError:
# Note already loaded, so continue
pass
cursor = context.get_cursor()
cursor.execute("""
SELECT reference, comments
FROM human_studies
WHERE uid = %s
""", self.uid)
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
row = cursor.fetchone()
row = dtuple.DatabaseTuple(desc, row)
self.reference = row['reference']
self.comments = row['comments']
if context.config.use_cache:
context.cache_set('%s:%s' % (self.CACHE_KEY, self.uid), self)
def save (self, context):
cursor = context.get_cursor()
try:
if self.uid == -1:
cursor.execute("""
INSERT INTO human_studies
(reference, comments)
VALUES (%s, %s)
""", (self.reference, self.comments))
self.uid = self.get_new_uid(context)
context.logger.info('HumanStudy created with uid %s', self.uid)
else:
cursor.execute("""
UPDATE human_studies
SET reference = %s, comments = %s
WHERE uid = %s
""", (self.reference, self.comments, self.uid))
context.logger.info('HumanStudy %s updates', self.uid)
except:
print traceback.print_exc()
raise Error, 'Duplicate reference'
if context.config.use_cache:
context.cache_set('%s:%s' % (self.CACHE_KEY, self.uid), self)
def delete (self, context):
cursor = context.get_cursor()
try:
# First, delete from summary_human_refs
cursor.execute("""
DELETE FROM summary_human_refs
WHERE human_study_id = %s
""", self.uid)
cursor.execute("""
DELETE FROM human_studies
WHERE uid = %s
""", self.uid)
if context.config.use_cache:
context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
except Exception, e:
context.logger.error(e)
| {
"repo_name": "dchud/sentinel",
"path": "canary/human_study.py",
"copies": "1",
"size": "4326",
"license": "mit",
"hash": 3048566577246558000,
"line_mean": 29.0416666667,
"line_max": 79,
"alpha_frac": 0.5166435506,
"autogenerated": false,
"ratio": 4.2536873156342185,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5270330866234219,
"avg_score": null,
"num_lines": null
} |
# $Id$
import weakref as _weakref
import Queue as _Queue
import thread as _thread
import time as _time
import atexit as _atexit
# Debug logging configuration: level 0 disables logging entirely; the
# log file is opened lazily on first use and guarded by its own lock.
_log_level = 0
_log_name = "/tmp/dbpool.log"
_log_file = None
_log_lock = _thread.allocate_lock()
# DB API 2.0 module-level attributes exposed by this pooling wrapper.
apilevel = "2.0"
threadsafety = 2
# The wrapped database module (set by set_database) and the pool state.
_dbmod = None
_lock = _thread.allocate_lock()
# Maps weakref(outer _Connection) -> _InnerConnection, so the inner
# connection can be recycled when the outer wrapper is garbage-collected.
_refs = {}
# DB API attributes copied verbatim from the wrapped module into this one.
_COPY_ATTRS = ("paramstyle", "Warning", "Error", "InterfaceError",
  "DatabaseError", "DataError", "OperationalError", "IntegrityError",
  "InternalError", "ProgrammingError", "NotSupportedError")
def _log(level, message, *args, **kwargs):
  """Append a timestamped debug line to the log file.

  Nothing happens unless _log_level is at least 'level'.  Any extra
  args/kwargs are rendered call-style after the message.
  """
  global _log_file
  if _log_level >= level:
    if args or kwargs:
      argslist = [repr(arg) for arg in args]
      argslist.extend("%s=%r" % item for item in kwargs.items())
      message += "(" + ", ".join(argslist) + ")"
    _log_lock.acquire()
    try:
      # Open lazily, line-buffered, on first logged message.
      if not _log_file:
        _log_file = open(_log_name, "a", 1)
      _log_file.write("%s %s\n" % (_time.strftime("%b %d %H:%M:%S"), message))
    finally:
      _log_lock.release()
def set_database(dbmod, minconns, timeout=0, postconnect=None):
  """Configure the pool with a DB API 2.0 module.

  minconns is the per-key pool size; timeout (seconds) discards idle
  connections older than that; postconnect, if given, is called with
  each newly-created raw connection.  May only be called once (calling
  again with the same module is a no-op).
  """
  if minconns < 1:
    raise ValueError("minconns must be greater than or equal to 1")
  if _dbmod is not None:
    if _dbmod is dbmod:
      return
    raise Exception("dbpool module is already in use")
  # Sanity-check DB API 2.0 compliance: apilevel "2.x" and at least
  # module-level thread safety.
  if len(dbmod.apilevel) != 3 or dbmod.apilevel[:2] != "2." or \
     not dbmod.apilevel[2].isdigit():
    raise ValueError("specified database module is not DB API 2.0 compliant")
  if dbmod.threadsafety < 1:
    raise ValueError("specified database module must have threadsafety level"
      " of at least 1")
  _log(1, "set_database", dbmod.__name__, minconns, timeout)
  # Install the module and pool state as module globals.
  g = globals()
  g["_dbmod"] = dbmod
  g["_available"] = {}
  g["_minconns"] = minconns
  g["_timeout"] = timeout
  g["_postconnect"] = postconnect
  # Re-export the wrapped module's DB API attributes from this module.
  for v in _COPY_ATTRS:
    g[v] = getattr(dbmod, v)
def connect(*args, **kwargs):
  """Return a pooled connection for the given connect() arguments.

  Connections are pooled per distinct argument combination.  An idle
  pooled connection older than the configured timeout is discarded.
  """
  if _dbmod is None:
    raise Exception("No database module has been specified")
  # Pool key: the exact connect() arguments.
  key = repr(args) + "\0" + repr(kwargs)
  _log(1, "connect", *args, **kwargs)
  try:
    while True:
      # Non-blocking get; raises Empty when the pool slot is exhausted.
      conn = _available[key].get(0)
      if _timeout == 0 or _time.time() - conn._lastuse < _timeout:
        _log(2, "connect: returning connection %r from _available" % conn)
        return conn
      else:
        # Too old: drop the underlying connection and keep looking.
        conn._inner._connection = None
        _log(2, "connect: discarded connection %r from _available due to age" %
          conn)
  except (KeyError, _Queue.Empty):
    # No pooled connection available: create a fresh one.
    conn = _Connection(None, None, *args, **kwargs)
    _log(2, "connect: created new connection %r" % conn)
    return conn
def _make_available(conn):
  """Return a no-longer-used connection wrapper to its pool slot.

  If the slot is already full, the underlying connection is discarded.
  """
  key = repr(conn._args) + "\0" + repr(conn._kwargs)
  _log(2, "_make_available", conn)
  _lock.acquire()
  try:
    try:
      _available[key].put(conn, 0)
      _log(3, "_make_available: put into existing _available slot")
    except KeyError:
      # First connection for this key: create its pool queue.
      _log(3, "_make_available: created new _available slot")
      q = _Queue.Queue(_minconns)
      q.put(conn, 0)
      _available[key] = q
  except _Queue.Full:
    # Pool already holds _minconns connections: drop this one.
    conn._inner._connection = None
    _log(3, "_make_available: discarded, _available slot full")
  finally:
    _lock.release()
def _connection_notinuse(ref):
  """Weakref callback: recycle the inner connection when its outer
  _Connection wrapper is garbage-collected."""
  # if the Python interpreter is exiting, the globals might already have
  # been deleted, so check for them explicitly
  if _refs is None:
    return
  inner = _refs[ref]
  del _refs[ref]
  inner._cursorref = None
  if inner._connection is not None:
    # Re-wrap the still-open inner connection and return it to the pool.
    if _make_available is not None and _Connection is not None:
      _make_available(_Connection(inner))
class _Connection(object):
  """User-facing connection wrapper.

  Holds an _InnerConnection; when this wrapper is garbage-collected, a
  weakref callback returns the inner connection to the pool rather than
  closing it.
  """
  def __init__(self, inner, *args, **kwargs):
    self._inner = None
    _log(4, "_Connection", self, inner, *args, **kwargs)
    if inner is None:
      # Fresh wrapper: build (or re-wrap) an inner connection.
      self._inner = _InnerConnection(*args, **kwargs)
      _log(5, "_Connection: new inner=%r" % self._inner)
    else:
      # Re-wrapping a recycled inner connection from the pool.
      self._inner = inner
    self._inner._outerref = _weakref.ref(self)
    # Register the recycle-on-collection callback.
    ref = _weakref.ref(self, _connection_notinuse)
    _log(5, "_Connection: ref=%r" % ref)
    _refs[ref] = self._inner
  def __repr__(self):
    return "<dbpool._Connection(%r) at %x>" % (self._inner, id(self))
  def cursor(self, *args, **kwargs):
    # this method would not be necessary (i.e. the __getattr__ would take
    # care of it) but if someone does dbpool.connect().cursor() all in one
    # expression, the outer _Connection class was getting garbage-collected
    # (and hence the actual database connection being put back in the pool)
    # *in the middle of the expression*, i.e. after connect() was called but
    # before cursor() was called. So you could end up with 2 cursors on the
    # same database connection.
    return self._inner.cursor(*args, **kwargs)
  def __getattr__(self, attr):
    # Everything else is delegated to the inner connection.
    return getattr(self._inner, attr)
class _InnerConnection(object):
  """Holds the actual DB API connection plus pooling bookkeeping.

  Tracks the connect() arguments (for pool keying), the single live
  cursor (via weakref), the owning outer wrapper, and the time of last
  use (for idle timeout).
  """
  def __init__(self, connection, *args, **kwargs):
    self._connection = None
    _log(4, "_InnerConnection", self, connection, *args, **kwargs)
    self._args = args
    self._kwargs = kwargs
    if connection is None:
      # No recycled connection supplied: open a real one.
      _log(2, "_InnerConnection: Calling actual connect", *args, **kwargs)
      self._connection = _dbmod.connect(*args, **kwargs)
      if _postconnect:
        _postconnect(self._connection, *args, **kwargs)
    else:
      _log(5, "_InnerConnection: Re-using connection %r" % connection)
      self._connection = connection
    self._cursorref = None
    self._outerref = None
    self._lock = _thread.allocate_lock()
    self._lastuse = _time.time()
  def __repr__(self):
    return "<dbpool._InnerConnection(%r) at %x>" % (self._connection, id(self))
  def close(self):
    """'Close' the connection: actually return it to the pool."""
    _log(3, "_Connection.close", self)
    if self._cursorref is not None:
      c = self._cursorref()
      if c is not None:
        _log(4, "_Connection.close: closing cursor %r" % c)
        c.close()
      self._cursorref = None
    self._outerref = None
    conn = self._connection
    if conn:
      # Detach the raw connection and hand it back via a new wrapper.
      self._connection = None
      if _make_available is not None:
        _make_available(_Connection(None, conn, *self._args, **self._kwargs))
  def __getattr__(self, attr):
    return getattr(self._connection, attr)
  def cursor(self, *args, **kwargs):
    """Return a cursor, enforcing one live cursor per connection."""
    _log(3, "cursor", self, *args, **kwargs)
    if _timeout == 0 or _time.time() - self._lastuse < _timeout:
      self._lock.acquire()
      try:
        # Only hand out a cursor if none is currently outstanding.
        if self._cursorref is None or self._cursorref() is None:
          c = _Cursor(self, *args, **kwargs)
          self._cursorref = _weakref.ref(c)
          self._lastuse = _time.time()
          return c
      finally:
        self._lock.release()
    # Connection busy or too old: get a cursor on a fresh connection.
    _log(3, "cursor: creating new connection")
    return connect(*self._args, **self._kwargs).cursor(*args, **kwargs)
class _Cursor(object):
  """Cursor wrapper that keeps its connection alive while in use.

  Holds a strong reference to the outer _Connection so the connection
  cannot be recycled while the cursor is still reachable.
  """
  def __init__(self, connection, *args, **kwargs):
    self._cursor = None
    _log(4, "_Cursor", connection, *args, **kwargs)
    self._connection = connection
    # Strong ref to the outer wrapper prevents premature recycling.
    self._outer = connection._outerref()
    self._cursor = connection._connection.cursor(*args, **kwargs)
  def __repr__(self):
    return "<dbpool._Cursor(%r) at %x>" % (self._cursor, id(self))
  def close(self):
    """Close the underlying cursor and release the connection."""
    _log(4, "_Cursor.close", self)
    self._connection._cursorref = None
    self._connection = None
    self._cursor.close()
    self._outer = None
  def __getattr__(self, attr):
    # Everything else is delegated to the real cursor.
    return getattr(self._cursor, attr)
def _exiting():
  """atexit hook: stop recycling connections during interpreter shutdown."""
  # Disabling _make_available prevents weakref callbacks from trying to
  # repool connections while module globals are being torn down.
  globals()["_make_available"] = None
_atexit.register(_exiting)
| {
"repo_name": "jribbens/jonpy",
"path": "jon/dbpool.py",
"copies": "1",
"size": "7403",
"license": "mit",
"hash": 1728524679384096000,
"line_mean": 30.5021276596,
"line_max": 79,
"alpha_frac": 0.6251519654,
"autogenerated": false,
"ratio": 3.4400557620817844,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.949321428410474,
"avg_score": 0.014398688675408987,
"num_lines": 235
} |