metadata (dict) | text (string, lengths 60 to 3.49M)
---|---
{
"source": "jrambla/beacon-2.x",
"score": 3
} |
#### File: beacon_api/api/exceptions.py
```python
import json
import logging
from aiohttp import web
from .. import __apiVersion__
from ..conf import CONFIG_INFO
LOG = logging.getLogger(__name__)
class BeaconError(Exception):
"""BeaconError Exception specific class.
Generates custom exception messages based on request parameters.
"""
def __init__(self, request, host, error_code, error):
"""Return request data as dictionary."""
self.data = {'beaconId': '.'.join(reversed(host.split('.'))),
"apiVersion": __apiVersion__,
'exists': None,
'error': {'errorCode': error_code,
'errorMessage': error},
'alleleRequest': {'referenceName': request.get("referenceName", None),
'referenceBases': request.get("referenceBases", None),
'includeDatasetResponses': request.get("includeDatasetResponses", "NONE"),
'assemblyId': request.get("assemblyId", None)},
# datasetAlleleResponses is shown as an empty list when no datasets are found:
# a null/None would mean no data at all, while an empty array means none were
# found (or an error occurred), which corresponds with `exists` being null/None
'datasetAlleleResponses': []}
# include ids only if they are specified;
# per the specification, if they are absent all datasets will be queried
# Only one of `alternateBases` or `variantType` is required, validated by schema
oneof_fields = ["alternateBases", "variantType", "start", "end", "startMin", "startMax",
"endMin", "endMax", "ids"]
self.data['alleleRequest'].update({k: request.get(k) for k in oneof_fields if k in request})
return self.data
class BeaconBadRequest(BeaconError):
"""Exception returns with 400 code and a custom error message.
The method is called if one of the required parameters is missing or invalid.
Used in conjunction with the JSON Schema validator.
"""
def __init__(self, request, host, error):
"""Return custom bad request exception."""
data = super().__init__(request, host, 400, error)
LOG.error(f'400 ERROR MESSAGE: {error}')
raise web.HTTPBadRequest(content_type="application/json", text=json.dumps(data))
class BeaconUnauthorised(BeaconError):
"""HTTP Exception returns with 401 code with a custom error message.
The method is called if the user is not registered or if the token from the authentication has expired.
Used in conjunction with the token authentication aiohttp middleware.
"""
def __init__(self, request, host, error, error_message):
"""Return custom unauthorized exception."""
data = super().__init__(request, host, 401, error)
headers_401 = {"WWW-Authenticate": f"Bearer realm=\"{CONFIG_INFO.url}\"\n\
error=\"{error}\"\n\
error_description=\"{error_message}\""}
LOG.error(f'401 ERROR MESSAGE: {error}')
raise web.HTTPUnauthorized(content_type="application/json", text=json.dumps(data),
# we use auth scheme Bearer by default
headers=headers_401)
class BeaconForbidden(BeaconError):
"""HTTP Exception returns with 403 code with the error message.
`'Resource not granted for authenticated user or resource protected for all users.'`.
The method is called if the dataset is protected or if the user is authenticated
but not granted the resource. Used in conjunction with the token authentication aiohttp middleware.
"""
def __init__(self, request, host, error):
"""Return custom forbidden exception."""
data = super().__init__(request, host, 403, error)
LOG.error(f'403 ERROR MESSAGE: {error}')
raise web.HTTPForbidden(content_type="application/json", text=json.dumps(data))
class BeaconServerError(BeaconError):
"""HTTP Exception returns with 500 code with the error message.
The 500 error is not specified by the Beacon API, so a simple error response is used.
"""
def __init__(self, error):
"""Return custom forbidden exception."""
data = {'errorCode': 500,
'errorMessage': error}
LOG.error(f'500 ERROR MESSAGE: {error}')
raise web.HTTPInternalServerError(content_type="application/json", text=json.dumps(data))
class BeaconAccesLevelsError(Exception):
"""BeaconAccesLevelsError Exception specific class.
Generates custom exception messages based on request parameters.
"""
def __init__(self, error, help):
"""Return custom forbidden exception."""
data = {'errorCode': 400,
'errorMessage': error,
'help': help}
LOG.error(f'400 ERROR MESSAGE: {error}')
raise web.HTTPBadRequest(content_type="application/json", text=json.dumps(data))
class BeaconServicesBadRequest(Exception):
"""BeaconServicesBadRequest Exception specific class.
Generates custom exception messages based on request parameters.
"""
def __init__(self, processed_request, host, error):
"""Return request data as dictionary."""
self.data = {'beaconId': '.'.join(reversed(host.split('.'))),
'error': {'errorCode': 400,
'errorMessage': error},
'servicesRequest': {'serviceType': processed_request.get("serviceType", None),
'model': processed_request.get("model", None),
'listFormat': processed_request.get("listFormat", None),
'apiVersion': processed_request.get("apiVersion", None)}}
LOG.error(f'400 ERROR MESSAGE: {error}')
raise web.HTTPBadRequest(content_type="application/json", text=json.dumps(self.data))
class BeaconAccessLevelsBadRequest(Exception):
"""BeaconAccessLevelsBadRequest Exception specific class.
Generates custom exception messages based on request parameters.
"""
def __init__(self, host, error):
"""Return request data as dictionary."""
self.data = {'beaconId': '.'.join(reversed(host.split('.'))),
'error': {'errorCode': 400,
'message': error},
'fields': {}}
LOG.error(f'400 ERROR MESSAGE: {error}')
raise web.HTTPBadRequest(content_type="application/json", text=json.dumps(self.data))
class BeaconBasicBadRequest(Exception):
"""BeaconBASICBadRequest Exception specific class.
Generates custom exception messages based on request parameters.
"""
def __init__(self, processed_request, host, error):
"""Return request data as dictionary."""
self.data = {'beaconId': '.'.join(reversed(host.split('.'))),
'error': {'errorCode': 400,
'errorMessage': error},
'request': processed_request}
LOG.error(f'400 ERROR MESSAGE: {error}')
raise web.HTTPBadRequest(content_type="application/json", text=json.dumps(self.data))
```
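The exception classes above build a Beacon-formatted error body and raise the matching aiohttp HTTP error directly from their constructors. A minimal usage sketch (the handler, the query extraction and the validation condition are illustrative, not taken from the original project):
```python
from aiohttp import web
from beacon_api.api.exceptions import BeaconBadRequest

async def query_handler(request: web.Request):
    params = dict(request.rel_url.query)
    if 'referenceName' not in params:
        # Constructing the exception raises web.HTTPBadRequest with the
        # JSON body assembled by BeaconError.__init__
        BeaconBadRequest(params, request.host, 'Missing mandatory parameter referenceName')
    return web.json_response({'exists': True})
```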
#### File: beacon_api/utils/polyvalent_functions.py
```python
import ast
import logging
import yaml
import requests
from pathlib import Path
from ..api.exceptions import BeaconBadRequest, BeaconServerError, BeaconForbidden, BeaconUnauthorised
from .. import __apiVersion__
from ..conf.config import DB_SCHEMA
LOG = logging.getLogger(__name__)
# ----------------------------------------------------------------------------------------------------------------------
# BASIC FUNCTIONS
# ----------------------------------------------------------------------------------------------------------------------
def create_prepstmt_variables(value):
"""Create a string of prepared-statement placeholders ($1, $2, ...)
for the given number of query parameters."""
return ", ".join("$" + str(i) for i in range(1, value + 1))
def filter_exists(include_dataset, datasets):
"""Return those datasets responses that the `includeDatasetResponses` parameter decides.
Look at the exist parameter in each returned dataset to established HIT or MISS.
"""
if include_dataset == 'ALL':
return datasets
elif include_dataset == 'NONE':
return []
elif include_dataset == 'HIT':
return [d for d in datasets if d['exists'] is True]
elif include_dataset == 'MISS':
return [d for d in datasets if d['exists'] is False]
def datasetHandover(dataset_name):
"""Return the datasetHandover with the correct name of the dataset."""
datasetHandover = [ { "handoverType" : {
"id" : "CUSTOM",
"label" : "Dataset info"
},
"note" : "Dataset information and DAC contact details in EGA Website",
"url" : f"https://ega-archive.org/datasets/{dataset_name}"
} ]
return datasetHandover
# ----------------------------------------------------------------------------------------------------------------------
# YAML LOADER
# ----------------------------------------------------------------------------------------------------------------------
def find_yml_and_load(input_file):
"""Try to load the access levels yaml and return it as a dict."""
file = Path(input_file)
if not file.exists():
LOG.error(f"The file '{file}' does not exist", file=sys.stderr)
return
if file.suffix in ('.yaml', '.yml'):
with open(file, 'r') as stream:
file_dict = yaml.safe_load(stream)
return file_dict
# Otherwise, fail
LOG.error(f"Unsupported format for {file}", file=sys.stderr)
# ----------------------------------------------------------------------------------------------------------------------
# FILTERING TERMS MANAGEMENT
# ----------------------------------------------------------------------------------------------------------------------
def parse_filters_request(filters_request_list):
"""Create a list of the filters passed in the query, where each filter
is another list in the main list with the following elements: ontology, term, operator, value.
"""
list_filters = []
for unprocessed_filter in filters_request_list:
filter_elements = unprocessed_filter.split(":")
ontology = filter_elements[0]
operator_switch = False
for operator in [">=", "<=", "=", ">", "<"]: # TO DO: raise an error if "=<" or "=>" are given
if operator in filter_elements[1]:
term = filter_elements[1].split(operator)[0]
value = filter_elements[1].split(operator)[1]
operator_switch = True
break
if operator_switch:
final_elements = [ontology, term, operator, value]
operator_switch = False
else:
final_elements = [ontology, filter_elements[1]]
list_filters.append(final_elements)
return list_filters
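# Illustrative example (hypothetical filter strings, not from the original project):
# parse_filters_request(["PATO:0000383", "EFO:EFO_0009656>10"])
# returns [['PATO', '0000383'], ['EFO', 'EFO_0009656', '>', '10']]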
async def prepare_filter_parameter(db_pool, filters_request):
"""Parse the filters parameters given in the query to create the string that needs to be passed
to the SQL query.
e.g. '(technology)::jsonb ?& array[''Illumina Genome Analyzer II'', ''Illumina HiSeq 2000''] AND
(other)::jsonb ?& array[''example1'', ''example2'']
"""
# First we want to parse the filters request
if isinstance(filters_request, list):
list_filters = parse_filters_request(filters_request)
else:
list_filters = parse_filters_request(ast.literal_eval(filters_request))
combinations_list = "','".join([":".join([filter_elements[0],filter_elements[1]]) for filter_elements in list_filters])
combinations_list = "'" + combinations_list + "'"
# Then we connect to the DB and retrieve the parameters that will be passed to the main query
async with db_pool.acquire(timeout=180) as connection:
try:
query = f"""SELECT target_table, column_name, column_value
FROM ontology_term_column_correspondance
WHERE concat_ws(':', ontology, term) IN ({combinations_list})"""
LOG.debug(f"QUERY filters info: {query}")
statement = await connection.prepare(query)
db_response = await statement.fetch()
filter_dict = {}
for record in list(db_response):
if record["target_table"] not in filter_dict.keys():
filter_dict[record["target_table"]] = {}
filter_dict[record["target_table"]][record["column_name"]] = []
filter_dict[record["target_table"]][record["column_name"]].append(record["column_value"])
elif record["column_name"] not in filter_dict[record["target_table"]].keys():
filter_dict[record["target_table"]][record["column_name"]] = []
filter_dict[record["target_table"]][record["column_name"]].append(record["column_value"])
else:
filter_dict[record["target_table"]][record["column_name"]].append(record["column_value"])
# After we have retrieved the values in a dict with the target_table as keys and as value another dict with column_name as keys, we need to create the final string
strings_list = []
final_string = ""
for target_table, column_name_dict in filter_dict.items():
if target_table == "public.beacon_dataset_table":
for column_name, values in column_name_dict.items():
string_values = ", ".join("'" + str(value) + "'" for value in values)
string = f'({column_name})::jsonb ?& array[{string_values}]'
strings_list.append(string)
# Once we have the response, we parse it to create the final string needed as input
if not strings_list:
final_string = 'null'
else:
final_string = " AND ".join(strings_list)
return str(final_string), filter_dict
except Exception as e:
raise BeaconServerError(f'Query filters DB error: {e}')
# ----------------------------------------------------------------------------------------------------------------------
# ACCESS RELATED FUNCTIONS AND DICT
# ----------------------------------------------------------------------------------------------------------------------
def access_resolution(request, token, host, public_data, registered_data, controlled_data):
"""Determine the access level for a user.
Depends on user bona_fide_status, and by default it should be PUBLIC.
"""
permissions = []
# all should have access to PUBLIC datasets
# unless the request is for specific datasets
if public_data:
permissions.append("PUBLIC")
access = set(public_data) # empty if no datasets are given
# for now we are expecting that the permissions are a list of datasets
if registered_data and token["bona_fide_status"] is True:
permissions.append("REGISTERED")
access = access.union(set(registered_data))
# if user requests public datasets do not throw an error
# if both registered and controlled datasets are request this will be shown first
elif registered_data and not public_data:
if token["authenticated"] is False:
# token is not provided (user not authed)
raise BeaconUnauthorised(request, host, "missing_token", 'Unauthorized access to dataset(s), missing token.')
# token is present, but is missing perms (user authed but no access)
raise BeaconForbidden(request, host, 'Access to dataset(s) is forbidden.')
if controlled_data and 'permissions' in token and token['permissions']:
# The idea is to return only accessible datasets
# Default event, when user doesn't specify dataset ids
# Contains only dataset ids from token that are present at beacon
controlled_access = set(controlled_data).intersection(set(token['permissions']))
access = access.union(controlled_access)
if controlled_access:
permissions.append("CONTROLLED")
# if user requests public datasets do not throw an error
# By default permissions cannot be None, at worst empty set, thus this might never be reached
elif controlled_data and not (public_data or registered_data):
if token["authenticated"] is False:
# token is not provided (user not authed)
raise BeaconUnauthorised(request, host, "missing_token", 'Unauthorized access to dataset(s), missing token.')
# token is present, but is missing perms (user authed but no access)
raise BeaconForbidden(request, host, 'Access to dataset(s) is forbidden.')
LOG.info(f"Accesible datasets are: {list(access)}.")
return permissions, list(access)
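# Illustrative outcome (hypothetical ids and token, not from the original project):
# with public_data=[1], registered_data=[2], controlled_data=[3] and a token where
# bona_fide_status is True and permissions == [3], the returned permissions are
# ["PUBLIC", "REGISTERED", "CONTROLLED"] and the accessible datasets are 1, 2 and 3.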
async def fetch_datasets_access(db_pool, datasets):
"""Retrieve 3 list of the available datasets depending on the access type"""
LOG.info('Retrieving info about the available datasets (id and access type).')
public = []
registered = []
controlled = []
async with db_pool.acquire(timeout=180) as connection:
async with connection.transaction():
datasets_query = None if datasets == "null" or not datasets else ast.literal_eval(datasets)
try:
query = f"""SELECT access_type, id, stable_id FROM {DB_SCHEMA}.beacon_dataset
WHERE coalesce(stable_id = any($1), true);
"""
LOG.debug(f"QUERY datasets access: {query}")
statement = await connection.prepare(query)
db_response = await statement.fetch(datasets_query)
for record in list(db_response):
if record['access_type'] == 'PUBLIC':
public.append(record['id'])
if record['access_type'] == 'REGISTERED':
registered.append(record['id'])
if record['access_type'] == 'CONTROLLED':
controlled.append(record['id'])
return public, registered, controlled
except Exception as e:
raise BeaconServerError(f'Query available datasets DB error: {e}')
# ----------------------------------------------------------------------------------------------------------------------
# FILTER RESPONSE BASED ON ACCESS LEVELS
# ----------------------------------------------------------------------------------------------------------------------
def filter_response(response, access_levels_dict, accessible_datasets, user_levels, field2access, parent_key=None):
"""
Recursive function that parses the response of the beacon to filter out those fields that are
not accessible for the user (based on the access level).
:response: beacon response
:access_levels_dict: access levels dictionary created out of the yml file in /utils
:accessible_datasets: list of datasets accessible by the user (taking into account its privileges)
:user_levels: list of levels that the user has, e.g. ['PUBLIC', 'REGISTERED']
:field2access: dictionary that maps the child_field name to its corresponding parent_field name in the access levels dict (e.g. 'datasets' inside the parent 'beacon' maps to its parent name 'beaconDataset')
:parent_key: used inside the recursion to store the parent key of the dict we are in
"""
final_dict = {}
if isinstance(response, dict):
for key, val in response.items():
translated_key = field2access[key] if key in field2access.keys() else key
specific_access_levels_dict = access_levels_dict[parent_key] if parent_key else access_levels_dict
if translated_key not in access_levels_dict.keys() and translated_key not in specific_access_levels_dict.keys():
final_dict[key] = val
else:
# if (isinstance(val, dict) or isinstance(val, list)) and key != "info":
if (isinstance(val, dict) or isinstance(val, list)) and translated_key in access_levels_dict.keys():
parent_permission = True
self_permission = True if access_levels_dict[translated_key]["accessLevelSummary"] in user_levels else False
if parent_key:
parent_permission = True if access_levels_dict[parent_key][key] in user_levels else False
if self_permission and parent_permission:
final_dict[key] = filter_response(val, access_levels_dict, accessible_datasets, user_levels, field2access, translated_key)
else:
valid_level = access_levels_dict[parent_key][translated_key] if parent_key else access_levels_dict[translated_key]
if valid_level in user_levels:
final_dict[key] = val
elif isinstance(response, list):
filtered = []
for element in response:
if isinstance(element, dict):
datasetId = element.get("internalId")
if not datasetId or datasetId in accessible_datasets: # controlling specific access permission to show a dataset response
filtered.append(filter_response(element, access_levels_dict, accessible_datasets, user_levels, field2access, parent_key))
return filtered
return final_dict
# ----------------------------------------------------------------------------------------------------------------------
# VARIANT HANDOVER and extra ANNOTATION
# ----------------------------------------------------------------------------------------------------------------------
def snp_resultsHandover(variantId):
"""Create the resultsHanover dict by inserting the variantId into the template."""
resultsHandover = [ {
"handoverType" : {
"id" : "data:1106",
"label" : "dbSNP ID"
},
"note" : "Link to dbSNP database",
"url" : f"https://www.ncbi.nlm.nih.gov/snp/?term={variantId}"
}, {
"handoverType" : {
"id" : "data:1106",
"label" : "dbSNP ID"
},
"note" : "Link to dbSNP API",
"url" : f"https://api.ncbi.nlm.nih.gov/variation/v0/beta/refsnp/{variantId[2:]}"
} ]
return resultsHandover
async def fetch_variantAnnotations(variant_details):
"""
Create part of the variantsAnnotation response by querying the cellBase API and the dbSNP API.
The variant_id has to be in the following format: chrom:start:ref:alt.
If in the variantDetails the alt is null, it has to be changed to a '-'.
"""
# cellBase
chrom = variant_details.get("chromosome") if variant_details.get("chromosome") else variant_details.get("referenceName")
start = variant_details.get("start")
ref = variant_details.get("referenceBases")
alt = variant_details.get("alternateBases") if variant_details.get("alternateBases") else '-'
variant_id = ":".join([str(chrom), str(start + 1), ref, alt])
url = f"http://cellbase.clinbioinfosspa.es/cb/webservices/rest/v4/hsapiens/genomic/variant/{variant_id}/annotation"
r = requests.get(url)
cellBase_dict = r.json() if r else ''
try:
cellBase_rsID = cellBase_dict["response"][0]["result"][0]["id"]
except (KeyError, IndexError, TypeError):
cellBase_rsID = None
# dbSNP
rsID = variant_details.get("variantId") if (variant_details.get("variantId") and variant_details.get("variantId") != ".") else cellBase_rsID
if rsID:
url = f"https://api.ncbi.nlm.nih.gov/variation/v0/beta/refsnp/{rsID[2:]}"
r = requests.get(url)
dbSNP_dict = r.json() if r else ''
else:
dbSNP_dict = ''
return rsID, cellBase_dict, dbSNP_dict
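# Illustrative example (hypothetical values, not from the original project):
# await fetch_variantAnnotations({"referenceName": "1", "start": 13271,
#                                 "referenceBases": "G", "alternateBases": "C"})
# builds the cellBase id "1:13272:G:C" (note the +1 on start) and, if an rsID is
# resolved, also queries the dbSNP API, returning (rsID, cellBase_dict, dbSNP_dict).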
``` |
{
"source": "jramcast/ml_weather",
"score": 4
} |
#### File: ml_weather/example2/example2.py
```python
import csv
import numpy as np
from random import shuffle
from sklearn.datasets import make_blobs
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import learning_curve
from sklearn.model_selection import ShuffleSplit
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
from frequent import get_most_frequent_terms
# Prepare future numpy matrices
X = []
Y_sentiment = []
Y_when = []
Y_type = []
#lambda regularization param
lambda_reg = 0.3
# solver used for the optimization (minimization)
solver = 'liblinear'
# Structures to hold original data
# We want to store original data to perform manual error analysis on some tweets
original_data = []
original_data_key = 0
print("Reading CSV...")
csvfile = open('data/train.csv', newline='')
datareader = csv.DictReader(csvfile)
data = list(datareader)
print("Shuffling data and selecting small subset...")
shuffle(data)
data = data[0: 5000]
how_many_features = 500
print("Getting most {} common words as features.".format(how_many_features))
most_frequent_terms = list()
def filter_tweets():
only_training_tweets = data[0: int(len(data)*0.6)]
for row in only_training_tweets:
yield row['tweet']
most_frequent_terms = get_most_frequent_terms(filter_tweets(), how_many_features)
print(most_frequent_terms)
automatic_features = [text for text, times in most_frequent_terms]
print("Generating data features...")
for row in data:
keywords_in_tweet = []
state_in_tweet = 0
location_in_tweet = 0
# check whether each keyword is inside tweet
tweet = row['tweet'].lower()
for keyword in automatic_features:
if keyword in tweet:
keywords_in_tweet.append(1)
else:
keywords_in_tweet.append(0)
# check whether state is inside tweet
if row['state'] in row['tweet']:
state_in_tweet = 1
# check whether location is inside tweet
if row['location'] in row['tweet']:
location_in_tweet = 1
# each row must have 3 classes: sentiment, when and type
# we find the class of each row by looking at the one with the max value
# (values are parsed as floats so that the comparison is numeric, not lexicographic)
y_sentiment_classes = [float(row['s1']), float(row['s2']), float(row['s3']), float(row['s4']), float(row['s5'])]
y_when_classes = [float(row['w1']), float(row['w2']), float(row['w3']), float(row['w4'])]
y_type_classes = [float(row['k1']), float(row['k2']), float(row['k3']), float(row['k4']), float(row['k5']), float(row['k6']), float(row['k7']), float(row['k8']),
float(row['k9']), float(row['k10']), float(row['k11']), float(row['k12']), float(row['k13']), float(row['k14']), float(row['k15'])]
# we add 1 to have 1-indexed classes, e.g. 1 equals s1
y_sentiment = y_sentiment_classes.index(max(y_sentiment_classes)) + 1
y_when = y_when_classes.index(max(y_when_classes)) + 1
y_type = y_type_classes.index(max(y_type_classes)) + 1
# now generate the numeric arrays x and y
x_row = [original_data_key] + keywords_in_tweet + [state_in_tweet, location_in_tweet, y_sentiment, y_when, y_type]
X.append(x_row)
# Store the original example in a dictionary for future exploration
row['classes'] = ("s{}".format(y_sentiment), "w{}".format(y_when), "k{}".format(y_type))
row['data_key'] = original_data_key
original_data.append(row)
original_data_key = original_data_key + 1
print("Converting data to numpy matrix")
X = np.matrix(X)
def plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,
n_jobs=1, train_sizes=np.linspace(.1, 1.0, 5)):
"""
Generate a simple plot of the test and training learning curve.
Parameters
----------
estimator : object type that implements the "fit" and "predict" methods
An object of that type which is cloned for each validation.
title : string
Title for the chart.
X : array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape (n_samples) or (n_samples, n_features), optional
Target relative to X for classification or regression;
None for unsupervised learning.
ylim : tuple, shape (ymin, ymax), optional
Defines minimum and maximum yvalues plotted.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- An object to be used as a cross-validation generator.
- An iterable yielding train/test splits.
For integer/None inputs, if ``y`` is binary or multiclass,
:class:`StratifiedKFold` is used. If the estimator is not a classifier
or if ``y`` is neither binary nor multiclass, :class:`KFold` is used.
Refer to the :ref:`User Guide <cross_validation>` for the various
cross-validators that can be used here.
n_jobs : integer, optional
Number of jobs to run in parallel (default 1).
"""
plt.figure()
plt.title(title)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel("Training examples")
plt.ylabel("Error")
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=cv, n_jobs=n_jobs, train_sizes=train_sizes)
train_scores = 1 - train_scores
test_scores = 1 - test_scores
train_scores_mean = np.mean(train_scores, axis=1)
train_scores_std = np.std(train_scores, axis=1)
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_std = np.std(test_scores, axis=1)
plt.grid()
plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1,
color="r")
plt.fill_between(train_sizes, test_scores_mean - test_scores_std,
test_scores_mean + test_scores_std, alpha=0.1, color="g")
plt.plot(train_sizes, train_scores_mean, 'o-', color="r",
label="Training error")
plt.plot(train_sizes, test_scores_mean, 'o-', color="g",
label="Cross-validation error")
plt.legend(loc="best")
return plt
# Cross validation with 2 iterations to get smoother mean test and train
# score curves, each time with 20% data randomly selected as a validation set.
cv = ShuffleSplit(n_splits=2, test_size=0.2, random_state=0)
n = X.shape[1]
estimator = LogisticRegression(C=1/lambda_reg, solver=solver)
print("Training and plotting learning curves...")
plot_learning_curve(estimator, 'Learning curve', X[:, 0:n-3], np.ravel(X[:, n-1]), ylim=(0, 0.5), cv=cv, n_jobs=1)
plt.show()
```
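The script above plots the learning curve only for the last target column (`y_type`). A small follow-up sketch, assuming the same column layout built earlier (the last three columns of `X` hold `y_sentiment`, `y_when` and `y_type`), showing how the two remaining targets could be plotted as well:
```python
# Assumption: X keeps the layout built above, so columns n-3 and n-2 hold the
# sentiment and "when" targets respectively.
plot_learning_curve(estimator, 'Learning curve (sentiment)',
                    X[:, 0:n-3], np.ravel(X[:, n-3]), ylim=(0, 0.5), cv=cv, n_jobs=1)
plot_learning_curve(estimator, 'Learning curve (when)',
                    X[:, 0:n-3], np.ravel(X[:, n-2]), ylim=(0, 0.5), cv=cv, n_jobs=1)
plt.show()
```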
#### File: ml_weather/example4/example4.py
```python
import csv
import math
from random import shuffle
import numpy as np
import string
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import cross_val_score
from frequent import get_most_frequent_terms
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
from nltk.corpus import stopwords
from nltk.tokenize import TweetTokenizer
# Prepare stopwords
punctuation = list(string.punctuation)
stopwords_list = stopwords.words('english') + punctuation \
+ ['rt', 'via', '…', '...', '️', 'ヽ', '、', '`' ]
print("Reading CSV...")
csvfile = open('data/train.csv', newline='')
datareader = csv.DictReader(csvfile)
data = list(datareader)
print("Shuffling data...")
shuffle(data)
data = data[0: 10000]
HOW_MANY_FEATURES = 1000
print("Selecting features based on the most {} common words.".format(HOW_MANY_FEATURES))
tokenizer = TweetTokenizer(preserve_case=True)
vectorizer = TfidfVectorizer(
min_df=1,
max_features=HOW_MANY_FEATURES,
ngram_range=(1, 3),
stop_words=stopwords_list,
#tokenizer=tokenizer.tokenize
)
def filter_tweets(data):
for row in data:
yield row['tweet']
print("Generating data features...")
X = vectorizer.fit_transform(filter_tweets(data))
print(vectorizer.get_feature_names())
X = X.toarray()
y = []
for row in data:
# for now, we only use the weather type class
y_type_classes = [row['k1'], row['k2'], row['k3'], row['k4'], row['k5'], row['k6'], row['k7'], row['k8'],
row['k9'], row['k10'], row['k11'], row['k12'], row['k13'], row['k14'], row['k15']]
y_row = [ float(val) for val in y_type_classes ]
y.append(y_row)
print("Converting data to numpy matrix")
X = np.matrix(X)
y = np.matrix(y)
sigmoider = lambda val: 1 if float(val) >= 0.3 else 0
vsigmoid = np.vectorize(sigmoider)
print("Training...")
classifier = MLPClassifier(hidden_layer_sizes=(2000, 1000, 500))
precision = cross_val_score(classifier, X, vsigmoid(y), scoring='precision_weighted')
recall = cross_val_score(classifier, X, vsigmoid(y), scoring='recall_weighted')
meanprecision = np.mean(precision)
meanrecall = np.mean(recall)
print('Precision')
print(meanprecision)
print('Recall')
print(meanrecall)
print('F1')
print(2 * (meanprecision * meanrecall) / (meanprecision + meanrecall) )
```
#### File: ml_weather/example6/frequent.py
```python
import csv
import string
from pprint import pprint
from nltk.corpus import stopwords
from collections import Counter
from nltk.tokenize import TweetTokenizer
# Prepare stopwords
punctuation = list(string.punctuation)
stopwords_list = stopwords.words('english') + punctuation \
+ ['rt', 'via', '…', '...', '️', 'ヽ', '、', '`' ]
def get_most_frequent_terms(tweets, how_many):
tokenizer = TweetTokenizer(preserve_case=False)
counter = Counter()
for tweet in tweets:
tokens = tokenizer.tokenize(tweet)
filtered_tokens = [token for token in tokens if token not in stopwords_list]
counter.update(filtered_tokens)
return counter.most_common(how_many)
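# Illustrative example (made-up tweets, not from the original data):
# get_most_frequent_terms(["sunny day", "a sunny morning"], 1)
# tokenizes, drops stopwords/punctuation and returns [('sunny', 2)]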
``` |
{
"source": "jramcast/music-genre-classification-audioset",
"score": 3
} |
#### File: mgc/audioset/transform.py
```python
import numpy as np
import tensorflow as tf
from mgc.audioset.ontology import MUSIC_GENRE_CLASSES
def subset_by_class(X, y, classes=[]):
classes_ids = [c['index'] for c in classes]
# Select only samples that have any of the classes active
sample_indexes = np.unique(np.nonzero(y[:, classes_ids])[0])
filtered_X = X[sample_indexes, :]
filtered_y = y[sample_indexes, :]
filtered_y = take_y_for_classes(filtered_y, classes)
return filtered_X, filtered_y
def take_y_for_classes(y, classes=[]):
classes_ids = [c['index'] for c in classes]
return y[:, classes_ids]
def flatten_features(X: np.ndarray) -> np.ndarray:
'''
Flattens a (num_samples x 10 x 128) array to (num_samples x 1280).
Audioset provides 128 features per second, with 10 seconds per sample.
Use this method when you need a single dimension of features.
'''
return np.array(X).reshape(-1, 1280)
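# Illustrative shapes: an input of shape (32, 10, 128) becomes (32, 1280),
# i.e. the ten one-second embeddings of each sample are concatenated.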
def tensor_to_numpy(ids_tensor, X_tensor, y_tensor):
with tf.Session() as sess:
ids = np.array([])
X = np.ndarray((0, 10, 128))
y = np.ndarray((0, len(MUSIC_GENRE_CLASSES)))
while True:
try:
(ids_batch, features_batch, labels_batch) = sess.run(
(ids_tensor, X_tensor, y_tensor)
)
ids = np.concatenate([ids, ids_batch])
X = np.concatenate([X, features_batch], axis=0)
y = np.concatenate([y, labels_batch], axis=0)
except tf.errors.OutOfRangeError:
break
return ids, X, y
```
#### File: music-genre-classification-audioset/mgc/evaluation.py
```python
import logging
from sklearn.base import BaseEstimator
from keras.models import Model
from mgc.experiments.base import Evaluator
from mgc.metrics import MetricsLogger
from mgc import audioset
class SklearnModelEvaluator(Evaluator):
def __init__(self, classmetrics_filepath):
self.classmetrics_filepath = classmetrics_filepath
def evaluate(self, model: BaseEstimator, X, y, X_test, y_test):
metrics_logger = MetricsLogger(
classes=audioset.ontology.MUSIC_GENRE_CLASSES,
classsmetrics_filepath=self.classmetrics_filepath,
show_top_classes=25,
class_sort_key='ap'
)
logging.info('---- Train stats ----')
predictions = model.predict(X)
metrics_logger.log(predictions, y)
logging.info('---- Test stats ----')
predictions = model.predict(X_test)
metrics_logger.log(predictions, y_test, show_classes=True)
class KerasModelEvaluator(Evaluator):
def __init__(self, classmetrics_filepath):
self.classmetrics_filepath = classmetrics_filepath
def evaluate(self, model: Model, X, y, X_test, y_test):
metrics_logger = MetricsLogger(
classes=audioset.ontology.MUSIC_GENRE_CLASSES,
classsmetrics_filepath=self.classmetrics_filepath,
show_top_classes=25,
class_sort_key='ap'
)
logging.info('---- Test stats ----')
predictions = model.predict(X_test)
metrics_logger.log(predictions, y_test, show_classes=True)
```
#### File: mgc/experiments/tree.py
```python
import logging
import time
from sklearn.tree import DecisionTreeClassifier
from mgc.experiments.base import Experiment
class DecisionTreeExperiment(Experiment):
def run(self):
'''
Runs the experiment
'''
X, y, X_test, y_test = self.data_loader.load(
self.balanced,
repeat=False
)
model = self.train(X, y)
self.persistence.save(model)
self.evaluator.evaluate(model, X, y, X_test, y_test)
print('Done. Check the logs/ folder for results')
def train(self, X, y):
model = DecisionTreeClassifier()
logging.info('Model: %s', model)
logging.info('Training...')
start_time = time.time()
model.fit(X, y)
logging.info(
'Training done: {:.3f} s'.format(time.time() - start_time)
)
return model
```
#### File: mgc/tests/transform_test.py
```python
import numpy as np
from mgc.audioset import transform, ontology
MUSIC_GENRE_CLASSES = ontology.find_children('Music genre')
def test_take_y_for_classes_returns_a_matrix_only_for_the_requested_classes():
# GIVEN 3 samples with 527 possible labels each
num_samples = 3
num_classes = 527
y = np.full((num_samples, num_classes), 0)
# We initialize the values of the 3 samples for some
# random classes to later check that they are in the right place
class_216 = [0, 0, 1]
class_231 = [0, 1, 0]
class_237 = [0, 1, 1]
class_265 = [1, 0, 0]
class_30 = [1, 0, 1]
y[:, 216] = class_216
y[:, 231] = class_231
y[:, 237] = class_237
y[:, 265] = class_265
y[:, 30] = class_30
# GIVEN only music genre labels are taken from the 527
y = transform.take_y_for_classes(y, MUSIC_GENRE_CLASSES)
# THEN we expect to have only the selected classes
assert y.shape == (3, 53)
def test_take_y_for_classes_makes_the_right_selection():
# GIVEN 3 samples with 527 possible labels each
num_samples = 3
num_classes = 527
y = np.full((num_samples, num_classes), 0)
# We initialize the values of the 3 samples for some
# random classes to later check that they are in the right place
class_216 = [0, 0, 1]
class_231 = [0, 1, 0]
class_237 = [0, 1, 1]
class_265 = [1, 0, 0]
class_30 = [1, 0, 1]
y[:, 216] = class_216
y[:, 231] = class_231
y[:, 237] = class_237
y[:, 265] = class_265
y[:, 30] = class_30
# GIVEN only music genre labels are taken from the 527
y = transform.take_y_for_classes(y, MUSIC_GENRE_CLASSES)
# THEN we expect to see the selected classes in the exact same positions
# that they have in the MUSIC_GENRE_CLASSES list
assert np.array_equal(y[:, 0], class_216)
assert np.array_equal(y[:, 40], class_30)
assert np.array_equal(y[:, 15], class_231)
assert np.array_equal(y[:, 21], class_237)
assert np.array_equal(y[:, 52], class_265)
``` |
{
"source": "jramcast/music-recommender",
"score": 3
} |
#### File: commands/msd/run_matrix_fact.py
```python
import os
import sys
sys.path.insert(
0,
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"../../../.."
)
)
import pathlib
import json
import pandas as pd
import numpy as np
from timeit import default_timer as timer
from scipy.sparse import csc_matrix, save_npz, load_npz
from recommender.domain.scoring import msd_mAP
from sklearn.decomposition import TruncatedSVD
data_path = os.path.join(
pathlib.Path(__file__).parent.absolute(),
"../../../../data/msdchallenge/"
)
kaggle_users_filepath = os.path.join(
data_path, "kaggle_challenge_files/kaggle_users.txt"
)
kaggle_songs_filepath = os.path.join(
data_path, "kaggle_challenge_files/kaggle_songs.txt"
)
tfd_train_triplets_filepath = os.path.join(
data_path, "TasteProfileDataset/train_triplets.txt"
)
csc_filepath = os.path.join(
data_path, "csc_matrix.npz"
)
songs_to_index_filepath = os.path.join(
data_path, "songs_to_index.json"
)
users_to_index_filepath = os.path.join(
data_path, "users_to_index.json"
)
eval_triplets_filepath = os.path.join(
data_path, "kaggle_challenge_files/kaggle_visible_evaluation_triplets.txt"
)
def get_train_triplets():
with open(tfd_train_triplets_filepath, "r") as f:
# train_triplets = [line.strip().split("\t") for line in f] # generates [user, song, count]
for line in f:
yield line.strip().split("\t")
# Build users list
print("Building users list...")
## Include kaggle users
users = []
with open(kaggle_users_filepath, "r") as f:
users = [userid.strip() for userid in f.readlines()]
## And also users from train triplets
users = set(users + [user for user, _, _ in get_train_triplets()])
users_to_index = dict([(userid, idx) for (idx, userid) in enumerate(users)])
print("Building songs list...")
# Build songs list
# (kaggle already has all songs so we only need to get them from the kaggle file)
songs_to_index = {}
songs_index_to_ids = {}
with open(kaggle_songs_filepath, "r") as f:
for id_and_index in f.readlines():
songid, index = id_and_index.strip().split(" ")
index = int(index)
songs_to_index[songid] = index
songs_index_to_ids[index] = songid
def load_taste_dataset_as_csc_matrix():
try:
return load_npz(csc_filepath)
except (FileNotFoundError, OSError):
return generate_csc_matrix()
def generate_csc_matrix():
global songs_to_index
print("Processing listening counts...")
users_to_songs = {}
for userid, songid, count in get_train_triplets():
count = int(count)
if songid not in songs_to_index:
print("song " + songid + " not in song_to_index")
continue
if userid in users_to_songs:
# No user-song duplicates
# if song in users_to_songs[user]:
# users_to_songs[user][song] += count
# else:
# users_to_songs[user][song] = count
users_to_songs[userid][songid] = count
else:
users_to_songs[userid] = { songid: count }
# # Map songids to indexes
# with open(songs_to_index_filepath, 'w') as outfile:
# songs_to_index = { song: i for i, song in enumerate(song_ids) }
# json.dump(songs_to_index, outfile, separators=(',', ':'))
# # Map user ids to indexes
# with open(users_to_index_filepath, 'w') as outfile:
# users_to_index = { user: i for i, user in enumerate(users_to_songs.keys()) }
# json.dump(users_to_index, outfile, separators=(',', ':'))
# Build co-ocurrence matrix
print("Building users to songs matrix...")
data_user_indexes = []
data_song_indexes = []
data = []
for userid in users_to_index.keys():
user_index = int(users_to_index[userid])
for songid in users_to_songs.get(userid, {}).keys():
play_count = users_to_songs[userid][songid]
song_index = int(songs_to_index[songid])
data.append(play_count)
data_user_indexes.append(user_index)
data_song_indexes.append(song_index)
data = np.array(data)
data_user_indexes = np.array(data_user_indexes)
data_song_indexes = np.array(data_song_indexes)
## Generate co-ocurrence matrix
data = csc_matrix((data, (data_user_indexes, data_song_indexes)))
# Persist data
save_npz(csc_filepath, data)
return data
# try:
# with open(users_to_index_filepath) as json_file:
# users_to_index = json.load(json_file)
# except:
# generate_csc_matrix()
# with open(users_to_index_filepath) as json_file:
# users_to_index = json.load(json_file)
# try:
# with open(songs_to_index_filepath) as json_file:
# songs_to_index = json.load(json_file)
# except:
# generate_csc_matrix()
# with open(songs_to_index_filepath) as json_file:
# songs_to_index = json.load(json_file)
# Load data
X = load_taste_dataset_as_csc_matrix()
print("Loaded data")
# TODO: Normalize user listening counts: song count / total user counts
# Run matrix factorization
n_components = 5
start = timer()
print(f"Training with {n_components} components...")
model = TruncatedSVD(n_components, random_state=42)
W = model.fit_transform(X)
S = model.components_
end = timer()
elapsed = end - start
print(f"Traing done in {elapsed} seconds")
def predict(userid, songid):
user_idx = users_to_index[userid]
song_idx = int(songs_to_index[songid])
return np.dot(W[user_idx], S[:, song_idx])
def predict_all(userid):
user_idx = users_to_index[userid]
return np.dot(W[user_idx], S)
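# Shapes (with n_components=5): W is (n_users, 5) and S is (5, n_songs), so
# predict() reconstructs a single user/song score via a dot product and
# predict_all() returns the reconstructed score vector over all songs.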
# Predict example
print("\n==== Predict example ====")
print(
predict("b80344d063b5ccb3212f76538f3d9e43d87dca9e", "SOIZAZL12A6701C53B")
)
print("\n\n====== EVALUATION ==========")
# Load evaluation users list
eval_users = []
with open(kaggle_users_filepath, "r") as f:
eval_users = [line.strip() for line in f.readlines()]
print("\n==== Total eval users ====")
print(len(eval_users))
# Load evaluation songs list
eval_songs = []
with open(kaggle_songs_filepath, "r") as f:
eval_songs = [id_and_index.strip().split(" ")[0] for id_and_index in f.readlines()]
# Load evaluation listening histories
eval_listen_count = {}
with open(eval_triplets_filepath, "r") as f:
line = f.readline()
while line:
user, song, _ = line.strip().split("\t")
songidx = songs_to_index[song]
if user in eval_listen_count:
eval_listen_count[user].add(songidx)
else:
eval_listen_count[user] = set([songidx])
line = f.readline()
# Compute recommendations for evaluation users
recommendations = {}
eval_users = eval_users[:1000]
for i, userid in enumerate(eval_users):
predictions = predict_all(userid)
# To get a rank, we must reverse the order
# We also need to limit the rank to 500
ranked = np.argsort(predictions)[::-1][:500]
# user_song_scores = sorted(
# songs_to_index.keys(),
# key=lambda songid: predictions[int(songs_to_index[songid])],
# reverse=True
# )
# recommendations[userid] = [songs_index_to_ids[idx] for idx in ranked]
recommendations[userid] = ranked
print(f"Generated recommendations for {i} users")
score = msd_mAP(
eval_users,
recommendations,
eval_listen_count
)
print("mAP: ", score)
```
#### File: app/commands/run_collaborative_knn_recommendation_example.py
```python
import os
import pandas as pd
import pathlib
from scipy.sparse import csr_matrix
from sklearn.neighbors import NearestNeighbors
from fuzzywuzzy import fuzz
# Load data
movies_filename = "movie.csv"
ratings_filename = "rating.csv"
data_path = os.path.join(
pathlib.Path(__file__).parent.absolute(),
"../../../data/examples"
)
df_movies = pd.read_csv(
os.path.join(data_path, movies_filename),
usecols=["movieId", "title"],
dtype={"movieId": "int32", "title": "str"}
)
df_ratings = pd.read_csv(
os.path.join(data_path, ratings_filename),
usecols=["userId", "movieId", "rating"],
dtype={"userId": "int32", "movieId": "int32", "rating": "float32"}
)
# Only take the first 100,000 ratings to make this example faster
df_ratings = df_ratings[:100000]
print("========= MOVIES DF =========")
print("MOVIES DF HEAD\n", df_movies.head(), end="\n\n")
print("MOVIES DF SHAPE", df_movies.shape, end="\n\n")
print("========= RATINGS DF =========")
print("RATINGS DF HEAD\n", df_ratings.head(), end="\n\n")
print("RATINGS DF SHAPE", df_ratings.shape, end="\n\n")
# Most of the movies in the dataset have very few ratings,
# so we only take into account those movies
# that have been rated at least 5 times
popularity_thres = 5
df_movies_cnt = pd.DataFrame(
df_ratings.groupby("movieId").size(),
columns=["count"]
)
popular_movies = list(
set(df_movies_cnt.query("count >= @popularity_thres").index
))
df_ratings_drop_movies = df_ratings[df_ratings.movieId.isin(popular_movies)]
print("shape of original ratings data: ", df_ratings.shape)
print(
"shape of ratings data after dropping unpopular movies: ",
df_ratings_drop_movies.shape
)
# Similarly, we only take into account those users that have rated
# at least 5 movies
ratings_thres = 5
df_users_cnt = pd.DataFrame(
df_ratings_drop_movies.groupby("userId").size(),
columns=["count"]
)
active_users = list(set(df_users_cnt.query("count >= @ratings_thres").index))
df_ratings_drop_users = df_ratings_drop_movies[
df_ratings_drop_movies.userId.isin(active_users)
]
print("shape of original ratings data: ", df_ratings.shape)
print(
"shape of ratings data after dropping unpopular/inactive movies/users: ",
df_ratings_drop_users.shape
)
# RATING MATRIX
# Pivot ratings into movie features to get a rating matrix
# Each movie is a row and each user is a colum, values are ratings
# 0 indicates no rating
movie_user_mat = df_ratings_drop_movies.pivot(
index="movieId",
columns="userId",
values="rating"
).fillna(0)
print("========= MOVIE FEATURES DF =========")
print("MOVIE FEATURES DF HEAD\n", movie_user_mat.head(), end="\n\n")
print("MOVIE FEATURES DF SHAPE", movie_user_mat.shape, end="\n\n")
# Because many values are zero (the matrix is extremely sparse),
# we convert the matrix into a Compressed Sparse Row (CSR) matrix for better efficiency:
# https://docs.scipy.org/doc/scipy-0.18.1/reference/generated/scipy.sparse.csr_matrix.html
movie_user_mat_sparse = csr_matrix(movie_user_mat.values)
# We also need a map of movie titles to ids for pretty printing
movies_list = list(
df_movies.set_index("movieId").loc[movie_user_mat.index].title
)
movie_to_idx = {
movie: i for i, movie in enumerate(movies_list)
}
# Fit the Knn classifier
model_knn = NearestNeighbors(
metric="cosine",
algorithm="brute",
n_neighbors=20,
n_jobs=-1
)
print("Training........")
model_knn.fit(movie_user_mat_sparse)
def make_recommendation(model_knn, data, mapper, fav_movie, n_recommendations):
"""
return top n similar movie recommendations based on the user's input movie
Parameters
----------
model_knn: sklearn model, knn model
data: movie-user matrix
mapper: dict, map movie title name to index of the movie in data
fav_movie: str, name of user input movie
n_recommendations: int, top n recommendations
Return
------
list of top n similar movie recommendations
"""
# fit
model_knn.fit(data)
# get input movie index
print("You have input movie:", fav_movie)
idx = fuzzy_matching(mapper, fav_movie, verbose=True)
print("Recommendation system start to make inference")
print("......\n")
distances, indices = model_knn.kneighbors(
data[idx], n_neighbors=n_recommendations+1)
raw_recommends = sorted(list(zip(indices.squeeze().tolist(
), distances.squeeze().tolist())), key=lambda x: x[1])[:0:-1]
# get reverse mapper
reverse_mapper = {v: k for k, v in mapper.items()}
# print recommendations
print("Recommendations for {}:".format(fav_movie))
for i, (idx, dist) in enumerate(raw_recommends):
print("{0}: {1}, with distance of {2}".format(
i+1, reverse_mapper[idx], dist))
def fuzzy_matching(mapper, fav_movie, verbose=True):
"""
return the closest text match via fuzzy ratio.
Parameters
----------
mapper: dict, map movie title name to index of the movie in data
fav_movie: str, name of user input movie
verbose: bool, print log if True
Return
------
index of the closest match
"""
match_tuple = []
# get match
for title, idx in mapper.items():
ratio = fuzz.ratio(title.lower(), fav_movie.lower())
if ratio >= 60:
match_tuple.append((title, idx, ratio))
# sort
match_tuple = sorted(match_tuple, key=lambda x: x[2])[::-1]
if not match_tuple:
print("Oops! No match is found")
return
if verbose:
print("Found possible matches in our database: {0}\n".format(
[x[0] for x in match_tuple]))
return match_tuple[0][1]
if __name__ == "__main__":
"""
Test the recommendation model:
K-nn has issues:
* popularity bias
* item cold-start: new movies won't have ratings,
hence won't be recommended
* scalability: most of the values in the ratings(movie-user)
sparse matrix will be 0, which is a waste of space.
"""
my_favorite = "Toy Story"
make_recommendation(
model_knn=model_knn,
data=movie_user_mat_sparse,
fav_movie=my_favorite,
mapper=movie_to_idx,
n_recommendations=10
)
```
#### File: domain/msd/models.py
```python
from dataclasses import dataclass
@dataclass
class User:
id: str
def __hash__(self):
return hash(self.id)
def __eq__(self, other):
return self.id == other.id
@dataclass
class Song:
id: str
kaggle_index: int
def __hash__(self):
return hash(self.id)
def __eq__(self, other):
return self.id == other.id
@dataclass
class Triplet:
user: User
song: Song
count: int
```
#### File: domain/msd/sets.py
```python
from abc import ABC
from recommender.domain.msd.models import Song, Triplet, User
from typing import Dict, Iterable, List, Set, Tuple
class MSDDataReader(ABC):
def read_triplets(self) -> Iterable[Tuple[str, str, int]]:
raise NotImplementedError()
def read_users(self) -> Iterable[str]:
raise NotImplementedError()
def read_songs(self) -> Iterable[Tuple[str, int]]:
raise NotImplementedError()
class UserListens:
users: Dict[str, User]
songs: Dict[str, Song]
reader: MSDDataReader
songs_by_user: Dict[User, Set[Song]]
def __init__(self, reader: MSDDataReader):
self.reader = reader
# instance-level containers (shared mutable class attributes would leak state between instances)
self.users = {}
self.songs = {}
self.songs_by_user = {}
def load(self):
for userid in self.reader.read_users():
self.users[userid] = User(userid)
for songid, kaggle_index in self.reader.read_songs():
self.songs[songid] = Song(songid, kaggle_index)
i = 0
for userid, songid, count in self.reader.read_triplets():
user = self.users[userid]
song = self.songs[songid]
triplet = Triplet(user, song, count)
if user in self.songs_by_user:
self.songs_by_user[user].add(song)
else:
self.songs_by_user[user] = set([song])
i += 1
if i > 100:
break
def get_users(self) -> Iterable[User]:
return self.users.values()
def get_songs(self) -> Iterable[Song]:
return self.songs.values()
def get_user_songs(self, user: User):
return self.songs_by_user.get(user, [])
def user_has_listened_to(self, user: User, song: Song):
return song in self.songs_by_user[user]
```
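`UserListens` depends only on the `MSDDataReader` interface, so any object that yields users, songs and triplets can feed it. A minimal in-memory sketch (the class name, ids and counts below are invented for illustration):
```python
from typing import Iterable, Tuple


class InMemoryReader(MSDDataReader):
    """Toy reader with hard-coded data, useful for tests."""

    def read_users(self) -> Iterable[str]:
        return ["user-a"]

    def read_songs(self) -> Iterable[Tuple[str, int]]:
        return [("song-1", 1), ("song-2", 2)]

    def read_triplets(self) -> Iterable[Tuple[str, str, int]]:
        return [("user-a", "song-1", 3)]


listens = UserListens(InMemoryReader())
listens.load()
user = next(iter(listens.get_users()))
print([song.id for song in listens.get_user_songs(user)])  # ['song-1']
```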
#### File: http/flask/server.py
```python
from flask import Flask
class FlaskServer:
app = Flask("mgr")
@app.route('/')
def hello_world():
return 'Hello, World!'
def get_server(self):
return self.app
```
#### File: recommender/infrastructure/lastfm.py
```python
import logging
from datetime import datetime
from time import sleep
from recommender.domain.artist import Artist
import pylast
from typing import Iterable, List
from ..domain.track import Track
class LastFMListeningRepository:
logger: logging.Logger
def __init__(
self, logger: logging.Logger, apikey, apisecret, username, password
) -> None:
self.logger = logger
self.lastfm = pylast.LastFMNetwork(
api_key=apikey,
api_secret=apisecret
)
self.username = username
def get_tracks(
self, time_from: datetime, time_to: datetime, retry=0
) -> Iterable[Track]:
user = self.lastfm.get_user(self.username)
try:
rawtracks: List[pylast.PlayedTrack] = user.get_recent_tracks(
limit=None,
time_from=int(time_from.timestamp()),
time_to=int(time_to.timestamp()),
)
except pylast.WSError as e:
next_retry = retry + 1
delay = next_retry**2
self.logger.warn(f"{e}. Next try in {delay} seconds")
sleep(delay)
return self.get_tracks(time_from, time_to, next_retry)
for t in rawtracks:
# So that we don't hit the Lastfm api limits
sleep(1)
try:
yield self.as_track(t)
except Exception as e:
self.logger.warn(f"Could not retrieve track {e}")
def as_track(self, raw: pylast.PlayedTrack) -> Track:
raw_artist: pylast.Artist = raw.track.artist
raw_track: pylast.Track = raw.track
raw_tags: List[pylast.TopItem] = raw_track.get_top_tags(limit=10)
# to be able to retrieve user counts
raw_track.username = self.username
artist = Artist(
raw_artist.get_correction() or "",
raw_artist.get_mbid(),
)
if raw.timestamp:
played_at = datetime.utcfromtimestamp(int(raw.timestamp))
else:
played_at = None
return Track(
artist,
raw_track.get_correction() or "",
[t.item.get_name() for t in raw_tags],
raw_track.get_userloved() or False,
int(raw_track.get_userplaycount() or 0),
int(raw_track.get_playcount()),
played_at,
raw_track.get_mbid()
)
```
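A usage sketch of the repository above; the logger name, API credentials, username and date range are placeholders, not values from the original project:
```python
import logging
from datetime import datetime, timedelta

repo = LastFMListeningRepository(
    logging.getLogger("recommender"),
    apikey="YOUR_LASTFM_API_KEY",
    apisecret="YOUR_LASTFM_API_SECRET",
    username="some_lastfm_user",
    password=None,
)

now = datetime.utcnow()
# get_tracks yields Track objects built from the Last.fm played-tracks history
for track in repo.get_tracks(time_from=now - timedelta(days=7), time_to=now):
    print(track)
```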
#### File: msd/loaders/users.py
```python
import os
def load(data_dir):
"""
Load users list from Kaggle data
"""
users_filepath = os.path.join(
data_dir, "kaggle_users.txt"
)
with open(users_filepath, "r") as f:
return [line.strip() for line in f.readlines()]
```
#### File: tests/unit/scoring_test.py
```python
from recommender.domain.scoring import msd_average_precision
def test_ap_1():
recommendation = [1, 2, 3]
actual = [1, 2, 3]
ap = msd_average_precision(recommendation, actual)
assert ap == 1
def test_ap_0():
recommendation = [4, 5, 6]
actual = [1, 2, 3]
ap = msd_average_precision(recommendation, actual)
assert ap == 0
def test_ap():
recommendation = [5, 6, 1, 2, 3]
actual = [1, 2, 3, 5, 6]
ap = msd_average_precision(recommendation, actual)
assert ap == 0.8
``` |
{
"source": "jramcast/nuublo-predict",
"score": 3
} |
#### File: jramcast/nuublo-predict/train.py
```python
import csv
from classify import classifier
def main():
print('Reading CSV...')
csvfile = open('classify/data/train.csv', newline='')
datareader = csv.DictReader(csvfile)
data = list(datareader)
print('Training...')
classifier.train(data)
if __name__ == "__main__":
main()
``` |
{
"source": "jramcast/rasa_core",
"score": 2
} |
#### File: rasa_core/channels/socketio.py
```python
import logging
from typing import Optional, Text, Any, List, Dict
import socketio
from flask import Blueprint, jsonify
from rasa_core.channels import InputChannel
from rasa_core.channels.channel import (
UserMessage,
OutputChannel)
logger = logging.getLogger(__name__)
class SocketBlueprint(Blueprint):
def __init__(self, sio, socketio_path, *args, **kwargs):
self.sio = sio
self.socketio_path = socketio_path
super(SocketBlueprint, self).__init__(*args, **kwargs)
def register(self, app, options, first_registration=False):
app.wsgi_app = socketio.Middleware(self.sio, app.wsgi_app,
self.socketio_path)
super(SocketBlueprint, self).register(app, options, first_registration)
class SocketIOOutput(OutputChannel):
@classmethod
def name(cls):
return "socketio"
def __init__(self, sio, bot_message_evt):
self.sio = sio
self.bot_message_evt = bot_message_evt
def send(self, recipient_id: Text, message: Any) -> None:
"""Sends a message to the recipient."""
self.sio.emit(message, room=recipient_id)
def _send_message(self, recipient_id: Text, response: Any) -> None:
"""Sends a message to the recipient using the bot event."""
self.sio.emit(self.bot_message_evt, response, room=recipient_id)
def send_text_message(self, recipient_id: Text, message: Text) -> None:
"""Send a message through this channel."""
self._send_message(recipient_id, {"text": message})
def send_image_url(self, recipient_id: Text, image_url: Text) -> None:
"""Sends an image. Default will just post the url as a string."""
message = {
"attachment": {
"type": "image",
"payload": {"src": image_url}
}
}
self._send_message(recipient_id, message)
def send_text_with_buttons(self, recipient_id: Text, text: Text,
buttons: List[Dict[Text, Any]],
**kwargs: Any) -> None:
"""Sends buttons to the output."""
message = {
"text": text,
"quick_replies": []
}
for button in buttons:
message["quick_replies"].append({
"content_type": "text",
"title": button['title'],
"payload": button['payload']
})
self._send_message(recipient_id, message)
def send_custom_message(self, recipient_id: Text,
elements: List[Dict[Text, Any]]) -> None:
"""Sends elements to the output."""
message = {"attachment": {
"type": "template",
"payload": {
"template_type": "generic",
"elements": elements[0]
}}}
self._send_message(recipient_id, message)
class SocketIOInput(InputChannel):
"""A socket.io input channel."""
@classmethod
def name(cls):
return "socketio"
@classmethod
def from_credentials(cls, credentials):
credentials = credentials or {}
return cls(credentials.get("user_message_evt", "user_uttered"),
credentials.get("bot_message_evt", "bot_uttered"),
credentials.get("namespace"))
def __init__(self,
user_message_evt: Text = "user_uttered",
bot_message_evt: Text = "bot_uttered",
namespace: Optional[Text] = None,
socketio_path='/socket.io' # type: Optional[Text]
):
self.bot_message_evt = bot_message_evt
self.user_message_evt = user_message_evt
self.namespace = namespace
self.socketio_path = socketio_path
def blueprint(self, on_new_message):
sio = socketio.Server()
socketio_webhook = SocketBlueprint(sio, self.socketio_path,
'socketio_webhook', __name__)
@socketio_webhook.route("/", methods=['GET'])
def health():
return jsonify({"status": "ok"})
@sio.on('connect', namespace=self.namespace)
def connect(sid, environ):
logger.debug("User {} connected to socketio endpoint.".format(sid))
@sio.on('disconnect', namespace=self.namespace)
def disconnect(sid):
logger.debug("User {} disconnected from socketio endpoint."
"".format(sid))
@sio.on(self.user_message_evt, namespace=self.namespace)
def handle_message(sid, data):
output_channel = SocketIOOutput(sio, self.bot_message_evt)
message = UserMessage(data['message'], output_channel, sid,
input_channel=self.name())
on_new_message(message)
return socketio_webhook
``` |
{
"source": "jramirez857/ploomber",
"score": 2
} |
#### File: assets/nbs-factory/nbs_factory.py
```python
from ploomber import DAG
from ploomber.tasks import NotebookRunner
from pathlib import Path
import tempfile
from ploomber.products import File
def make():
dag = DAG()
NotebookRunner(Path('clean.py'),
File(Path(tempfile.mkdtemp()) / 'file.html'),
dag=dag,
name='clean')
return dag
``` |
{
"source": "jramirez857/projects",
"score": 2
} |
#### File: cookbook/python-load/pipeline.py
```python
from ploomber.spec import DAGSpec
from ploomber import with_env
@with_env('env.yaml')
# NOTE: you may add other params to the function, they'll show up in the cli
def make(env):
dag = DAGSpec('pipeline.yaml', env=dict(env)).to_dag()
# NOTE: return the DAG, do not call any methods
return dag
```
#### File: variable-number-of-products/serializer/tasks.py
```python
import json
from random import randint
def variable():
"""
A task that generates a variable number of products (keys are filenames,
values are products)
"""
return {f'{x}.txt': str(x) for x in range(randint(1, 5))}
def many_products_one_variable():
"""
A task that generates a fixed-size product ('one') and a variable-size
product ('variable')
"""
return {
'one': 1,
'variable': {f'{x}.txt': str(x)
for x in range(randint(1, 5))}
}
def variable_downstream(upstream):
"""
A task that dumps to JSON the output of "variable"
"""
return json.dumps(upstream['variable'])
def many_products_one_variable_downstream(upstream):
"""
A task that dumps to JSON the output "variable" of
"many_products_one_variable"
"""
return json.dumps(upstream['many_products_one_variable']['variable'])
```
#### File: guides/serialization/custom.py
```python
from pathlib import Path
import pickle
import json
from ploomber.io import serializer, unserializer
@serializer()
def my_pickle_serializer(obj, product):
Path(product).write_bytes(pickle.dumps(obj))
@unserializer()
def my_pickle_unserializer(product):
return pickle.loads(Path(product).read_bytes())
def write_json(obj, product):
Path(product).write_text(json.dumps(obj))
def read_json(product):
return json.loads(Path(product).read_text())
@serializer({'.json': write_json})
def my_serializer(obj, product):
Path(product).write_bytes(pickle.dumps(obj))
@unserializer({'.json': read_json})
def my_unserializer(product):
return pickle.loads(Path(product).read_bytes())
@serializer({'.json': write_json}, fallback=True)
def my_fallback_serializer(obj, product):
pass
@unserializer({'.json': read_json}, fallback=True)
def my_fallback_unserializer(product):
pass
@serializer(fallback=True, defaults=['.json'])
def my_defaults_serializer(obj, product):
pass
@unserializer(fallback=True, defaults=['.json'])
def my_defaults_unserializer(product):
pass
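# NOTE (added for context): a minimal sketch of how these functions can be wired in
# from pipeline.yaml, assuming this module is importable as `custom`:
#
#   serializer: custom.my_serializer
#   unserializer: custom.my_unserializer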
```
#### File: guides/serialization/tasks.py
```python
def first():
return dict(a=1, b=2)
def second(upstream):
first = upstream['first']
another = dict(a=first['b'] + 1, b=first['a'] + 1)
final = dict(a=100, b=200)
return dict(another=another, final=final)
```
#### File: guides/testing/db.py
```python
from ploomber.clients import SQLAlchemyClient
def get_client():
return SQLAlchemyClient('sqlite:///data.db')
```
#### File: guides/testing/integration_tests.py
```python
import pandas as pd
from ploomber.testing.sql import nulls_in_columns, range_in_column
def test_sql_clean(client, product):
"""Tests for clean.sql
"""
assert not nulls_in_columns(client, ['score', 'age'], product)
min_age, max_age = range_in_column(client, 'age', product)
assert min_age > 0
def test_py_transform(product):
"""Tests for transform.py
"""
df = pd.read_csv(str(product['data']))
assert not df.multiplied_score.isna().sum()
assert df.multiplied_score.min() >= 0
```
#### File: src/ploomberutils/readme.py
```python
from dataclasses import dataclass
from pathlib import Path
from glob import glob
import jupytext
from jinja2 import Template
import papermill as pm
from ploomber.tasks import PythonCallable
from ploomber.products import File
from jupyblog import md
@dataclass
class Example:
path: str
idx: int
@property
def description(self):
content = Path(self.path, '_source.md').read_text()
try:
return md.extract_between_line_content(
content, ('<!-- start description -->',
'<!-- end description -->')).strip()
except Exception as e:
raise ValueError(
f'Could not extract description from {self.path}') from e
@property
def category(self):
return str(Path(self.path).parent)
def to_csv(self):
elements = [self.idx, self.category, self.path, self.description]
return ','.join(f'"{e}"' for e in elements)
def _render(resources_, product):
"""
Generate README.md from _source.md
"""
templates = [
Example(path=path, idx=idx)
for idx, path in enumerate(sorted(glob('templates/*')), start=1)
]
cookbook = [
Example(path=path, idx=idx)
for idx, path in enumerate(sorted(glob('cookbook/*')), start=1)
]
guides = [
Example(path=path, idx=idx)
for idx, path in enumerate(sorted(glob('guides/*')), start=1)
]
template = Template(Path(resources_['source']).read_text())
out = template.render(cookbook=cookbook,
guides=guides,
templates=templates)
Path(product['readme']).write_text(out)
lines = ['"idx","category","name","description"']
for section in [templates, cookbook, guides]:
for example in section:
lines.append(example.to_csv())
Path(product['index']).write_text('\n'.join(lines))
def render(dag):
return PythonCallable(_render, {
'readme': File('README.md'),
'index': File('_index.csv')
},
dag=dag,
params=dict(resources_=dict(source='_source.md')))
def _execute(upstream, product):
"""
Generate README.ipynb from _source.md
"""
nb = jupytext.read(str(upstream.first['readme']))
jupytext.write(nb, str(product))
pm.execute_notebook(str(product), str(product), kernel_name='python3')
def execute(dag):
return PythonCallable(_execute, File('README.ipynb'), dag=dag)
```
#### File: python-api-examples/examples/param_grid.py
```python
from datetime import date
import numpy as np
import pandas as pd
from dateutil.relativedelta import relativedelta
from ploomber import DAG
from ploomber.executors import Serial
from ploomber.tasks import PythonCallable, TaskGroup
from ploomber.products import File
from ploomber.util import ParamGrid, Interval
def get_data(product, dates):
"""
Dummy code, in reality this would usually be a Task that pulls data
from a database
"""
dates_series = pd.date_range(start=dates[0],
end=dates[1],
closed='left',
freq='D')
values = np.random.rand(dates_series.shape[0])
df = pd.DataFrame({'dates': dates_series, 'values': values})
df.to_parquet(str(product))
dag = DAG()
# NOTE: this is only required for testing purposes
dag.executor = Serial(build_in_subprocess=False)
start_date = date(year=2010, month=1, day=1)
end_date = date(year=2019, month=6, day=1)
delta = relativedelta(years=1)
params_array = ParamGrid({
'dates': Interval(start_date, end_date, delta)
}).zip()
def namer(params):
s = str(params['dates'][0]).replace('-', '_')
e = str(params['dates'][1]).replace('-', '_')
return 'get_data_{}_{}'.format(s, e)
TaskGroup.from_params(task_class=PythonCallable,
product_class=File,
product_primitive='products/{{name}}.parquet',
task_kwargs={'source': get_data},
dag=dag,
params_array=params_array,
namer=namer)
dag.plot()
dag.build()
```
#### File: python-api-examples/tests/test_guide.py
```python
import shutil
import subprocess
from pathlib import Path
import os
# we have to use this, nbconvert removes cells that execute shell commands
import jupytext
import pytest
from conftest import _path_to_tests
_base = str(_path_to_tests().parent / 'guide')
nbs = [Path(_base, f) for f in os.listdir(_base) if f.endswith('.ipynb')]
# we cannot use papermill since some notebooks use papermill via NotebookRunner
# there is an issue when this happens, so we just run it as scripts using
# ipython directly
def run_notebook(nb):
print('Running %s' % nb)
out = 'nb.py'
jupytext.write(jupytext.read(nb), out)
# jupytext keeps shell commands but adds them as comments, fix
lines = []
for line in Path(out).read_text().splitlines():
# https://stackoverflow.com/a/29262880/709975
if line.startswith('# !'):
line = 'get_ipython().magic("sx %s")' % line[2:]
lines.append(line)
Path(out).write_text('\n'.join(lines))
exit_code = subprocess.call(['ipython', 'nb.py'])
return exit_code
@pytest.mark.parametrize('nb', nbs, ids=[Path(nb).name for nb in nbs])
def test_guide(nb, tmp_directory):
# TODO: add timeout
name = Path(nb).name
shutil.copy(nb, name)
assert run_notebook(name) == 0
```
#### File: src/ml_advanced/pipeline.py
```python
from ploomber import with_env, DAGConfigurator, SourceLoader
from ploomber.tasks import PythonCallable, NotebookRunner
from ploomber.products import File
from ploomber.executors import Parallel
from ml_advanced import tasks
@with_env
def make(env):
"""
Make pipeline
Returns
-------
ploomber.DAG
A pipeline object
Notes
-----
This is a numpy docstring, which is the standard format in the Python data
ecosystem: https://numpydoc.readthedocs.io/en/latest/format.html
"""
# this function is used by the entry points to automatically initialize
# the env in env.yaml
return _make(env)
def _make(env):
# this is the private function we use to pass the testing environment
cfg = DAGConfigurator(env.dag_config)
dag = cfg.create(name='ml-pipeline')
# run this in parallel
dag.executor = Parallel(processes=3)
loader = SourceLoader(module='ml_advanced.templates')
get = PythonCallable(tasks.get,
File(env.path.data / 'data.parquet'),
dag,
name='get',
params={'sample_frac': env.sample_frac})
fts = PythonCallable(tasks.features,
File(env.path.data / 'features.parquet'),
dag,
name='features')
join = PythonCallable(tasks.join,
File(env.path.data / 'join.parquet'),
dag,
name='join')
get >> fts
(get + fts) >> join
model_classes = [
'sklearn.ensemble.RandomForestClassifier',
# these come from our package, they return a sklearn Pipeline object
'ml_advanced.models.logistic_reg',
'ml_advanced.models.svc',
]
model_param_grids = [
dict(n_estimators=[5, 10, 50, 100], min_samples_leaf=[2, 4, 8]),
dict(clf__penalty=['l1', 'l2'], clf__C=[0.5, 1.0]),
dict(clf__kernel=['linear', 'poly', 'rbf'], clf__C=[0.5, 1.0]),
]
for model_class, model_params in zip(model_classes, model_param_grids):
fit = NotebookRunner(
loader['fit.py'],
product={
'nb': File(env.path.data / f'fit-{model_class}.ipynb'),
'model': File(env.path.data / f'model-{model_class}.joblib')
},
dag=dag,
params={
'model_class': model_class,
'model_params': model_params
},
# NOTE: Argo does not support "." nor "_" in task names. Not
# needed if only running locally
name='fit-' + model_class.replace('.', '--').replace('_', '-'))
join >> fit
return dag
```
#### File: ml-advanced/tests/conftest.py
```python
import pytest
def pytest_addoption(parser):
parser.addoption(
"--force",
action="store_true",
default=False,
help="Force build"
)
@pytest.fixture
def force(request):
return request.config.getoption("--force")
```
#### File: templates/mlflow/hooks.py
```python
import mlflow
from nbconvert import HTMLExporter
from sklearn_evaluation import NotebookIntrospector
def store_report(product, params):
if params['track']:
nb = NotebookIntrospector(product)
run_id = nb['mlflow-run-id'].strip()
# https://nbconvert.readthedocs.io/en/latest/config_options.html#preprocessor-options
exporter = HTMLExporter()
# hide code cells
exporter.exclude_input = True
body, _ = exporter.from_filename(product)
with mlflow.start_run(run_id):
mlflow.log_text(body, 'nb.html')
```
#### File: ml-intermediate/tasks/get.py
```python
from pathlib import Path
import pandas as pd
from sklearn import datasets
def fn(product, sample):
"""Get data
"""
Path(str(product)).parent.mkdir(parents=True, exist_ok=True)
d = datasets.load_iris()
df = pd.DataFrame(d['data'])
df.columns = d['feature_names']
df['target'] = d['target']
if sample:
df = df.sample(frac=0.1)
df.to_parquet(str(product))
```
#### File: ml-intermediate/tasks/join.py
```python
import pandas as pd
def fn(upstream, product):
"""Join raw data with generated features
"""
first = pd.read_parquet(str(upstream['get']))
sepal = pd.read_parquet(str(upstream['sepal-area']))
petal = pd.read_parquet(str(upstream['petal-area']))
df = first.join(sepal).join(petal)
df.to_parquet(str(product))
```
#### File: src/ml_online/infer.py
```python
import pickle
from importlib import resources
from ploomber import OnlineDAG
import ml_online
class InferencePipeline(OnlineDAG):
"""Pipeline for online predictions
Examples
--------
>>> from ml_online.infer import InferencePipeline
>>> pipeline = InferencePipeline()
>>> get = {"sepal length (cm)": 5.1, "sepal width (cm)": 3.5,
... "petal length (cm)": 1.4, "petal width (cm)": 0.2}
>>> pipeline.predict(get=get)
"""
@staticmethod
def get_partial():
with resources.path(ml_online,
'pipeline-features.yaml') as path_to_spec:
path = path_to_spec
return path
@staticmethod
def terminal_params():
model = pickle.loads(resources.read_binary(ml_online, 'model.pickle'))
return dict(model=model)
@staticmethod
def terminal_task(upstream, model):
return int(model.predict(upstream['features'])[0])
```
#### File: src/ml_online/service.py
```python
from flask import Flask, request, jsonify
import pandas as pd
from ml_online.infer import InferencePipeline
pipeline = InferencePipeline()
app = Flask(__name__)
@app.route('/', methods=['POST'])
def predict():
request_data = request.get_json()
get = pd.DataFrame(request_data, index=[0])
out = pipeline.predict(get=get)
return jsonify({'prediction': int(out['terminal'])})
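# Example request (a sketch; assumes the service runs locally on Flask's default port 5000):
#
#   curl -X POST http://localhost:5000/ -H "Content-Type: application/json" \
#        -d '{"sepal length (cm)": 5.1, "sepal width (cm)": 3.5,
#             "petal length (cm)": 1.4, "petal width (cm)": 0.2}'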
``` |
{
"source": "jramirez857/soorgeon",
"score": 3
} |
#### File: _kaggle/look-at-this-note-feature-engineering-is-easy/nb.py
```python
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
from sklearn.model_selection import train_test_split
import seaborn as sns
import matplotlib.pyplot as plt
# %%
train = pd.read_csv('input/train.csv')
train = train.drop(['Id'], axis=1)
pd.set_option('display.max_columns', None)
train.head()
# %%
# data segmentation
X = train.drop('SalePrice', axis=1)
y = train['SalePrice']
train_x, test_x, train_y, test_y = train_test_split(
    X, y, test_size=0.2, shuffle=True, random_state=0) # train/valid 8:2 split
# %%
# We need to duplicate the original state of our training data and test data.
train_x_saved = train_x.copy()
test_x_saved = test_x.copy()
# Functions that return training data and test data
def load_data(train_x_saved, test_x_saved):
train_x, test_x = train_x_saved.copy(), test_x_saved.copy()
return train_x, test_x
# %%
# Store the numeric variable to be converted into a list
num_cols = [
'MSSubClass', 'LotFrontage', 'LotArea', 'OverallQual', 'OverallCond',
'YearBuilt', 'YearRemodAdd', 'MasVnrArea', 'BsmtFinSF1', '1stFlrSF',
'2ndFlrSF', 'GrLivArea', 'GarageYrBlt', 'GarageArea', 'WoodDeckSF'
]
# %% [markdown]
# # Linear Transform
# %% [markdown]
# <div style="background-color:rgba(0, 255, 255, 0.6);border-radius:5px;display:fill;">
# <h1><center style ="margin-left : 20px;">Standardization</center></h1>
# </div>
# %% [markdown]
# #### This is the most basic transformation method.
# #### It is a method that makes the mean 0 and the standard deviation 1 through a linear transformation!
# %% [markdown]
# 
# %%
# Load Data
train_x, test_x = load_data(train_x_saved=train_x_saved,
test_x_saved=test_x_saved)
# %%
from sklearn.preprocessing import StandardScaler
# %%
scaler = StandardScaler()
scaler.fit(train_x[num_cols])
# %%
# Permuting each column after normalization
train_x[num_cols] = scaler.transform(train_x[num_cols])
test_x[num_cols] = scaler.transform(test_x[num_cols])
# %% [markdown]
# <div style="background-color:red;border-radius:5px;display:fill;">
# <h1><center style ="margin-left : 20px;">BAD Standardization</center></h1>
# </div>
# %% [markdown]
# #### Here, the training data and the test data are each standardized using their own mean and standard deviation.
#
# #### If the two distributions happen to be very similar this causes little harm, but the train and test features are no longer on a consistent scale, so this approach should not be used. 💥
# %%
train_x, test_x = load_data(train_x_saved=train_x_saved,
test_x_saved=test_x_saved)
# %%
from sklearn.preprocessing import StandardScaler
# %%
# Normalize training data and test data respectively (bad example)
scaler_train = StandardScaler()
scaler_train.fit(train_x[num_cols])
train_x[num_cols] = scaler_train.transform(train_x[num_cols])
scaler_test = StandardScaler()
scaler_test.fit(test_x[num_cols])
test_x[num_cols] = scaler_test.transform(test_x[num_cols])
# %% [markdown]
# <div style="background-color:rgba(0, 255, 255, 0.6);border-radius:5px;display:fill;">
# <h1><center style ="margin-left : 20px;">Min-Max Scaling</center></h1>
# </div>
# %% [markdown]
# #### Min-Max scaling rescales each variable so that its values fall within a specific interval (between 0 and 1).
# %% [markdown]
# 
# %%
train_x, test_x = load_data(train_x_saved=train_x_saved,
test_x_saved=test_x_saved)
# %%
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
scaler.fit(train_x[num_cols])
# %%
train_x[num_cols] = scaler.transform(train_x[num_cols])
test_x[num_cols] = scaler.transform(test_x[num_cols])
# %%
train_x[num_cols].describe().T.style.bar(subset=['mean'], color='#205ff2')\
.background_gradient(subset=['min'], cmap='Reds')\
.background_gradient(subset=['max'], cmap='coolwarm')
## The minimum value is 0 and the maximum value is 1.
# %% [markdown]
# # Non-linear Transformation
# %% [markdown]
# <div style="background-color:rgba(0, 255, 255, 0.6);border-radius:5px;display:fill;">
# <h1><center style ="margin-left : 20px;">Log</center></h1>
# </div>
# %% [markdown]
# #### It is better if the distribution of a variable is not heavily skewed to one side.
#
# #### Variables that represent amounts or counts, for example, tend to have distributions
#
# #### that are strongly skewed in one direction, so a log transformation is often applied.
#
# #### When the variable can be 0, log(x) is undefined, so the log(x + 1) transformation
#
# #### (np.log1p) is commonly used instead.
# %%
train_x, test_x = load_data(train_x_saved=train_x_saved,
test_x_saved=test_x_saved)
# %%
x = train_x[num_cols]
# %%
# take log
x1 = np.log(x)
x1
# %%
# Add 1 and then take the logarithm
x2 = np.log1p(x)
x2
# %%
# After taking the logarithm of the absolute value, add the original sign
x3 = np.sign(x) * np.log(np.abs(x))
x3
# %% [markdown]
# <div style="background-color:rgba(0, 255, 255, 0.6);border-radius:5px;display:fill;">
# <h1><center style ="margin-left : 20px;">Box-Cox Transform</center></h1>
# </div>
# %% [markdown]
# #### In addition to the BOX-COX Transform, which is a generalized log transformation,
#
# #### there is also the Yeo-Johnson Transform that can be applied to variables with negative values.
# #### These transformations approximate a normal distribution after log transformation.
# %% [markdown]
# 
# %%
train_x, test_x = load_data(train_x_saved=train_x_saved,
test_x_saved=test_x_saved)
# %%
# Store the variables that take only positive values (a Box-Cox requirement) in a list
# Note: if missing values are present, use (~(train_x[c] <= 0.0)).all() instead, because
# comparisons against NaN evaluate to False and would otherwise drop those columns
pos_cols = [
c for c in num_cols
if (train_x[c] > 0.0).all() and (test_x[c] > 0.0).all()
]
## List of features with positive values
pos_cols
# %%
from sklearn.preprocessing import PowerTransformer
# %%
pt = PowerTransformer(method='box-cox')
pt.fit(train_x[pos_cols])
# %%
# Replace each column with the transformed data
train_x[pos_cols] = pt.transform(train_x[pos_cols])
test_x[pos_cols] = pt.transform(test_x[pos_cols])
# %% [markdown]
# #### LotArea column before after comparison
# %%
x = train.LotArea.values
sns.kdeplot(x)
plt.title("before Box-Cox-transform")
plt.show()
# %%
x = train_x.LotArea.values
sns.kdeplot(x)
plt.title("after Box-Cox-transform")
plt.show()
## The existing data also has a form of a normal distribution,
## so there is little difference between it and after the Box-Cox transformation.
# %% [markdown]
# #### GrLivArea column before after comparison
# %%
x = train.GrLivArea.values
sns.kdeplot(x)
plt.title("before Box-Cox-transform")
plt.show()
# %%
x = train_x.GrLivArea.values
sns.kdeplot(x)
plt.title("after Box-Cox-transform")
plt.show()
## The existing data also has a form of a normal distribution,
## so there is little difference between it and after the Box-Cox transformation.
# %% [markdown]
# <div style="background-color:rgba(0, 255, 255, 0.6);border-radius:5px;display:fill;">
# <h1><center style ="margin-left : 20px;">Yeo-Johnson Transform</center></h1>
# </div>
# %% [markdown]
# #### Yeo-Johnson transform can also take negative values.
# %% [markdown]
# 
# %%
train_x, test_x = load_data(train_x_saved=train_x_saved,
test_x_saved=test_x_saved)
# %%
from sklearn.preprocessing import PowerTransformer
# %%
pt = PowerTransformer(method='yeo-johnson')
pt.fit(train_x[num_cols])
# %%
# Replace each column with the transformed data
train_x[num_cols] = pt.transform(train_x[num_cols])
test_x[num_cols] = pt.transform(test_x[num_cols])
# %%
train_x[num_cols]
# %%
import plotly.graph_objects as go
fig = go.Figure()
fig.add_trace(
go.Histogram(
x=train.MSSubClass,
xbins=dict( # bins used for histogram
start=-100, end=200),
marker_color='blue',
opacity=1))
fig.update_layout(
title_text='MSSubClass yeo-johnson Before',
xaxis_title_text='MSSubClass',
yaxis_title_text='Value',
bargap=0.05, # gap between bars of adjacent location coordinates
xaxis={'showgrid': False},
yaxis={'showgrid': False},
template='plotly_dark')
fig.show()
# %%
import plotly.graph_objects as go
fig = go.Figure()
fig.add_trace(
go.Histogram(
x=train_x.MSSubClass,
xbins=dict( # bins used for histogram
start=0, end=200),
marker_color='blue',
opacity=1))
fig.update_layout(
title_text='MSSubClass yeo-johnson After',
xaxis_title_text='MSSubClass',
yaxis_title_text='Value',
bargap=0.05, # gap between bars of adjacent location coordinates
xaxis={'showgrid': False},
yaxis={'showgrid': False},
template='plotly_dark')
fig.show()
## The spread distribution was forced to approximate the normal distribution.
# %% [markdown]
# # Setting TransForm
# %% [markdown]
# <div style="background-color:rgba(0, 255, 255, 0.6);border-radius:5px;display:fill;">
# <h1><center style ="margin-left : 20px;">Clipping</center></h1>
# </div>
# %% [markdown]
# #### Numerical variables sometimes contain outliers. You can limit their effect by setting
# #### lower and upper bounds and replacing any value outside that range with the nearest bound.
# #### It is a good idea to inspect the distribution first and then choose the thresholds.
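# (Hypothetical example: with a 1% point of 10 and a 99% point of 500, a value of 3 would be
# replaced by 10 and a value of 10,000 by 500.)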
# %%
train_x, test_x = load_data(train_x_saved=train_x_saved,
test_x_saved=test_x_saved)
# %%
# Check 1%, 99% points of training data per column
p01 = train_x[num_cols].quantile(0.01)
p99 = train_x[num_cols].quantile(0.99)
p01
p99
# %%
# Values below 1% point are clipped to 1% point, and values above 99% point are clipped to 99% point.
train_x[num_cols] = train_x[num_cols].clip(p01, p99, axis=1)
test_x[num_cols] = test_x[num_cols].clip(p01, p99, axis=1)
# %% [markdown]
# #### LotArea column before after comparison
# %%
import plotly.graph_objects as go
fig = go.Figure()
fig.add_trace(
go.Histogram(
x=train.LotArea,
xbins=dict( # bins used for histogram
start=0, end=50000, size=2),
marker_color='#e8ab60',
opacity=1))
fig.update_layout(
title_text='LotArea Clipping Before',
xaxis_title_text='LotArea',
yaxis_title_text='COUNT',
bargap=0.05, # gap between bars of adjacent location coordinates
xaxis={'showgrid': False},
yaxis={'showgrid': False},
template='plotly_dark')
fig.show()
# %%
import plotly.graph_objects as go
fig = go.Figure()
fig.add_trace(
go.Histogram(
x=train_x.LotArea,
xbins=dict( # bins used for histogram
start=0, end=50000),
marker_color='#e8ab60',
opacity=1))
fig.update_layout(
title_text='LotArea Clipping After',
xaxis_title_text='LotArea',
yaxis_title_text='COUNT',
bargap=0.05, # gap between bars of adjacent location coordinates
xaxis={'showgrid': False},
yaxis={'showgrid': False},
template='plotly_dark')
fig.show()
## Values below the 1% point and above the 99% point have been replaced with the boundary values.
# %% [markdown]
# #### GrLivArea column before after comparison
# %%
fig = go.Figure()
fig.add_trace(
go.Histogram(
x=train.GrLivArea,
xbins=dict( # bins used for histogram
start=0, end=10000, size=15),
marker_color='#FE6F5E',
opacity=1))
fig.update_layout(
title_text='GrLivArea Clipping Before',
xaxis_title_text='GrLivArea',
yaxis_title_text='COUNT',
bargap=0.05, # gap between bars of adjacent location coordinates
xaxis={'showgrid': False},
yaxis={'showgrid': False},
template='plotly_dark')
fig.show()
# %%
fig = go.Figure()
fig.add_trace(
go.Histogram(
x=train_x.GrLivArea,
xbins=dict( # bins used for histogram
start=0, end=10000, size=15),
marker_color='#FE6F5E',
opacity=1))
fig.update_layout(
title_text='GrLivArea Clipping After',
xaxis_title_text='GrLivArea',
yaxis_title_text='COUNT',
bargap=0.05, # gap between bars of adjacent location coordinates
xaxis={'showgrid': False},
yaxis={'showgrid': False},
template='plotly_dark')
fig.show()
# %% [markdown]
# #### If you look at the graph, you can clearly see that the values are not spread widely but are clustered like a normal distribution.
# %% [markdown]
# <div style="background-color:rgba(0, 255, 255, 0.6);border-radius:5px;display:fill;">
# <h1><center style ="margin-left : 20px;">Rank Gauss</center></h1>
# </div>
# %% [markdown]
# #### This method converts a numeric variable into ranks and then maps those ranks onto a normal
# #### distribution while preserving the original order. It was popularized by Kaggle Grandmaster
# #### <NAME> in the 1st place solution of Porto Seguro's Safe Driver Prediction competition.
# #### It is said to work better than plain standardization when the features feed a neural network.
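# (Below, scikit-learn's QuantileTransformer with output_distribution='normal' is used to apply
# this rank-based mapping to a normal distribution.)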
# %%
train_x, test_x = load_data(train_x_saved=train_x_saved,
test_x_saved=test_x_saved)
from sklearn.preprocessing import QuantileTransformer
# %%
transformer = QuantileTransformer(n_quantiles=100,
random_state=0,
output_distribution='normal')
transformer.fit(train_x[num_cols])
# %%
train_x[num_cols] = transformer.transform(train_x[num_cols])
test_x[num_cols] = transformer.transform(test_x[num_cols])
# %%
train_x[num_cols]
# %%
p = sns.boxplot(x=train.GarageArea, color='teal')
p.set_title("GarageArea RankGauss Before")
plt.show()
# %%
p = sns.boxplot(x=train_x.GarageArea, color='teal')
p.set_title("GarageArea RankGauss After")
plt.show()
# %% [markdown]
# #### The values were semi-forced to be normally distributed. The impact of outliers is also expected to decrease.
# %% [markdown]
# # NEXT PLAN
# %% [markdown]
# #### The next tabular data transformation kernel will cover converting categorical variables into numeric ones.
# #### If you found this kernel interesting, please check out that categorical conversion kernel as well.
```
#### File: src/soorgeon/cli.py
```python
import click
from soorgeon import __version__, export
@click.group()
@click.version_option(__version__)
def cli():
pass
@cli.command()
@click.argument('path', type=click.Path(exists=True))
@click.option('--log', '-l', default=None)
@click.option(
'--df-format',
'-d',
default=None,
type=click.Choice(('parquet', 'csv')),
help='Format for variables with the df prefix. Otherwise uses pickle')
@click.option('--product-prefix',
'-p',
default=None,
help='Prefix for all products')
@click.option('--single-task',
'-s',
is_flag=True,
help='Create a pipeline with a single task')
@click.option(
'--file-format',
'-f',
default=None,
type=click.Choice(('py', 'ipynb')),
help=('Format for pipeline tasks, if empty keeps the same format '
'as the input'))
def refactor(path, log, product_prefix, df_format, single_task, file_format):
"""
Refactor a monolithic notebook.
$ soorgeon refactor nb.ipynb
* Sections must be separated by markdown H2 headings
* Star imports (from math import *) not supported
* Functions should not use global variables
User guide: https://github.com/ploomber/soorgeon/blob/main/doc/guide.md
"""
export.refactor(path,
log,
product_prefix=product_prefix,
df_format=df_format,
single_task=single_task,
file_format=file_format)
click.secho(f'Finished refactoring {path!r}, use Ploomber to continue.',
fg='green')
click.echo("""
Install dependencies (this will install ploomber):
$ pip install -r requirements.txt
List tasks:
$ ploomber status
Execute pipeline:
$ ploomber build
Plot pipeline (this requires pygraphviz, which isn't installed by default):
$ ploomber plot
* Documentation: https://docs.ploomber.io
* Jupyter integration: https://ploomber.io/s/jupyter
* Other editors: https://ploomber.io/s/editors
""")
```
#### File: src/soorgeon/export.py
```python
import shutil
import traceback
import ast
import pprint
from collections import namedtuple
from pathlib import Path
import logging
from importlib import resources
from soorgeon import assets
import click
import parso
import jupytext
import yaml
import nbformat
from soorgeon import (split, io, definitions, proto, exceptions, magics,
pyflakes)
logger = logging.getLogger(__name__)
pp = pprint.PrettyPrinter(indent=4)
class NotebookExporter:
"""Converts a notebook into a Ploomber pipeline
"""
def __init__(self, nb, verbose=True, df_format=None, py=False):
if df_format not in {None, 'parquet', 'csv'}:
raise ValueError("df_format must be one of "
"None, 'parquet' or 'csv', "
f"got: {df_format!r}")
# NOTE: we're commenting magics here but removing them in ProtoTask,
# maybe we should comment magics also in ProtoTask?
nb = magics.comment_magics(nb)
self._nb = nb
self._df_format = df_format
self._verbose = verbose
self._io = None
self._definitions = None
self._tree = None
self._providers = None
self._check()
self._proto_tasks = self._init_proto_tasks(nb, py)
# snippets map names with the code the task will contain, we use
# them to run static analysis
self._snippets = {pt.name: str(pt) for pt in self._proto_tasks}
def export(self, product_prefix=None):
"""Export the project
Parameters
        ----------
product_prefix : str
A prefix to append to all products. If None, it is set to 'output'
"""
product_prefix = product_prefix or 'output'
# export functions and classes to a separate file
self.export_definitions()
# export requirements.txt
self.export_requirements()
# export .gitignore
self.export_gitignore(product_prefix)
task_specs = self.get_task_specs(product_prefix=product_prefix)
sources = self.get_sources()
dag_spec = {'tasks': list(task_specs.values())}
for name, task_spec in task_specs.items():
path = Path(task_spec['source'])
path.parent.mkdir(exist_ok=True, parents=True)
path.write_text(sources[name])
out = yaml.dump(dag_spec, sort_keys=False)
# pyyaml doesn't have an easy way to control whitespace, but we want
# tasks to have an empty line between them
out = out.replace('\n- ', '\n\n- ')
Path('pipeline.yaml').write_text(out)
self.export_readme()
def _check(self):
"""
Run a few checks before continuing the refactoring. If this fails,
we'll require the user to do some small changes to their code.
"""
code = self._get_code()
_check_syntax(code)
pyflakes.check_notebook(self._nb)
_check_functions_do_not_use_global_variables(code)
_check_no_star_imports(code)
def _init_proto_tasks(self, nb, py):
"""Break notebook into smaller sections
"""
# use H2 headers to break notebook
breaks = split.find_breaks(nb)
# generate groups of cells
cells_split = split.split_with_breaks(nb.cells, breaks)
# extract names by using the H2 header text
names = split.names_with_breaks(nb.cells, breaks)
# initialize proto tasks
return [
proto.ProtoTask(
name,
cell_group,
df_format=self._df_format,
py=py,
) for name, cell_group in zip(names, cells_split)
]
def get_task_specs(self, product_prefix=None):
"""Return task specs (dictionary) for each proto task
"""
return {
pt.name: pt.to_spec(self.io, product_prefix=product_prefix)
for pt in self._proto_tasks
}
def get_sources(self):
"""
Generate the code strings (ipynb or percent format) for each proto task
"""
# FIXME: this calls find_providers, we should only call it once
upstream = io.find_upstream(self._snippets)
code_nb = self._get_code()
return {
pt.name: pt.export(
upstream,
self.io,
self.providers,
code_nb,
self.definitions,
)
for pt in self._proto_tasks
}
def export_definitions(self):
"""Create an exported.py file with function and class definitions
"""
# do not create exported.py if there are no definitions
if not self.definitions:
return
out = '\n\n'.join(self.definitions.values())
ip = io.ImportsParser(self._get_code())
imports = ip.get_imports_cell_for_task(out)
if imports:
exported = f'{imports}\n\n\n{out}'
else:
exported = out
Path('exported.py').write_text(exported)
def export_requirements(self):
"""Generates requirements.txt file, appends it at the end if already
exists
"""
reqs = Path('requirements.txt')
# ploomber is added by default (pinned to >=0.14.7 because earlier
# versions throw an error when using the inline bash IPython magic
# during the static_analysis stage)
pkgs = ['ploomber>=0.14.7'] + definitions.packages_used(self.tree)
# add pyarrow to requirements if needed
if (self._df_format == 'parquet' and 'pyarrow' not in pkgs
and 'fastparquet' not in pkgs):
pkgs = ['pyarrow'] + pkgs
pkgs_txt = '\n'.join(sorted(pkgs))
out = f"""\
# Auto-generated file, may need manual editing
{pkgs_txt}
"""
if reqs.exists():
reqs.write_text(reqs.read_text() + out)
else:
reqs.write_text(out)
def _get_code(self):
"""Returns the source of code cells
"""
return '\n'.join(cell['source'] for cell in self._nb.cells
if cell['cell_type'] == 'code')
def export_gitignore(self, product_prefix):
if product_prefix and not Path(product_prefix).is_absolute():
path = Path('.gitignore')
content = '' if not path.exists() else path.read_text() + '\n'
path.write_text(content + product_prefix + '\n')
self._echo(f'Added {str(product_prefix)!r} directory'
' to .gitignore...')
def export_readme(self):
path = Path('README.md')
if path.exists():
content = path.read_text() + '\n'
self._echo('README.md found, appended auto-generated content')
else:
content = ''
self._echo('Added README.md')
path.write_text(content + resources.read_text(assets, 'README.md'))
def _echo(self, msg):
if self._verbose:
click.echo(msg)
@property
def definitions(self):
if self._definitions is None:
self._definitions = (definitions.from_def_and_class(self.tree))
return self._definitions
@property
def tree(self):
if self._tree is None:
code = self._get_code()
self._tree = parso.parse(code)
return self._tree
@property
def providers(self):
if self._providers is None:
self._providers = io.ProviderMapping(self.io)
return self._providers
@property
def io(self):
"""
{name: (inputs, outputs), ...}
"""
if self._io is None:
io_ = self._get_raw_io()
logging.info(f'io: {pp.pformat(io_)}\n')
self._io = io.prune_io(io_)
logging.info(f'pruned io: {pp.pformat(self._io)}\n')
return self._io
def _get_raw_io(self):
return io.find_io(self._snippets)
FunctionNeedsFix = namedtuple('FunctionNeedsFix', ['name', 'pos', 'args'])
def _check_syntax(code):
try:
ast.parse(code)
except SyntaxError:
error = traceback.format_exc()
else:
error = None
if error:
raise exceptions.InputSyntaxError(f'Could not refactor notebook due '
f'to invalid syntax\n\n {error}')
def _check_no_star_imports(code):
tree = parso.parse(code)
star_imports = [
import_ for import_ in tree.iter_imports() if import_.is_star_import()
]
if star_imports:
star_imports_ = '\n'.join(import_.get_code()
for import_ in star_imports)
url = ('https://github.com/ploomber/soorgeon/blob/main/doc'
'/star-imports.md')
raise exceptions.InputError(
'Star imports are not supported, please change '
f'the following:\n\n{star_imports_}\n\n'
f'For more details, see: {url}')
# see issue #12 on github
def _check_functions_do_not_use_global_variables(code):
tree = parso.parse(code)
needs_fix = []
local_scope = set(definitions.find_defined_names(tree))
for funcdef in tree.iter_funcdefs():
# FIXME: this should be passing the tree directly, no need to reparse
# again, but for some reason,
# using find_inputs_and_outputs_from_tree(funcdef) returns the name
# of the function as an input
in_, _ = io.find_inputs_and_outputs(funcdef.get_code(),
local_scope=local_scope)
if in_:
needs_fix.append(
FunctionNeedsFix(
funcdef.name.value,
funcdef.start_pos,
in_,
))
if needs_fix:
message = ('Looks like the following functions are using global '
'variables, this is unsupported. Please add all missing '
'arguments. See this to learn more:\n'
'https://github.com/ploomber/soorgeon/blob'
'/main/doc/fn-global.md\n\n')
def comma_separated(args):
return ','.join(f"'{arg}'" for arg in args)
message += '\n'.join(
f'* Function {f.name!r} uses variables {comma_separated(f.args)}'
for f in needs_fix)
raise exceptions.InputError(message)
def from_nb(nb, log=None, product_prefix=None, df_format=None, py=False):
"""Refactor a notebook by passing a notebook object
Parameters
----------
product_prefix : str
A prefix to add to all products. If None, it's set to 'output'
"""
if log:
logging.basicConfig(level=log.upper())
exporter = NotebookExporter(nb, df_format=df_format, py=py)
exporter.export(product_prefix=product_prefix)
# TODO: instantiate dag since this may raise issues and we want to capture
# them to let the user know how to fix them (e.g., more >1 H2 headers with
# the same text)
def from_path(path, log=None, product_prefix=None, df_format=None, py=False):
"""Refactor a notebook by passing a path to it
Parameters
----------
    product_prefix : str
        A prefix to add to all products. If None, it's set to 'output'
"""
from_nb(jupytext.read(path),
log=log,
product_prefix=product_prefix,
df_format=df_format,
py=py)
def single_task_from_path(path, product_prefix, file_format):
"""Refactor a notebook into a single task Ploomber pipeline
"""
path = Path(path)
click.echo('Creating a pipeline with a single task...')
cell = nbformat.v4.new_code_cell(source='upstream = None',
metadata=dict(tags=['parameters']))
nb = jupytext.read(path)
nb.cells.insert(0, cell)
name = path.stem
path_backup = path.with_name(f'{name}-backup{path.suffix}')
# output
ext = path.suffix[1:] if file_format is None else file_format
path_to_task = f'{name}.{ext}'
# create backup
shutil.copy(path, path_backup)
jupytext.write(nb,
path_to_task,
fmt='py:percent' if ext == 'py' else 'ipynb')
spec = {
'tasks': [{
'source':
path_to_task,
'product':
str(Path(product_prefix or 'products', f'{name}-report.ipynb'))
}]
}
pipeline = 'pipeline.yaml'
click.echo(f'Done. Copied code to {path_to_task!r} and added it to '
f'{pipeline!r}. Created backup of original notebook '
f'at {str(path_backup)!r}.')
Path('pipeline.yaml').write_text(yaml.safe_dump(spec, sort_keys=False))
def refactor(path, log, product_prefix, df_format, single_task, file_format):
if single_task:
single_task_from_path(path=path,
product_prefix=product_prefix,
file_format=file_format)
else:
ext = Path(path).suffix[1:] if file_format is None else file_format
try:
from_nb(jupytext.read(path),
log=log,
product_prefix=product_prefix,
df_format=df_format,
py=ext == 'py')
# InputError means the input is broken
except exceptions.InputWontRunError:
raise
# This implies an error on our end
except Exception as e:
cmd = f'soorgeon refactor {path} --single-task'
msg = ('An error occurred when refactoring '
'notebook.\n\nTry refactoring '
f'as a single task pipeline:\n\n$ {cmd}\n\n'
'Error details:\n')
raise exceptions.InputError(msg) from e
```
#### File: soorgeon/tests/test_io_find_inputs_and_outputs.py
```python
import pytest
from soorgeon import io
only_outputs = """
x = 1
y = 2
"""
simple = """
z = x + y
"""
local_inputs = """
x = 1
y = 2
z = x + y
"""
imports = """
import pandas as pd
z = 1
"""
imported_function = """
from sklearn.datasets import load_iris
# load_iris should be considered an input since it's an imported object
df = load_iris(as_frame=True)['data']
"""
# FIXME: another test case but with a class constructor
input_in_function_call = """
import seaborn as sns
sns.histplot(df.some_column)
"""
# TODO: try all combinations of the following examples
input_key_in_function_call = """
import seaborn as sns
sns.histplot(x=df)
"""
input_key_in_function_call_many = """
import seaborn as sns
sns.histplot(x=df, y=df_another)
"""
input_key_in_function_call_with_dot_access = """
import seaborn as sns
sns.histplot(x=df.some_column)
"""
input_existing_object = """
import seaborn as sns
X = 1
sns.histplot(X)
"""
# ignore classes, functions
# try assigning a tuple
# TODO: same but assigning multiple e.g., a, b = dict(), dict()
built_in = """
mapping = dict()
mapping['key'] = 'value'
"""
built_in_as_arg = """
from pkg import some_fn
something = some_fn(int)
"""
# TODO: same but with dot access
modify_existing_obj_getitem = """
mapping = {'a': 1}
mapping['key'] = 'value'
"""
# TODO: same but with dot access
modify_imported_obj_getitem = """
from pkg import mapping
mapping['key'] = 'value'
"""
define_multiple_outputs = """
a, b, c = 1, 2, 3
"""
define_multiple_outputs_square_brackets = """
[a, b, c] = 1, 2, 3
"""
define_multiple_outputs_parenthesis = """
(a, b, c) = 1, 2, 3
"""
define_multiple_outputs_inside_function = """
import do_stuff
def fn():
f, ax = do_stuff()
"""
define_multiple_replace_existing = """
b = 1
b, c = 2, 3
c.stuff()
"""
local_function = """
def x():
pass
y = x()
"""
local_function_with_args = """
def x(z):
pass
y = x(10)
"""
local_function_with_args_and_body = """
def x(z):
another = z + 1
something = another + 1
return another
y = x(10)
"""
local_function_with_kwargs = """
def my_function(a, b, c=None):
return a + b + c
y = my_function(1, 2, 3)
"""
local_class = """
class X:
pass
y = X()
"""
for_loop = """
for x in range(10):
y = x + z
"""
for_loop_many = """
for x, z in range(10):
y = x + z
"""
for_loop_names_with_parenthesis = """
for a, (b, (c, d)) in range(10):
x = a + b + c + d
"""
for_loop_nested = """
for i in range(10):
for j in range(10):
print(i + j)
"""
for_loop_nested_dependent = """
for filenames in ['file', 'name']:
for char in filenames:
print(char)
"""
for_loop_name_reference = """
for _, source in enumerate(10):
some_function('%s' % source)
"""
for_loop_with_input = """
for range_ in range(some_input):
pass
"""
for_loop_with_local_input = """
some_variable = 10
for range_ in range(some_variable):
pass
"""
for_loop_with_input_attribute = """
for range_ in range(some_input.some_attribute):
pass
"""
for_loop_with_input_nested_attribute = """
for range_ in range(some_input.some_attribute.another_attribute):
pass
"""
for_loop_with_input_and_getitem = """
for range_ in range(some_input['some_key']):
pass
"""
for_loop_with_input_and_getitem_input = """
for range_ in range(some_input[some_key]):
pass
"""
for_loop_with_input_and_nested_getitem = """
for range_ in range(some_input[['some_key']]):
pass
"""
for_loop_with_nested_input = """
for idx, range_ in enumerate(range(some_input)):
pass
"""
# TODO: try with other variables such as accessing an attribute,
# or even just having the variable there, like "df"
getitem_input = """
df['x'].plot()
"""
method_access_input = """
df.plot()
"""
overriding_name = """
from pkg import some_function
x, y = some_function(x, y)
"""
# FIXME: test case with global scoped variables accessed in function/class
# definitions
"""
def function(x):
# df may come from another task!
return df + x
"""
list_comprehension = """
[y for y in x]
"""
list_comprehension_attributes = """
[y.attribute for y in x.attribute]
"""
list_comprehension_with_conditional = """
targets = [1, 2, 3]
selected = [x for x in df.columns if x not in targets]
"""
list_comprehension_with_conditional_and_local_variable = """
import pandas as pd
df = pd.read_csv("data.csv")
features = [feature for feature in df.columns]
"""
list_comprehension_with_f_string = """
[f"'{s}'" for s in [] if s not in []]
"""
list_comprehension_with_f_string_assignment = """
y = [f"'{s}'" for s in [] if s not in []]
"""
list_comprehension_nested = """
out = [item for sublist in reduced_cats.values() for item in sublist]
"""
list_comprehension_nested_another = """
out = [[j for j in range(5)] for i in range(5)]
"""
list_comprehension_nested_more = """
out = [[[k for k in range(j)] for j in range(i)] for i in range(5)]
"""
list_comprehension_with_left_input = """
[x + y for x in range(10)]
"""
set_comprehension = """
output = {x for x in numbers if x % 2 == 0}
"""
dict_comprehension = """
output = {x: y + 1 for x in numbers if x % 2 == 0}
"""
dict_comprehension_zip = """
output = {x: y + 1 for x, z in zip(range(10), range(10)) if x % 2 == 0}
"""
function_with_global_variable = """
def some_function(a):
return a + b
"""
# TODO: try with nested brackets like df[['something']]
# TODO: assign more than one at the same time df['a'], df['b'] = ...
mutating_input = """
df['new_column'] = df['some_column'] + 1
"""
# TODO: define inputs inside built-ins
# e.g.
# models = [a, b, c]
# models = {'a': a}
# TODO: we need a general function that finds the names after an =
# e.g. a = something(x=1, b=something)
# a = dict(a=1)
# b = {'a': x}
# this is a special case: since df hasn't been declared locally, it's
# considered an input even though it's on the left side of the = token,
# and it's also an output because it's modifying df
mutating_input_implicit = """
df['column'] = 1
"""
# counter example, local modification inside a function - that's ok
function_mutating_local_object = """
def fn():
x = object()
x['key'] = 1
return x
"""
# add a case like failure but within a function
"""
def do(df):
df['a'] = 1
"""
# there's also this problem if we mutatein a for loop
"""
# df becomes an output!
for col in df:
col['x'] = col['x'] + 1
"""
# non-pure functions are problematic, too
"""
def do(df):
df['a'] = 1
# here, df is an input that we should get from another task, but it should
# also be considered an output since we're mutating it, and, if the next
# task needs it, it'll need this version
do(df)
"""
nested_function_arg = """
import pd
pd.DataFrame({'key': y})
"""
nested_function_kwarg = """
import pd
pd.DataFrame(data={'key': y})
"""
# TODO: test nested context managers
context_manager = """
with open('file.txt') as f:
x = f.read()
"""
f_string = """
f'{some_variable} {a_number:.2f} {an_object!r} {another!s}'
"""
f_string_assignment = """
s = f'{some_variable} {a_number:.2f} {an_object!r} {another!s}'
"""
class_ = """
class SomeClass:
def __init__(self, param):
self._param = param
def some_method(self, a, b=0):
return a + b
some_object = SomeClass(param=1)
"""
lambda_ = """
lambda x: x
"""
lambda_with_input = """
lambda x: x + y
"""
lambda_as_arg = """
import something
something(1, lambda x: x)
"""
lambda_assignment = """
out = lambda x: x
"""
lambda_with_input_assignment = """
out = lambda x: x + y
"""
lambda_as_arg_assignment = """
import something
out = something(1, lambda x: x)
"""
@pytest.mark.parametrize(
'code_str, inputs, outputs', [
[only_outputs, set(), {'x', 'y'}],
[simple, {'x', 'y'}, {'z'}],
[local_inputs, set(), {'x', 'y', 'z'}],
[imports, set(), {'z'}],
[imported_function, set(), {'df'}],
[input_in_function_call, {'df'}, set()],
[input_key_in_function_call, {'df'},
set()],
[input_key_in_function_call_many, {'df', 'df_another'},
set()],
[input_key_in_function_call_with_dot_access, {'df'},
set()],
[modify_existing_obj_getitem,
set(), {'mapping'}],
[modify_imported_obj_getitem,
set(), set()],
[built_in, set(), {'mapping'}],
[built_in_as_arg, set(), {'something'}],
[input_existing_object, set(), {'X'}],
[define_multiple_outputs,
set(), {'a', 'b', 'c'}],
[define_multiple_outputs_square_brackets,
set(), {'a', 'b', 'c'}],
[define_multiple_outputs_parenthesis,
set(), {'a', 'b', 'c'}],
[define_multiple_outputs_inside_function,
set(), set()],
[
define_multiple_replace_existing,
set(),
{'b', 'c'},
],
[local_function, set(), {'y'}],
[local_function_with_args, set(), {'y'}],
[
local_function_with_args_and_body,
set(),
{'y'},
],
[
local_function_with_kwargs,
set(),
{'y'},
],
[local_class, set(), {'y'}],
[for_loop, {'z'}, {'y'}],
[for_loop_many, set(), {'y'}],
[for_loop_names_with_parenthesis,
set(), {'x'}],
[for_loop_nested, set(), set()],
[for_loop_nested_dependent, set(),
set()],
[for_loop_name_reference, set(), set()],
[for_loop_with_input, {'some_input'
}, set()],
[for_loop_with_local_input,
set(), {'some_variable'}],
[for_loop_with_input_attribute,
{'some_input'}, set()],
[for_loop_with_input_nested_attribute,
{'some_input'
}, set()],
[for_loop_with_input_and_getitem,
{'some_input'
}, set()],
[
for_loop_with_input_and_getitem_input, {'some_input', 'some_key'},
set()
],
[for_loop_with_input_and_nested_getitem,
{'some_input'
}, set()],
[for_loop_with_nested_input,
{'some_input'}, set()],
[getitem_input, {'df'}, set()],
[method_access_input, {'df'}, set()],
[overriding_name, {'x', 'y'}, {'x', 'y'}],
[list_comprehension, {'x'}, set()],
[list_comprehension_attributes,
{'x'}, set()],
[list_comprehension_with_conditional, {'df'}, {'selected', 'targets'}],
[
list_comprehension_with_conditional_and_local_variable,
set(), {'df', 'features'}
],
[list_comprehension_with_f_string,
set(), set()],
[list_comprehension_with_f_string_assignment,
set(), {'y'}],
[list_comprehension_nested, {'reduced_cats'}, {'out'}],
[
list_comprehension_nested_another,
set(),
{'out'},
],
[
list_comprehension_nested_more,
set(),
{'out'},
],
[
list_comprehension_with_left_input,
{'y'},
set(),
],
[set_comprehension, {'numbers'}, {'output'}],
[dict_comprehension, {'numbers', 'y'}, {'output'}],
[dict_comprehension_zip, {'y'}, {'output'}],
[function_with_global_variable,
{'b'}, set()],
[mutating_input, {'df'}, {'df'}],
[mutating_input_implicit, {'df'}, {'df'}],
[function_mutating_local_object,
set(), set()],
[nested_function_arg, {'y'}, set()],
[nested_function_kwarg, {'y'}, set()],
[context_manager, set(), {'x'}],
[
f_string, {'some_variable', 'a_number', 'an_object', 'another'},
set()
],
[
f_string_assignment,
{'some_variable', 'a_number', 'an_object', 'another'}, {'s'}
],
[class_, set(), {'some_object'}],
[lambda_, set(), set()],
[lambda_with_input, {'y'}, set()],
[lambda_as_arg, set(), set()],
[lambda_assignment, set(), {'out'}],
[lambda_with_input_assignment, {'y'}, {'out'}],
[lambda_as_arg_assignment, set(), {'out'}],
],
ids=[
'only_outputs',
'simple',
'local_inputs',
'imports',
'imported_function',
'input_in_function_call',
'input_key_in_function_call',
'input_key_in_function_call_many',
'input_key_in_function_call_with_dot_access',
'modify_existing_getitem',
'modify_imported_getitem',
'built_in',
'built_in_as_arg',
'input_existing_object',
'define_multiple_outputs',
'define_multiple_outputs_square_brackets',
'define_multiple_outputs_parenthesis',
'define_multiple_outputs_inside_function',
'define_multiple_replace_existing',
'local_function',
'local_function_with_args',
'local_function_with_args_and_body',
'local_function_with_kwargs',
'local_class',
'for_loop',
'for_loop_many',
'for_loop_names_with_parenthesis',
'for_loop_nested',
'for_loop_nested_dependent',
'for_loop_name_reference',
'for_loop_with_input',
'for_loop_with_local_input',
'for_loop_with_input_attribute',
'for_loop_with_input_nested_attribute',
'for_loop_with_input_and_getitem',
'for_loop_with_input_and_getitem_input',
'for_loop_with_input_and_nested_getitem',
'for_loop_with_nested_input',
'getitem_input',
'method_access_input',
'overriding_name',
'list_comprehension',
'list_comprehension_attributes',
'list_comprehension_with_conditional',
'list_comprehension_with_conditional_and_local_variable',
'list_comprehension_with_f_string',
'list_comprehension_with_f_string_assignment',
'list_comprehension_nested',
'list_comprehension_nested_another',
'list_comprehension_nested_more',
'list_comprehension_with_left_input',
'set_comprehension',
'dict_comprehension',
'dict_comprehension_zip',
'function_with_global_variable',
'mutating_input',
'mutating_input_implicit',
'function_mutating_local_object',
'nested_function_arg',
'nested_function_kwarg',
'context_manager',
'f_string',
'f_string_assignment',
'class_',
'lambda_',
'lambda_with_input',
'lambda_as_arg',
'lambda_assignment',
'lambda_with_input_assignment',
'lambda_as_arg_assignment',
])
def test_find_inputs_and_outputs(code_str, inputs, outputs):
in_, out = io.find_inputs_and_outputs(code_str)
assert in_ == inputs
assert out == outputs
```
#### File: soorgeon/tests/test_render_notebooks.py
```python
from glob import glob
from pathlib import Path
import pytest
from ploomber.spec import DAGSpec
from conftest import PATH_TO_TESTS
from soorgeon import export
_kaggle = Path(PATH_TO_TESTS, '..', '_kaggle', '_render')
path_to_nbs = glob(str(Path(_kaggle, '*', '*.py')))
def get_name(path):
return Path(path).parent.name
names = [get_name(nb) for nb in path_to_nbs]
@pytest.mark.parametrize('path', path_to_nbs, ids=names)
def test_notebooks(tmp_empty, path):
export.from_path(path, py=True)
DAGSpec('pipeline.yaml').to_dag().render()
``` |
{
"source": "jramirezc93/twitter-timeline-scrap",
"score": 4
} |
#### File: jramirezc93/twitter-timeline-scrap/utils_json.py
```python
import json
def json_tweet_parser(tweet, users, tweets_dict, users_dict):
"""Function to parse a tweet with bs
Arguments:
tweet {str} -- tweet result
users {dict} -- dict with results
tweets_dict {dict} -- dict to store tweets
users_dict {dict} -- dict to store users
Returns:
dict -- dict with tweets stored
dict -- dict with users stored
"""
item_tweet = {}
item_tweet["timestamp"] = tweet["created_at"]
item_tweet["id_tweet"] = tweet["id_str"]
item_tweet["username"] = users[tweet["user_id_str"]]["screen_name"]
item_tweet["id_user"] = tweet["user_id_str"]
item_tweet["link_tweet"] = "twitter.com/"+users[tweet["user_id_str"]]["screen_name"]+"/status/"+tweet["id_str"]
item_tweet["link_user"] = "twitter.com/"+users[tweet["user_id_str"]]["screen_name"]
item_tweet["img_user"] = users[tweet["user_id_str"]]["profile_background_image_url_https"]
item_tweet["text"] = tweet["full_text"]
item_tweet["replies_count"] = tweet["reply_count"]
item_tweet["retweets_count"] = tweet["retweet_count"]
item_tweet["likes_count"] = tweet["favorite_count"]
# Output users
users_dict["username"].append(
item_tweet["username"] if item_tweet["username"] else "")
users_dict["id_user"].append(
item_tweet["id_user"] if item_tweet["id_user"] else "")
users_dict["img_user"].append(
item_tweet["img_user"] if item_tweet["img_user"] else "")
users_dict["link_user"].append(
item_tweet["link_user"] if item_tweet["link_user"] else "")
# Output tweets
tweets_dict["id_tweet"].append(
item_tweet["id_tweet"] if item_tweet["id_tweet"] else "")
tweets_dict["id_user"].append(
item_tweet["id_user"] if item_tweet["id_user"] else "")
tweets_dict["user"].append(
item_tweet["username"] if item_tweet["username"] else "")
tweets_dict["link_tweet"].append(
item_tweet["link_tweet"] if item_tweet["link_tweet"] else "")
tweets_dict["timestamp"].append(
item_tweet["timestamp"] if item_tweet["timestamp"] else "")
tweets_dict["text"].append(
item_tweet["text"] if item_tweet["text"] else "")
tweets_dict["replies_count"].append(
item_tweet["replies_count"] if item_tweet["replies_count"] else "")
tweets_dict["retweets_count"].append(
item_tweet["retweets_count"] if item_tweet["retweets_count"] else "")
tweets_dict["likes_count"].append(
item_tweet["likes_count"] if item_tweet["likes_count"] else "")
return tweets_dict, users_dict
``` |
{
"source": "jramirezneira/RFM-Ideorum",
"score": 3
} |
#### File: RFM-Ideorum/data/retail-rfm.py
```python
import pandas as pd
import numpy as np
import datetime as dt
import seaborn as sns
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from sqlalchemy import create_engine
import matplotlib.pyplot as plt
import json
# Though the following import is not directly being used, it is required
# for 3D projection to work
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 (presumably the import referred to above)
#%% create functions for calculating scores by order
def scoreRecency(x,p,d): # low recency is best and is assigned 1
if x <= d[p][0.25]:return 1
elif x <= d[p][0.50]:return 2
elif x <= d[p][0.75]:return 3
else:return 4
def scoreFrequency(x,p,d): # high frequency is best and is assigned 1
if x <= d[p][0.25]:return 4
elif x <= d[p][0.50]:return 3
elif x <= d[p][0.75]:return 2
else:return 1
def scoreMonetary(x,p,d): # high monetary is best and is assigned 1
if x <= d[p][0.25]:return 4
elif x <= d[p][0.50]:return 3
elif x <= d[p][0.75]:return 2
else:return 1
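# Illustrative example (hypothetical quantile values): if the recency quantiles were
# {0.25: 17, 0.50: 50, 0.75: 142} days, a customer last seen 30 days ago would fall between
# the 25th and 50th percentiles and receive an R score of 2 (lower recency is better).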
df_raw = pd.read_excel('retail-data.xlsx')
#%% copy raw data and inspect the countries present
df = df_raw.copy()
df.country.nunique()
df.country.unique()
#%% drop duplicates and group by country and customer ID
cc = df[['country','customerid']].drop_duplicates()
cc.groupby(['country'])['customerid']. \
aggregate('count').reset_index(). \
sort_values('customerid', ascending=False)
#%% remove customers without customer ID
df = df[pd.notnull(df['customerid'])]
df.isnull().sum(axis=0)
#%% ensure only positive quantities and prices
df.UnitPrice.min()
df.Quantity.min()
df = df[(df['Quantity']>0)]
#%% check unique value for each column
def unique_counts(df):
for i in df.columns:
count = df[i].nunique()
print(i, ": ", count)
unique_counts(df)
#%% add column for total price
df['TotalPrice'] = df['Quantity'] * df['UnitPrice']
#%% determine first and last order date
df['InvoiceDate'].min()
df['InvoiceDate'].max()
#%% establish day after last purchase as point of calculation for recency
now = dt.datetime(2011,12,10)
df['InvoiceDate'] = pd.to_datetime(df['InvoiceDate'])
#%% create RFM table
rfmTable = df.groupby('customerid').agg({
'InvoiceDate': lambda x: (now - x.max()).days, #recency
'InvoiceNo': lambda x: len(x), #frequency
'TotalPrice': lambda x: x.sum()}) #monetary
rfmTable['InvoiceDate'] = rfmTable['InvoiceDate'].astype(int)
#%% convert invoice date to integer and rename columns for RFM
rfmTable.rename(columns={
'InvoiceDate': 'recency_total',
'InvoiceNo': 'frequency_total',
'TotalPrice': 'monetary_total'}, inplace=True)
#%% shift rfmTable data to quantiles for segmentation
quantiles = rfmTable.quantile(q=[0.25,0.5,0.75])
quantiles = quantiles.to_dict()
quantiles
#%% create a segmented RFM table
rfmSegment = rfmTable.copy()
#%% create new columns for RFM and assign values based on quantile
rfmSegment['r_qt'] = rfmSegment['recency_total'].apply(scoreRecency, args=('recency_total',quantiles,))
rfmSegment['f_qt'] = rfmSegment['frequency_total'].apply(scoreFrequency, args=('frequency_total',quantiles,))
rfmSegment['m_qt'] = rfmSegment['monetary_total'].apply(scoreMonetary, args=('monetary_total',quantiles,))
#%% calculate total RFM score as string composed of individual RFM quantiles
rfmSegment['rfm'] = rfmSegment.r_qt.map(str) \
+ rfmSegment.f_qt.map(str) \
+ rfmSegment.m_qt.map(str)
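# e.g. r_qt=1, f_qt=2, m_qt=1 gives the string "121", which the mapping below labels "Big Spenders"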
#%% create categories from rfm
datacomb=[]
datacomb.append([3,3,1,"Big Spenders","331",7])
datacomb.append([1,2,1,"Big Spenders","121",7])
datacomb.append([1,3,1,"Big Spenders","131",7])
datacomb.append([1,4,1,"Big Spenders","141",7])
datacomb.append([2,2,1,"Big Spenders","221",7])
datacomb.append([2,3,1,"Big Spenders","231",7])
datacomb.append([2,4,1,"Big Spenders","241",7])
datacomb.append([3,2,1,"Big Spenders","321",7])
datacomb.append([3,4,1,"Big Spenders","341",7])
datacomb.append([4,2,1,"Big Spenders","421",7])
datacomb.append([4,3,1,"Big Spenders","431",7])
datacomb.append([2,1,1,"Loyal Customers-Big Spenders","211",6])
datacomb.append([3,1,1,"Almost Lost","311",4])
datacomb.append([1,1,1,"Best Customers","111",8])
datacomb.append([4,1,1,"Lost Customers","411",3])
datacomb.append([4,4,4,"Lost Cheap Customers","444",1])
datacomb.append([1,1,2,"Loyal Customers","112",5])
datacomb.append([1,1,3,"Loyal Customers","113",5])
datacomb.append([1,1,4,"Loyal Customers","114",5])
datacomb.append([2,1,2,"Loyal Customers","212",5])
datacomb.append([2,1,3,"Loyal Customers","213",5])
datacomb.append([2,1,4,"Loyal Customers","214",5])
datacomb.append([3,1,2,"Loyal Customers","312",5])
datacomb.append([3,1,3,"Loyal Customers","313",5])
datacomb.append([3,1,4,"Loyal Customers","314",5])
datacomb.append([4,1,2,"Loyal Customers","412",5])
datacomb.append([4,1,3,"Loyal Customers","413",5])
datacomb.append([4,1,4,"Loyal Customers","414",5])
datacomb.append([1,4,2,"Others","142",2])
datacomb.append([1,4,3,"Others","143",2])
datacomb.append([1,4,4,"Others","144",2])
datacomb.append([4,4,1,"Big Spenders","441",7])
datacomb.append([1,2,2,"Others","122",2])
datacomb.append([1,2,3,"Others","123",2])
datacomb.append([1,2,4,"Others","124",2])
datacomb.append([1,3,2,"Others","132",2])
datacomb.append([1,3,3,"Others","133",2])
datacomb.append([1,3,4,"Others","134",2])
datacomb.append([2,2,2,"Others","222",2])
datacomb.append([2,2,3,"Others","223",2])
datacomb.append([2,2,4,"Others","224",2])
datacomb.append([2,3,2,"Others","232",2])
datacomb.append([2,3,3,"Others","233",2])
datacomb.append([2,3,4,"Others","234",2])
datacomb.append([2,4,2,"Others","242",2])
datacomb.append([2,4,3,"Others","243",2])
datacomb.append([2,4,4,"Others","244",2])
datacomb.append([3,2,2,"Others","322",2])
datacomb.append([3,2,3,"Others","323",2])
datacomb.append([3,2,4,"Others","324",2])
datacomb.append([3,3,2,"Others","332",2])
datacomb.append([3,3,3,"Others","333",2])
datacomb.append([3,3,4,"Others","334",2])
datacomb.append([3,4,2,"Others","342",2])
datacomb.append([3,4,3,"Others","343",2])
datacomb.append([3,4,4,"Others","344",2])
datacomb.append([4,2,2,"Others","422",2])
datacomb.append([4,2,3,"Others","423",2])
datacomb.append([4,2,4,"Others","424",2])
datacomb.append([4,3,2,"Others","432",2])
datacomb.append([4,3,3,"Others","433",2])
datacomb.append([4,3,4,"Others","434",2])
datacomb.append([4,4,2,"Others","442",2])
datacomb.append([4,4,3,"Others","443",2])
dfdatacomb = pd.DataFrame(datacomb, columns=['r_qt', 'f_qt', 'm_qt', 'description', 'rfm', 'sort'])
#%% create data for month to display in chart detail
df_raw['year'] = df_raw['InvoiceDate'].dt.year
df_raw['month'] = df_raw['InvoiceDate'].dt.month
df_final= pd.DataFrame()
df_date = df_raw[['month','year']].drop_duplicates()
for k, r in df_date.iterrows():
#%% get number of unique countries and their names
df = df_raw [(df_raw['month']== r['month']) & (df_raw['year']== r['year'])]
df.country.nunique()
df.country.unique()
#%% drop duplicates and group by country and customer ID
cc = df[['country','customerid']].drop_duplicates()
cc.groupby(['country'])['customerid']. \
aggregate('count').reset_index(). \
sort_values('customerid', ascending=False)
#%% remove customers without customer ID
df = df[pd.notnull(df['customerid'])]
df.isnull().sum(axis=0)
#%% ensure only positive quantities and prices
df.UnitPrice.min()
df.Quantity.min()
df = df[(df['Quantity']>0)]
#%% check unique value for each column
def unique_counts(df):
for i in df.columns:
count = df[i].nunique()
print(i, ": ", count)
unique_counts(df)
#%% add column for total price
df['TotalPrice'] = df['Quantity'] * df['UnitPrice']
#%% determine first and last order date
df['InvoiceDate'].min()
df['InvoiceDate'].max()
#%% establish day after last purchase as point of calculation for recency
now = df['InvoiceDate'].max()
df['InvoiceDate'] = pd.to_datetime(df['InvoiceDate'])
#%% create RFM table
rfmTable = df.groupby(['customerid']).agg({
'InvoiceDate': lambda x: (now - x.max()).days, #recency
'InvoiceNo': lambda x: len(x), #frequency
'TotalPrice': lambda x: x.sum()}) #monetary
rfmTable['InvoiceDate'] = rfmTable['InvoiceDate'].astype(int)
#print(rfmTable)
#%% convert invoice date to integer and rename columns for RFM
rfmTable.rename(columns={
'InvoiceDate': 'recency',
'InvoiceNo': 'frequency',
'TotalPrice': 'monetary'}, inplace=True)
#%% shift rfmTable data to quantiles for segmentation
quantiles = rfmTable.quantile(q=[0.25,0.5,0.75])
quantiles = quantiles.to_dict()
#quantiles
#%% create a segmented RFM table
rfmSegment2 = rfmTable.copy()
#%% create new columns for RFM and assign values based on quantile
rfmSegment2['r_qt'] = rfmSegment2['recency'].apply(scoreRecency, args=('recency',quantiles,))
rfmSegment2['f_qt'] = rfmSegment2['frequency'].apply(scoreFrequency, args=('frequency',quantiles,))
rfmSegment2['m_qt'] = rfmSegment2['monetary'].apply(scoreMonetary, args=('monetary',quantiles,))
#%% calculate total RFM score as string composed of individual RFM quantiles
rfmSegment2['rfm'] = rfmSegment2.r_qt.map(str) \
+ rfmSegment2.f_qt.map(str) \
+ rfmSegment2.m_qt.map(str)
#%% translate raw RFM values to log values for plotting, common log
    rfmSegment2 = rfmSegment2.assign(r_lg=lambda x: np.log10(x.recency))
    rfmSegment2 = rfmSegment2.assign(f_lg=lambda x: np.log10(x.frequency))
    rfmSegment2 = rfmSegment2.assign(m_lg=lambda x: np.log10(x.monetary))
rfmSegment2['month'] = r['month']
rfmSegment2['year'] = r['year']
rfmSegment2['customerid'] = rfmSegment2.index
df_final= df_final.append(rfmSegment2, ignore_index=True)
#df_final = df_final[df_final['customerid'].isin(bestCustomers['customerid'])].sort_values('monetary', ascending=False)
df_final = pd.merge(df_final, dfdatacomb, how='left', on='rfm')[['recency','frequency','monetary','rfm','customerid','month','year','sort']]
df_final['date']= df_final['month'].map(str) + " - " + df_final['year'].map(str)
dic = {}
rfmSegment['customerid'] = rfmSegment.index
for k, r in rfmSegment.iterrows():
#calculate score variation among last two month
rfmSegmentLoc = df_final[(df_final['customerid']==r['customerid'])]
if len(rfmSegmentLoc.index) > 1:
rfmSegment.loc[k, 'variation'] = rfmSegmentLoc.iloc[len(rfmSegmentLoc)-1][['sort']].iloc[0] - rfmSegmentLoc.iloc[len(rfmSegmentLoc)-2][['sort']].iloc[0]
else:
rfmSegment.loc[k,'variation'] = 0
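    # variation > 0 means the customer moved to a better segment than the previous month (higher 'sort' = better segment)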
#create dictionary for detail chart
dic.update({ str(int(r['customerid'])): {"customerid" : str(int(r['customerid'])) , "result":[
{
"labels": list(df_final[df_final['customerid']== r['customerid']] ['date']),
'datasets': [{'label': str(r['customerid']) , 'data':list(df_final[df_final['customerid']== r['customerid']] ['sort']), 'lineTension': 0.1,
'backgroundColor': 'rgba(75,192,192,0.4)',
'borderColor': 'rgba(75,192,192,1)', }]
},
{
"labels": list(df_final[df_final['customerid']== r['customerid']] ['date']),
'datasets': [{'label': "Customer ID " + str(r['customerid']) , 'data': list(df_final[df_final['customerid']== r['customerid']] ['recency']), 'lineTension': 0.1,
'backgroundColor': 'rgba(75,192,192,0.4)',
'borderColor': 'rgba(75,192,192,1)', }]
}
,
{
"labels": list(df_final[df_final['customerid']== r['customerid']] ['date']),
'datasets': [{'label': "Customer ID " + str(r['customerid']) , 'data': list(df_final[df_final['customerid']== r['customerid']] ['frequency']), 'lineTension': 0.1,
'backgroundColor': 'rgba(75,192,192,0.4)',
'borderColor': 'rgba(75,192,192,1)', }]
}
,
{
"labels": list(df_final[df_final['customerid']== r['customerid']] ['date']),
'datasets': [{'label': "Customer ID " + str(r['customerid']) , 'data': list(df_final[df_final['customerid']== r['customerid']] ['monetary']), 'lineTension': 0.1,
'backgroundColor': 'rgba(75,192,192,0.4)',
'borderColor': 'rgba(75,192,192,1)', }]
}
]}}
)
with open('rfmcustomersTimeSeries.json', 'w') as fp:
json.dump(dic, fp)
#%% variation among last 2 month
#%% create json files
#create json for doughnut chart
rfmSegment['customerid']=rfmSegment.index
rfmSegment = pd.merge(rfmSegment, dfdatacomb, how='left', on='rfm')
rfmSegment.index=rfmSegment['customerid']
rfmSegment[['recency_total','frequency_total','monetary_total','customerid','sort', 'description', 'rfm', 'variation']].to_json("rfmcustomers.json", orient='records')
#create json for table
rfmSegment.groupby(['sort','description']).size().reset_index(name='counts').to_json("rfmSegment.json", orient='records')
``` |
{
"source": "jramnai/django-upgrade",
"score": 2
} |
#### File: src/django_upgrade/_data.py
```python
import ast
import pkgutil
from collections import defaultdict
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Iterable,
List,
NamedTuple,
Set,
Tuple,
Type,
TypeVar,
)
from tokenize_rt import Offset, Token
from django_upgrade import _plugins
class Settings(NamedTuple):
target_version: Tuple[int, int]
class State(NamedTuple):
settings: Settings
filename: str
from_imports: Dict[str, Set[str]]
AST_T = TypeVar("AST_T", bound=ast.AST)
TokenFunc = Callable[[int, List[Token]], None]
ASTFunc = Callable[[State, AST_T, ast.AST], Iterable[Tuple[Offset, TokenFunc]]]
if TYPE_CHECKING: # pragma: no cover
from typing import Protocol
else:
Protocol = object
class ASTCallbackMapping(Protocol):
def __getitem__(self, tp: Type[AST_T]) -> List[ASTFunc[AST_T]]: # pragma: no cover
...
def items(self) -> Iterable[Tuple[Any, Any]]: # pragma: no cover
...
def visit(
tree: ast.Module,
settings: Settings,
filename: str,
) -> Dict[Offset, List[TokenFunc]]:
ast_funcs = get_ast_funcs(settings.target_version)
initial_state = State(
settings=settings,
filename=filename,
from_imports=defaultdict(set),
)
nodes: List[Tuple[State, ast.AST, ast.AST]] = [(initial_state, tree, tree)]
ret = defaultdict(list)
while nodes:
state, node, parent = nodes.pop()
for ast_func in ast_funcs[type(node)]:
for offset, token_func in ast_func(state, node, parent):
ret[offset].append(token_func)
if (
isinstance(node, ast.ImportFrom)
and node.level == 0
and (
node.module is not None
and (node.module.startswith("django.") or node.module == "django")
)
):
state.from_imports[node.module].update(
name.name for name in node.names if not name.asname
)
for name in reversed(node._fields):
value = getattr(node, name)
next_state = state
if isinstance(value, ast.AST):
nodes.append((next_state, value, node))
elif isinstance(value, list):
for value in reversed(value):
if isinstance(value, ast.AST):
nodes.append((next_state, value, node))
return ret
class Plugin:
def __init__(self, name: str, min_version: Tuple[int, int]) -> None:
self.name = name
self.min_version = min_version
self.ast_funcs: ASTCallbackMapping = defaultdict(list)
PLUGINS.append(self)
def register(
self, type_: Type[AST_T]
) -> Callable[[ASTFunc[AST_T]], ASTFunc[AST_T]]:
def decorator(func: ASTFunc[AST_T]) -> ASTFunc[AST_T]:
self.ast_funcs[type_].append(func)
return func
return decorator
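# Illustrative sketch (hypothetical plugin module, not part of this file): a fixer creates a Plugin
# and registers per-node callbacks that yield (Offset, token_func) pairs, e.g.
#
#   fixer = Plugin("example_fixer", min_version=(2, 2))
#
#   @fixer.register(ast.ImportFrom)
#   def _visit_import_from(state, node, parent):
#       if node.module == "django.some.old.module":  # hypothetical condition
#           yield Offset(node.lineno, node.col_offset), _rewrite_tokens  # _rewrite_tokens: a hypothetical TokenFunc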
PLUGINS: List[Plugin] = []
def _import_plugins() -> None:
# https://github.com/python/mypy/issues/1422
plugins_path: str = _plugins.__path__ # type: ignore
mod_infos = pkgutil.walk_packages(plugins_path, f"{_plugins.__name__}.")
for _, name, _ in mod_infos:
__import__(name, fromlist=["_trash"])
_import_plugins()
def get_ast_funcs(target_version: Tuple[int, int]) -> ASTCallbackMapping:
ast_funcs: ASTCallbackMapping = defaultdict(list)
for plugin in PLUGINS:
if target_version >= plugin.min_version:
for type_, type_funcs in plugin.ast_funcs.items():
ast_funcs[type_].extend(type_funcs)
return ast_funcs
``` |
{
"source": "jramnai/enmerkar",
"score": 2
} |
#### File: enmerkar/tests/conftest.py
```python
from django.conf import settings
from testproject import settings as testproject_settings
def pytest_configure():
filtered_settings = {k: v for (k, v) in vars(testproject_settings).items()
if k.isupper()}
settings.configure(**filtered_settings)
``` |
{
"source": "jramnai/ExpenseCalculator",
"score": 2
} |
#### File: ExpenseCalculator/expense/models.py
```python
from __future__ import unicode_literals
from django.db import models
# from model_utils.models import TimeStampedModel
# Create your models here.
class Category(models.Model):
name = models.CharField(max_length=200)
created = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
ordering = ('-created',)
def get_total_cost(self):
        # 'amount' is stored as a CharField, so cast it to float before summing
        return sum(float(item.amount) for item in self.items.all())
def __str__(self):
return self.name
class Expense(models.Model):
description = models.CharField(max_length=200)
amount = models.CharField(max_length=100)
date = models.DateField(max_length=8)
catgory = models.ForeignKey(
Category, related_name='items', on_delete=models.CASCADE)
created = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
ordering = ('-created',)
def __str__(self):
return self.description
``` |
{
"source": "jramnai/MyShcool",
"score": 2
} |
#### File: MyShcool/accounts/admin.py
```python
from __future__ import unicode_literals
from django.contrib import admin
# Register your models here.
from django.contrib.auth.models import User
from django.contrib.auth.admin import UserAdmin
from .models import Profile
class ProfileInline(admin.StackedInline):
model = Profile
can_delete = False
verbose_name_plural = 'Profile'
fk_name = 'user'
class CustomUserAdmin(UserAdmin):
inlines = (ProfileInline, )
list_display = ('username', 'email', 'first_name',
'last_name', 'get_enroll_number', 'is_staff', 'get_location')
list_select_related = ('profile', )
def get_location(self, instance):
return instance.profile.role
get_location.short_description = 'role'
def get_enroll_number(self, instance):
return instance.profile.enroll_number
get_enroll_number.short_description = 'Enroll Number'
def get_inline_instances(self, request, obj=None):
if not obj:
return list()
return super(CustomUserAdmin, self).get_inline_instances(request, obj)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
```
#### File: MyShcool/result/views.py
```python
from __future__ import unicode_literals
from django.shortcuts import render
from .serializers import StandardSerializer, MarksheetSerializer
from rest_framework import viewsets
from .models import Marksheet, Standard
# Create your views here.
def index(request):
if not request.user.is_authenticated:
return render(request, "result/index.html")
else:
return render(request, "accounts/profile.html")
class MarksheetViewSet(viewsets.ModelViewSet):
queryset = Marksheet.objects.all()
serializer_class = MarksheetSerializer
class StandardViewSet(viewsets.ModelViewSet):
queryset = Standard.objects.all()
serializer_class = StandardSerializer
``` |
{
"source": "jramnai/PremiumCourse",
"score": 2
} |
#### File: PremiumCourse/CoursePremium/views.py
```python
from __future__ import unicode_literals
from django.shortcuts import render, render_to_response
from .models import Premium
from django.http import HttpResponse
# Create your views here.
def request_course_creator(request):
"""
User has requested course creation access.
"""
user_requested_access(request.user)
return render(request)
def add_user_with_status_unrequested(user):
"""
Adds a user to the course creator table with status 'unrequested'.
If the user is already in the table, this method is a no-op
(state will not be changed).
If the user is marked as is_staff, this method is a no-op (user
will not be added to table).
"""
_add_user(user, Premium.UNREQUESTED)
def user_requested_access(user):
"""
User has requested course creator access.
This changes the user state to CourseCreator.PENDING, unless the user
state is already CourseCreator.GRANTED, in which case this method is a no-op.
"""
user = Premium.objects.get(user=user)
if user.state != Premium.GRANTED:
user.state = Premium.PENDING
user.save()
def _add_user(user, state):
"""
Adds a user to the course creator table with the specified state.
Returns True if user was added to table, else False.
If the user is already in the table, this method is a no-op
(state will not be changed, method will return False).
If the user is marked as is_staff, this method is a no-op (False will be returned).
"""
if Premium.objects.filter(user=user).count() == 0:
entry = Premium(user=user, state=state)
entry.save()
return True
return False
########################################################################
def premium_list(request):
premiums = Premium.objects.all()
choices = Premium.STATES
states = []
for choice in choices:
states.append(str(choice[0]))
return render(request, 'CoursePremium/premium_list.html', {'premiums':premiums, 'states':states})
def update_state(request):
t_state = str(request.GET['id_state'])
t_id = request.user.id
obj = Premium.objects.get(id=t_id)
obj.state = t_state
obj.save()
return HttpResponse(t_state) # Sending an success response
# @receiver(post_init, sender=Premium)
# def post_init_callback(sender, **kwargs):
# """
# Extend to store previous state.
# """
# instance = kwargs['instance']
# instance.orig_state = instance.state
# @receiver(post_save, sender=Premium)
# def post_save_callback(sender, **kwargs):
# """
# Extend to update state_changed time and fire event to update course creator group, if appropriate.
# """
# instance = kwargs['instance']
# # We only wish to modify the state_changed time if the state has been modified. We don't wish to
# # modify it for changes to the notes field.
# if instance.state != instance.orig_state:
# granted_state_change = instance.state == Premium.GRANTED or instance.orig_state == Premium.GRANTED
# # If user has been denied access, granted access, or previously granted access has been
# # revoked, send a notification message to the user.
# if instance.state == CourseCreator.DENIED or granted_state_change:
# send_user_notification.send(
# sender=sender,
# user=instance.user,
# state=instance.state
# )
# # If the user has gone into the 'pending' state, send a notification to interested admin.
# if instance.state == CourseCreator.PENDING:
# send_admin_notification.send(
# sender=sender,
# user=instance.user
# )
# instance.state_changed = timezone.now()
# instance.orig_state = instance.state
# instance.save()
``` |
{
"source": "jramongomez/DAI-1718-UGR",
"score": 3
} |
#### File: DAI-1718-UGR/Practica2/Ejercicio4.py
```python
from flask import Flask, render_template
from flask import request, redirect, url_for
from mandelbrot import *
app = Flask(__name__)
@app.errorhandler(404)
def page_not_found(error):
return render_template('error.html', err = error)
@app.route('/', methods=['GET','POST'])
def index():
return render_template('fractalForm.html')
@app.route('/generaFractal', methods=['GET','POST'])
def generaFractal():
x1 = float(request.args.get('x1'))
x2 = float(request.args.get('x2'))
y1 = float(request.args.get('y1'))
y2 = float(request.args.get('y2'))
anchura = int(request.args.get('anchura'))
iteraciones = int(request.args.get('iteraciones'))
color1 = (request.args.get('color1'))
color2 = (request.args.get('color2'))
color3 = (request.args.get('color3'))
if (not((color1 == "#000000") and (color2 == "#000000") and (color3 == "#000000"))):
        # Each HTML color has the format rrggbb, so we split each color into a list of 3 (r, g, b) tuples.
color1 = color1[1:]
color2 = color2[1:]
color3 = color3[1:]
paleta = ((int(color1[:2], 16), int(color1[2:4], 16), int(color1[4:], 16)),
(int(color2[:2], 16), int(color2[2:4], 16), int(color2[4:], 16)),
(int(color3[:2], 16), int(color3[2:4], 16), int(color3[4:], 16)))
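        # e.g. "#ff8800" -> "ff8800" -> (255, 136, 0)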
renderizaMandelbrotBonito(x1,y1,x2,y2,anchura, iteraciones, "fractal.png", paleta, len(paleta))
else:
renderizaMandelbrot(x1,y1,x2,y2,anchura, iteraciones, "fractal.png")
return render_template('generaFractal.html')
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)  # 0.0.0.0 to allow connections
                                         # from anywhere.
                                         # Careful, dangerous: only
                                         # in debug mode
```
#### File: DAI-1718-UGR/Practica5/practica5.py
```python
from flask import Flask, render_template, session
from flask import request, redirect, url_for, jsonify
import shelve
from lxml import etree
import xml.etree.cElementTree as ET
from pymongo import MongoClient
app = Flask(__name__)
app.secret_key = '<KEY>'
@app.route('/')
def index():
username = None
if 'username' in session:
username = session['username']
db = shelve.open('datos.dat')
datos = db[username]
db.close()
return render_template('indexLogin.html', usuario = username, profile = datos)
return render_template('index.html', usuario = username)
@app.route('/login', methods=['GET','POST'])
def login():
username = None
if (request.method == 'POST'):
db = shelve.open('datos.dat')
        # Check that the user exists; if it does not exist, this will return False
flag = request.form['username'] in db
if flag:
datos = db[request.form['username']]
if (datos['Contrasenia'] == request.form['password']):
session['username'] = request.form['username']
db.close()
return redirect(url_for('index'))
return render_template('login.html')
@app.route('/logout', methods=['GET','POST'])
def logout():
session.pop('username', None)
return redirect(url_for('index'))
@app.route('/register', methods=['GET','POST'])
def register():
if (request.method == 'POST'):
db = shelve.open('datos.dat')
db[request.form['username']] = {'Nombre': request.form['first_name'],
'Apellidos': request.form['last_name'],
'Correo': request.form['email'],
'Telefono': request.form['telephone'],
'Nombre de usuario': request.form['username'],
'Contrasenia': request.form['password']}
db.close()
return redirect(url_for('index'))
return render_template('register.html')
@app.route('/profile', methods=['GET','POST'])
def profile():
username = session['username']
db = shelve.open('datos.dat')
datos = db[username]
db.close()
return render_template('indexLogin.html', profile = datos, usuario= username)
@app.route('/saveProfile', methods=['GET','POST'])
def save():
username = session['username']
db = shelve.open('datos.dat')
db[username] = {'Nombre': request.form['first_name'],
'Apellidos': request.form['last_name'],
'Correo': request.form['email'],
'Telefono': request.form['telephone'],
'Nombre de usuario': username,
'Contrasenia': request.form['password']}
db.close()
return redirect(url_for('index'))
@app.route('/search', methods=['GET','POST'])
def search():
usuario = None
    if 'username' in session:
usuario = session['username']
query = request.form['tipoBusqueda']
keyword = request.form['keyword']
busqueda = query
    # When the field we want to query is nested inside an "array"
if(query == "zipcode" or query == "street"):
query = "address." + query
return render_template('search.html', tipoBusqueda = query, palabraClave = keyword, usuario = usuario)
def buscarRestaurantes(query, keyword, pagina, maxElem, numeroElementos):
client = MongoClient('mongodb://localhost:27017/')
    # Get the database
db = client['test']
    # Get our collection from the database
restaurants = db.restaurants
busqueda = []
    # The query returns an iterable cursor, so we extract its contents
rang_min = int(numeroElementos)
rang_max = int(numeroElementos) + int(maxElem)
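    # e.g. numeroElementos=20 and maxElem=10 slice the sorted results as [20:30] (one "page" of 10)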
for r in restaurants.find({query : keyword}).sort("name")[rang_min:rang_max]:
diccionario = { "name": r['name'],
"cuisine": r['cuisine'],
"street": r['address']['street'],
"building" : r['address']['building'],
"zipcode" : r['address']['zipcode'],
"borough" : r['borough'] }
busqueda.append(diccionario)
return busqueda
@app.route('/busqueda_restaurantes')
def responde():
pagina = request.args.get('pagina', '')
maxElementos = request.args.get('maxElem', '')
query = request.args.get('query', '')
keyword = request.args.get('keyword', '')
numeroElementos = request.args.get('numElementos', '')
busqueda = buscarRestaurantes(query, keyword, pagina, maxElementos, numeroElementos)
return jsonify({'busqueda' :busqueda})
@app.errorhandler(404)
def page_not_found(error):
return render_template('error.html', err = error)
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)  # 0.0.0.0 to allow connections
                                         # from anywhere.
                                         # Careful, dangerous: only
                                         # in debug mode
``` |
{
"source": "jramosdc/whoisthispolitician",
"score": 3
} |
#### File: jramosdc/whoisthispolitician/classify.py
```python
import sys
import numpy as np
from PIL import Image
import cv2
import os, os.path
# storage locations for the trained graph and labels are defined in train.sh ##
# Disable tensorflow compilation warnings
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import tensorflow as tf
image_path = sys.argv[1]
# supplied on the console as an argument after the call
##CROPPING
#multiple cascades: https://github.com/Itseez/opencv/tree/master/data/haarcascades
#https://github.com/Itseez/opencv/blob/master/data/haarcascades/haarcascade_frontalface_default.xml
face_cascade = cv2.CascadeClassifier('faces.xml')
#https://github.com/Itseez/opencv/blob/master/data/haarcascades/haarcascade_eye.xml
eye_cascade = cv2.CascadeClassifier('eye.xml')
nfaces_detected = 0
# note the dependency on the format of the filename
img = cv2.imread(image_path)
height = img.shape[0]
width = img.shape[1]
size = height * width
#???
# if size > (500^2):
# r = 500.0 / img.shape[1]
# dim = (500, int(img.shape[0] * r))
# img2 = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
# img = img2
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
#faces = face_cascade.detectMultiScale(gray, 1.3, 5)
faces = face_cascade.detectMultiScale(gray, scaleFactor=1.3, minNeighbors=3, minSize=(15, 15), flags = cv2.CASCADE_SCALE_IMAGE )
nface_within_pic = 0
for (x,y,w,h) in faces:
face_with_eyes_detected = 0
imgCrop = img[y:y+h,x:x+w]
#cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2)
roi_gray = gray[y:y+h, x:x+w]
roi_color = img[y:y+h, x:x+w]
#eyes = eye_cascade.detectMultiScale(roi_gray)
    eyes = eye_cascade.detectMultiScale(roi_gray, scaleFactor=1.3, minNeighbors=3, minSize=(5, 5), flags=cv2.CASCADE_SCALE_IMAGE)
eyesn = 0
for (ex,ey,ew,eh) in eyes:
#cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,255,0),2)
eyesn = eyesn +1
# allow detection if only one 1 eye for sideways face profile ?
# No, always assume a frontal profile since that's the haar detection profile we chose above
# if eyesn >= 1:
if eyesn >= 1:
face_with_eyes_detected = 1
#cv2.imshow('img',imgCrop)
if face_with_eyes_detected > 0:
cv2.imwrite('face'+str(nface_within_pic)+'.jpg', imgCrop)
print("Image has been processed and cropped")
nface_within_pic += 1
nfaces_detected += 1
##CROPPING ENDS
#CHOOSE BIGGEST FACE
filenames= ['face%d.jpg'%(i,) for i in range(nfaces_detected)]
sizes = [Image.open(f, 'r').size for f in filenames]
largest= max(sizes)
index= sizes.index(largest)
imagefile= filenames[index]
print(imagefile+ " is the largest face, so we will id it.")
# read the image file
image_data = tf.gfile.FastGFile(imagefile, 'rb').read()
# load the labels from file into an array
label_lines = [line.rstrip() for line
in tf.gfile.GFile("tf_files/retrained_labels.txt")]
# !! labels are each on their own line -> no change needed in retrain.py -> displayed incorrectly in the Windows editor !!
# read in the graph, which was trained in train.sh -> call retrain.py
with tf.gfile.FastGFile("tf_files/retrained_graph.pb", 'rb') as f:
    graph_def = tf.GraphDef()  ## The graph_def is a saved copy of a TensorFlow graph; object initialization
graph_def.ParseFromString(f.read()) #Parse serialized protocol buffer data into variable
_ = tf.import_graph_def(graph_def, name='') # import a serialized TensorFlow GraphDef protocol buffer, extract objects in the GraphDef as tf.Tensor
#https://github.com/Hvass-Labs/TensorFlow-Tutorials/blob/master/inception.py ; ab zeile 276
with tf.Session() as sess:
softmax_tensor = sess.graph.get_tensor_by_name('final_result:0')
    # return: Tensor("final_result:0", shape=(?, 4), dtype=float32); string name defined in retrain.py, line 1064
predictions = sess.run(softmax_tensor, \
{'DecodeJpeg/contents:0': image_data})
    # returns the prediction values as an array:
top_k = predictions[0].argsort()[-len(predictions[0]):][::-1]
    # sorting: circle -> 0, plus -> 1, square -> 2, triangle -> 3; example array return [3 1 2 0] -> sorted by highest match
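    # e.g. predictions[0] = [0.1, 0.6, 0.05, 0.25]  ->  top_k = [1, 3, 0, 2] (label indices sorted by descending score)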
# output
for node_id in top_k:
human_string = label_lines[node_id]
score = predictions[0][node_id]
print('%s (score = %.2f)' % (human_string, score))
``` |
{
"source": "jramosf/lemur",
"score": 2
} |
#### File: lemur/common/defaults.py
```python
import re
import unicodedata
from cryptography import x509
from flask import current_app
from lemur.common.utils import is_selfsigned
from lemur.extensions import sentry
from lemur.constants import SAN_NAMING_TEMPLATE, DEFAULT_NAMING_TEMPLATE
def text_to_slug(value, joiner='-'):
"""
Normalize a string to a "slug" value, stripping character accents and removing non-alphanum characters.
A series of non-alphanumeric characters is replaced with the joiner character.
"""
# Strip all character accents: decompose Unicode characters and then drop combining chars.
value = ''.join(c for c in unicodedata.normalize('NFKD', value) if not unicodedata.combining(c))
# Replace all remaining non-alphanumeric characters with joiner string. Multiple characters get collapsed into a
# single joiner. Except, keep 'xn--' used in IDNA domain names as is.
value = re.sub(r'[^A-Za-z0-9.]+(?<!xn--)', joiner, value)
# '-' in the beginning or end of string looks ugly.
return value.strip(joiner)
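# Illustrative behaviour (examples not from the original source):
#   text_to_slug("Éxample  Org!")     -> "Example-Org"
#   text_to_slug("DigiCert Inc", '')  -> "DigiCertInc"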
def certificate_name(common_name, issuer, not_before, not_after, san):
"""
Create a name for our certificate. A naming standard
is based on a series of templates. The name includes
useful information such as Common Name, Validation dates,
and Issuer.
:param san:
:param common_name:
:param not_after:
:param issuer:
:param not_before:
:rtype: str
:return:
"""
if san:
t = SAN_NAMING_TEMPLATE
else:
t = DEFAULT_NAMING_TEMPLATE
temp = t.format(
subject=common_name,
issuer=issuer.replace(' ', ''),
not_before=not_before.strftime('%Y%m%d'),
not_after=not_after.strftime('%Y%m%d')
)
temp = temp.replace('*', "WILDCARD")
return text_to_slug(temp)
def signing_algorithm(cert):
return cert.signature_hash_algorithm.name
def common_name(cert):
"""
Attempts to get a sane common name from a given certificate.
:param cert:
:return: Common name or None
"""
try:
return cert.subject.get_attributes_for_oid(
x509.OID_COMMON_NAME
)[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get common name! {0}".format(e))
def organization(cert):
"""
Attempt to get the organization name from a given certificate.
:param cert:
:return:
"""
try:
return cert.subject.get_attributes_for_oid(
x509.OID_ORGANIZATION_NAME
)[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get organization! {0}".format(e))
def organizational_unit(cert):
"""
Attempt to get the organization unit from a given certificate.
:param cert:
:return:
"""
try:
return cert.subject.get_attributes_for_oid(
x509.OID_ORGANIZATIONAL_UNIT_NAME
)[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get organizational unit! {0}".format(e))
def country(cert):
"""
Attempt to get the country from a given certificate.
:param cert:
:return:
"""
try:
return cert.subject.get_attributes_for_oid(
x509.OID_COUNTRY_NAME
)[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get country! {0}".format(e))
def state(cert):
"""
    Attempt to get the state from a given certificate.
:param cert:
:return:
"""
try:
return cert.subject.get_attributes_for_oid(
x509.OID_STATE_OR_PROVINCE_NAME
)[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get state! {0}".format(e))
def location(cert):
"""
Attempt to get the location name from a given certificate.
:param cert:
:return:
"""
try:
return cert.subject.get_attributes_for_oid(
x509.OID_LOCALITY_NAME
)[0].value.strip()
except Exception as e:
sentry.captureException()
current_app.logger.error("Unable to get location! {0}".format(e))
def domains(cert):
"""
    Attempts to get all domains listed in a certificate.
If 'subjectAltName' extension is not available we simply
return the common name.
:param cert:
:return: List of domains
"""
domains = []
try:
ext = cert.extensions.get_extension_for_oid(x509.OID_SUBJECT_ALTERNATIVE_NAME)
entries = ext.value.get_values_for_type(x509.DNSName)
for entry in entries:
domains.append(entry)
except x509.ExtensionNotFound:
if current_app.config.get("LOG_SSL_SUBJ_ALT_NAME_ERRORS", True):
sentry.captureException()
except Exception as e:
sentry.captureException()
return domains
def serial(cert):
"""
Fetch the serial number from the certificate.
:param cert:
:return: serial number
"""
return cert.serial_number
def san(cert):
"""
Determines if a given certificate is a SAN certificate.
SAN certificates are simply certificates that cover multiple domains.
:param cert:
:return: Bool
"""
if len(domains(cert)) > 1:
return True
def is_wildcard(cert):
"""
Determines if certificate is a wildcard certificate.
:param cert:
:return: Bool
"""
d = domains(cert)
if len(d) == 1 and d[0][0:1] == "*":
return True
if cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[0].value[0:1] == "*":
return True
def bitstrength(cert):
"""
Calculates a certificates public key bit length.
:param cert:
:return: Integer
"""
try:
return cert.public_key().key_size
except AttributeError:
sentry.captureException()
current_app.logger.debug('Unable to get bitstrength.')
def issuer(cert):
"""
Gets a sane issuer slug from a given certificate, stripping non-alphanumeric characters.
For self-signed certificates, the special value '<selfsigned>' is returned.
If issuer cannot be determined, '<unknown>' is returned.
:param cert: Parsed certificate object
:return: Issuer slug
"""
# If certificate is self-signed, we return a special value -- there really is no distinct "issuer" for it
if is_selfsigned(cert):
return '<selfsigned>'
# Try Common Name or fall back to Organization name
attrs = (cert.issuer.get_attributes_for_oid(x509.OID_COMMON_NAME) or
cert.issuer.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME))
if not attrs:
current_app.logger.error("Unable to get issuer! Cert serial {:x}".format(cert.serial_number))
return '<unknown>'
return text_to_slug(attrs[0].value, '')
def not_before(cert):
"""
Gets the naive datetime of the certificates 'not_before' field.
This field denotes the first date in time which the given certificate
is valid.
:param cert:
:return: Datetime
"""
return cert.not_valid_before
def not_after(cert):
"""
Gets the naive datetime of the certificates 'not_after' field.
This field denotes the last date in time which the given certificate
is valid.
:return: Datetime
"""
return cert.not_valid_after
```
#### File: plugins/lemur_acme/dyn.py
```python
import time
import dns
import dns.exception
import dns.name
import dns.query
import dns.resolver
from dyn.tm.errors import DynectCreateError, DynectGetError
from dyn.tm.session import DynectSession
from dyn.tm.zones import Node, Zone, get_all_zones
from flask import current_app
def get_dynect_session():
dynect_session = DynectSession(
current_app.config.get('ACME_DYN_CUSTOMER_NAME', ''),
current_app.config.get('ACME_DYN_USERNAME', ''),
current_app.config.get('ACME_DYN_PASSWORD', ''),
)
return dynect_session
def _has_dns_propagated(name, token):
txt_records = []
try:
dns_resolver = dns.resolver.Resolver()
dns_resolver.nameservers = [get_authoritative_nameserver(name)]
dns_response = dns_resolver.query(name, 'TXT')
for rdata in dns_response:
for txt_record in rdata.strings:
txt_records.append(txt_record.decode("utf-8"))
except dns.exception.DNSException:
return False
for txt_record in txt_records:
if txt_record == token:
return True
return False
def wait_for_dns_change(change_id, account_number=None):
fqdn, token = change_id
number_of_attempts = 10
for attempts in range(0, number_of_attempts):
status = _has_dns_propagated(fqdn, token)
current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status))
if status:
break
time.sleep(20)
if not status:
# TODO: Delete associated DNS text record here
raise Exception("Unable to query DNS token for fqdn {}.".format(fqdn))
return
def get_zone_name(domain):
zones = get_all_zones()
zone_name = ""
for z in zones:
if domain.endswith(z.name):
# Find the most specific zone possible for the domain
# Ex: If fqdn is a.b.c.com, there is a zone for c.com,
# and a zone for b.c.com, we want to use b.c.com.
if z.name.count(".") > zone_name.count("."):
zone_name = z.name
if not zone_name:
raise Exception("No Dyn zone found for domain: {}".format(domain))
return zone_name
def get_zones(account_number):
get_dynect_session()
zones = get_all_zones()
zone_list = []
for zone in zones:
zone_list.append(zone.name)
return zone_list
def create_txt_record(domain, token, account_number):
get_dynect_session()
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split('.'))
node_name = '.'.join(domain.split('.')[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
try:
zone.add_record(node_name, record_type='TXT', txtdata="\"{}\"".format(token), ttl=5)
zone.publish()
current_app.logger.debug("TXT record created: {0}, token: {1}".format(fqdn, token))
except DynectCreateError as e:
if "Cannot duplicate existing record data" in e.message:
current_app.logger.debug(
"Unable to add record. Domain: {}. Token: {}. "
"Record already exists: {}".format(domain, token, e), exc_info=True
)
else:
raise
change_id = (fqdn, token)
return change_id
def delete_txt_record(change_id, account_number, domain, token):
get_dynect_session()
if not domain:
current_app.logger.debug("delete_txt_record: No domain passed")
return
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split('.'))
node_name = '.'.join(domain.split('.')[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
node = Node(zone_name, fqdn)
try:
all_txt_records = node.get_all_records_by_type('TXT')
except DynectGetError:
# No Text Records remain or host is not in the zone anymore because all records have been deleted.
return
for txt_record in all_txt_records:
if txt_record.txtdata == ("{}".format(token)):
current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn))
txt_record.delete()
zone.publish()
def delete_acme_txt_records(domain):
get_dynect_session()
if not domain:
current_app.logger.debug("delete_acme_txt_records: No domain passed")
return
acme_challenge_string = "_acme-challenge"
if not domain.startswith(acme_challenge_string):
current_app.logger.debug(
"delete_acme_txt_records: Domain {} doesn't start with string {}. "
"Cowardly refusing to delete TXT records".format(domain, acme_challenge_string))
return
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split('.'))
node_name = '.'.join(domain.split('.')[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
node = Node(zone_name, fqdn)
all_txt_records = node.get_all_records_by_type('TXT')
for txt_record in all_txt_records:
current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn))
txt_record.delete()
zone.publish()
def get_authoritative_nameserver(domain):
if current_app.config.get('ACME_DYN_GET_AUTHORATATIVE_NAMESERVER'):
n = dns.name.from_text(domain)
depth = 2
default = dns.resolver.get_default_resolver()
nameserver = default.nameservers[0]
last = False
while not last:
s = n.split(depth)
last = s[0].to_unicode() == u'@'
sub = s[1]
query = dns.message.make_query(sub, dns.rdatatype.NS)
response = dns.query.udp(query, nameserver)
rcode = response.rcode()
if rcode != dns.rcode.NOERROR:
if rcode == dns.rcode.NXDOMAIN:
raise Exception('%s does not exist.' % sub)
else:
raise Exception('Error %s' % dns.rcode.to_text(rcode))
if len(response.authority) > 0:
rrset = response.authority[0]
else:
rrset = response.answer[0]
rr = rrset[0]
if rr.rdtype != dns.rdatatype.SOA:
authority = rr.target
nameserver = default.query(authority).rrset[0].to_text()
depth += 1
return nameserver
else:
return "8.8.8.8"
``` |
{
"source": "jramosperoni/prueba_back",
"score": 3
} |
#### File: app/users/routes.py
```python
import logging
from flask import render_template, current_app, request, redirect, url_for, \
flash, jsonify, session
from flask_login import login_user, logout_user, login_required, current_user
from .models import User
from . import users_bp
from .forms import LoginForm, UserFormAdd, UserFormEdit
from .. import db
from ..helpers import flash_errors
@users_bp.route('/login', methods=['GET', 'POST'])
def login():
# if not current_app.config['DEBUG'] and not current_app.config['TESTING'] \
# and not request.is_secure:
# return redirect(url_for('.login', _external=True, _scheme='https'))
form = LoginForm()
if form.validate_on_submit():
username = form.username.data
if User.rut_is_username:
username = username.upper()
user = User.query.filter_by(username=username).first()
if user is None or not user.verify_password(form.password.data):
flash(u"Usuario o contraseña invalidos.")
return redirect(url_for('.login'))
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('.index'))
return render_template('users/login.html', form=form)
@users_bp.route('/')
def index():
if current_user is not None and current_user.is_authenticated:
return render_template('users/welcome.html')
else:
return redirect(url_for('users.login'))
@users_bp.route('/logout')
@login_required
def logout():
session.clear()
logout_user()
flash('Usted ha salido.')
return redirect(url_for('.index'))
@users_bp.route('/users')
@login_required
def users():
page = request.args.get('page', 1, type=int)
pagination = User.query.order_by('rut').paginate(
page, per_page=20,
error_out=False)
users_list = pagination.items
return render_template('users/users.html', users=users_list,
pagination=pagination)
@users_bp.route('/user/', methods=['GET', 'POST'], defaults={'id': None})
@users_bp.route('/user/<int:id>', methods=['GET', 'POST'])
@login_required
def user_edit(id=None):
if current_user is not None and current_user.is_authenticated:
if current_user.is_admin:
pass
else:
# if id and current_user.id == id:
# pass
# else:
return render_template('wms/403_forbidden.html'), 403
if id:
form = UserFormEdit()
else:
form = UserFormAdd()
# form.wristband_id.choices = [(wb.id, wb.name) for wb in Wristband.query.order_by('id')]
if request.method == 'POST':
if id:
user = User.query.get_or_404(id)
else:
user = User()
if form.validate_on_submit():
try:
# if id:
# User.validate_wristband(form.wristband_id.data, id)
# else:
# User.validate_wristband(form.wristband_id.data)
form.to_model(user)
db.session.add(user)
db.session.commit()
flash('Usuario editado correctamente.')
except Exception as e:
logging.exception(e)
flash('Ha ocurrido un problema con la solicitud.')
return render_template('users/edit_user.html', form=form)
return redirect(url_for('.users'))
else:
flash_errors(form)
return render_template('users/edit_user.html', form=form)
elif request.method == 'GET':
if id:
user = User.query.get_or_404(id)
form.from_model(user)
return render_template('users/edit_user.html', form=form)
@users_bp.route('/user/<int:id>', methods=['DELETE'])
@login_required
def user_delete(id):
user = User.query.get_or_404(id)
db.session.delete(user)
db.session.commit()
return jsonify({'status': 'OK'})
``` |
{
"source": "jramseygreen/osu_bot_framework-v3",
"score": 2
} |
#### File: osu_bot_framework-v3/logic_profiles/AutoHostRotate.py
```python
class AutoHostRotate:
def __init__(self, bot, channel):
self.bot = bot
self.channel = channel
self.queue = channel.get_users().copy()
self.skip_vote = channel.new_vote_manager(self.carry_skip_vote)
self.start_vote = channel.new_vote_manager(self.carry_start_vote)
self.abort_vote = channel.new_vote_manager(self.channel.abort_match)
channel.set_command("!q", self.show_queue, "Shows the current queue of players")
channel.set_command("!queue", self.show_queue, "Shows the current queue of players")
channel.set_command("!skip", self.skip, "If you are host, changes the host to the next username in the queue else starts vote to skip current host")
channel.set_command("!randmap", channel.common_commands.randmap,"When host or referee, searches for a random beatmap matching the room's limits and ranges")
channel.set_command("!altlink", channel.common_commands.altlink,"Returns an alternate link for the current beatmap from Chimu.moe")
channel.set_command("!topdiff", channel.common_commands.topdiff,"When host, upgrades the beatmap to the highest difficulty within the room limits and ranges")
channel.set_command("!start", self.start,"When host starts the game with optional countdown timer else starts vote to start match")
channel.set_command("!mp start", self.mp_start,"When host starts the game with optional countdown timer else starts vote to start match")
channel.set_command("!aborttimer", channel.common_commands.abort_start_timer,"When host or referee, aborts start timer")
channel.set_command("!mp aborttimer", channel.common_commands.abort_start_timer, "When host or referee, aborts start timer")
channel.set_command("!abort", self.abort, "Starts vote to abort match")
channel.set_command("!mp abort", self.mp_abort, "Starts vote to abort match")
channel.set_command("!update", channel.common_commands.update_beatmap, "Updates current beatmap")
channel.set_command("!fight", channel.common_commands.fight, "Fight another user! Victories stack up.")
channel.set_command("R̲e̲f̲e̲r̲e̲e̲ C̲o̲m̲m̲a̲n̲d̲s̲", "")
channel.set_command("*skip", self.skip, "Changes the host to the next username in the queue")
channel.set_command("*implement", channel.common_commands.implement_logic_profile, "Implements a logic profile")
channel.set_command("*logic_profiles", channel.common_commands.get_logic_profiles, "Shows available logic profiles")
channel.set_command("*ar_range", channel.common_commands.ar_range, "Sets the ar range for the room. e.g. *ar_range 5.5 8")
channel.set_command("*od_range", channel.common_commands.od_range, "Sets the od range for the room. e.g. *od_range 5.5 8")
channel.set_command("*hp_range", channel.common_commands.hp_range, "Sets the hp range for the room. e.g. *hp_range 5.5 8")
channel.set_command("*cs_range", channel.common_commands.cs_range, "Sets the cs range for the room. e.g. *cs_range 5.5 8")
channel.set_command("*bpm_range", channel.common_commands.bpm_range, "Sets the bpm range for the room. e.g. *bpm_range 80 120")
channel.set_command("*diff_range", channel.common_commands.diff_range, "Sets the difficulty range for the room. e.g. *diff_range 4 5.99")
channel.set_command("*length_range", channel.common_commands.length_range, "Sets the length range for the room in seconds. e.g. *length_range 0 600")
channel.set_command("*map_status", channel.common_commands.map_status, "Sets the allowed map statuses for the room. e.g. *map_status ranked loved")
channel.set_command("*mods", channel.common_commands.mods, "Sets the allowed mods for the room. e.g. *mods freemod")
channel.set_command("*scoring_type", channel.common_commands.scoring_type, "Sets the allowed scoring mode for the room. e.g. *scoring_type score")
channel.set_command("*team_type", channel.common_commands.team_type, "Sets the allowed team mode for the room. e.g. *team_type head-to-head")
channel.set_command("*game_mode", channel.common_commands.game_mode, "Sets the allowed game mode for the room. e.g. *game_mode osu")
channel.set_command("*start_broadcast", channel.common_commands.add_broadcast, "Starts a broadcast in the channel. e.g. *start_broadcast 5 message sent every 5min")
channel.set_command("*stop_broadcast", channel.common_commands.del_broadcast, "Stops a broadcast in the channel given it's ID. e.g. *stop_broadcast 0")
channel.set_command("*welcome", channel.common_commands.welcome_message, "Sets the welcome message for the room. e.g. *welcome welcome to my osu room!")
channel.set_command("*disable_beatmap_checker", channel.common_commands.disable_beatmap_checker, "Disables beatmap checker")
channel.set_command("*enable_beatmap_checker", channel.common_commands.enable_beatmap_checker, "Enables beatmap checker")
channel.set_command("*enable_convert", channel.common_commands.allow_convert, "Allows beatmap conversion")
channel.set_command("*disable_convert", channel.common_commands.disallow_convert, "Disallows beatmap conversion")
channel.set_command("*enable_unsubmitted", channel.common_commands.allow_unsubmitted, "Allows unsubmitted beatmaps")
channel.set_command("*disable_unsubmitted", channel.common_commands.disallow_unsubmitted, "Disallows unsubmitted beatmaps")
channel.set_command("*add_artist_whitelist", channel.common_commands.add_artist_whitelist, "Adds an artist to the whitelist. e.g. *add_artist_whitelist eminem")
channel.set_command("*add_artist_blacklist", channel.common_commands.add_artist_blacklist, "Adds an artist to the blacklist. e.g. *add_artist_blacklist eminem")
channel.set_command("*add_creator_whitelist", channel.common_commands.add_beatmap_creator_whitelist, "Adds a beatmap creator to the whitelist. e.g. *add_creator_whitelist sotarks")
channel.set_command("*add_creator_blacklist", channel.common_commands.add_beatmap_creator_blacklist, "Adds a beatmap creator to the blacklist. e.g. *add_creator_blacklist sotarks")
channel.set_command("*del_artist_whitelist", channel.common_commands.del_artist_whitelist, "Removes an artist from the whitelist. e.g. *del_artist_whitelist eminem")
channel.set_command("*del_artist_blacklist", channel.common_commands.del_artist_blacklist, "Removes an artist from the blacklist. e.g. *del_artist_blacklist eminem")
channel.set_command("*del_creator_whitelist", channel.common_commands.del_beatmap_creator_whitelist, "Removes a beatmap creator from the whitelist. e.g. *del_creator_whitelist sotarks")
channel.set_command("*del_creator_blacklist", channel.common_commands.del_beatmap_creator_blacklist, "Removes a beatmap creator from the blacklist. e.g. *del_creator_blacklist sotarks")
channel.set_command("*add_player_blacklist", channel.common_commands.add_player_blacklist, "adds a player to the blacklist.")
channel.set_command("*del_player_blacklist", channel.common_commands.del_player_blacklist, "Removes a player from the blacklist.")
channel.set_command("*enable_start_on_players_ready", channel.common_commands.enable_start_on_players_ready, "enables starting the match when all players are ready")
channel.set_command("*disable_start_on_players_ready", channel.common_commands.disable_start_on_players_ready, "disables starting the match when all players are ready")
channel.set_command("*autostart", channel.common_commands.set_auto_start_timer, "Automatically adds start countdown after map is selected. e.g. *autostart 120")
channel.set_command("*enable_maintain_password", channel.common_commands.enable_maintain_password, "Enables maintaining password")
channel.set_command("*disable_maintain_password", channel.common_commands.disable_maintain_password,"disables maintaining password")
channel.set_command("*enable_maintain_size", channel.common_commands.enable_maintain_size, "Enables maintaining size")
channel.set_command("*disable_maintain_size", channel.common_commands.disable_maintain_size, "Disables maintaining size")
channel.set_command("*enable_auto_download", channel.common_commands.enable_auto_download, "Enables automatic downloading of maps for the bot administrator")
channel.set_command("*disable_auto_download", channel.common_commands.disable_auto_download,"Disables automatic downloading of maps for the bot administrator")
def show_queue(self, message):
if self.queue:
self.channel.send_message("The current queue is: " + ", ".join(self.queue))
def skip(self, message):
if message["username"] == self.channel.get_formatted_host() or (message["content"] == "*skip" and self.channel.has_referee(message["username"])):
if self.queue:
self.queue.append(self.queue.pop(0))
self.channel.set_host(self.queue[0])
self.skip_vote.stop()
elif not self.channel.in_progress():
self.skip_vote.cast_ballot(message["username"], "Skip host")
def carry_skip_vote(self, vote_manager):
self.channel.send_message("Skip host vote successful - voted for by: " + ", ".join(list(vote_manager.get_results("Skip host").keys())) + " | (" + str(vote_manager.get_threshold()) + " / " + str(len(self.channel.get_users())) + " players)")
if self.queue:
self.queue.append(self.queue.pop(0))
self.channel.set_host(self.queue[0])
def start(self, message):
if message["username"] == self.channel.get_formatted_host():
self.channel.common_commands.start_timer(message)
elif not self.channel.in_progress():
self.start_vote.cast_ballot(message["username"], "Start match")
def abort(self, message):
if self.channel.in_progress():
self.abort_vote.cast_ballot(message["username"], "End match")
def carry_start_vote(self, vote_manager):
self.channel.send_message("Match start vote successful - voted for by: " + ", ".join(list(vote_manager.get_results("Start match").keys())) + " | (" + str(vote_manager.get_threshold()) + " / " + str(len(self.channel.get_users())) + " players)")
self.channel.start_match(10)
def on_join(self, username):
self.queue.append(username)
if self.channel.get_users() == [username]:
self.channel.set_host(self.queue[0])
def on_part(self, username):
if self.queue[0] == username and len(self.queue) > 1 and not self.channel.in_progress():
self.queue.remove(username)
self.channel.set_host(self.queue[0])
else:
self.queue.remove(username)
def on_match_start(self):
self.start_vote.stop()
self.skip_vote.stop()
def on_match_finish(self):
if self.queue:
if self.queue[0] == self.channel.get_host():
self.queue.append(self.queue.pop(0))
self.channel.set_host(self.queue[0])
self.abort_vote.stop()
def on_match_abort(self):
self.on_match_finish()
def on_host_change(self, old_host, new_host):
if not self.channel.has_referee(new_host) and new_host != self.queue[0]:
self.channel.set_host(self.queue[0])
self.channel.send_message(old_host + " please type '!skip' if you want to skip your turn")
else:
self.skip_vote.stop()
def mp_start(self, message):
if not self.channel.has_referee(message["username"]) and message["username"] == self.channel.get_formatted_host():
self.start(message)
def mp_abort(self, message):
if not self.channel.has_referee(message["username"]) and self.channel.in_progress():
self.abort(message)
```
#### File: osu_bot_framework-v3/logic_profiles/KingOfTheHill.py
```python
class KingOfTheHill:
def __init__(self, bot, channel):
self.bot = bot
self.channel = channel
channel.set_beatmap_checker(True)
channel.maintain_password(True)
channel.maintain_size(True)
channel.set_custom_config("King Of The Hill:\n\nSee how good you really are! The bot will automatically give host to the top scoring player.\n\n")
self.scores = []
channel.set_command("!randmap", channel.common_commands.randmap, "When host or referee, searches for a random beatmap matching the room's limits and ranges")
channel.set_command("!altlink", channel.common_commands.altlink, "Returns an alternate link for the current beatmap from Chimu.moe")
channel.set_command("!topdiff", channel.common_commands.topdiff, "When host, upgrades the beatmap to the highest difficulty within the room limits and ranges")
channel.set_command("!start", channel.common_commands.start_timer, "When host or referee, starts the game with optional countdown timer")
channel.set_command("!aborttimer", channel.common_commands.abort_start_timer, "When host or referee, aborts start timer")
channel.set_command("!update", channel.common_commands.update_beatmap, "Updates current beatmap")
channel.set_command("!skip", self.skip, "when you are the host, transfers host to the next highest scoring player")
channel.set_command("R̲e̲f̲e̲r̲e̲e̲ C̲o̲m̲m̲a̲n̲d̲s̲", "")
channel.set_command("*implement", channel.common_commands.implement_logic_profile, "Implements a logic profile")
channel.set_command("*logic_profiles", channel.common_commands.get_logic_profiles, "Shows available logic profiles")
channel.set_command("*scoring_type", channel.common_commands.scoring_type, "Sets the allowed scoring mode for the room. e.g. *scoring_type score")
def skip(self, message):
if self.channel.get_formatted_host() == message["username"]:
if self.scores:
self.scores.append(self.scores.pop(0))
self.channel.set_host(self.scores[0]["username"])
elif self.channel.has_users():
self.channel.get_users().append(self.channel.get_users().pop(0))
self.channel.set_host(self.channel.get_users()[0])
def on_join(self, username):
if self.channel.get_users() == [username]:
self.channel.set_host(username)
def on_part(self, username):
if self.channel.get_host() == username:
self.scores.pop(0)
if self.channel.has_users():
if self.scores:
self.channel.set_host(self.scores[0]["username"])
else:
self.channel.set_host(self.channel.get_users()[0])
def on_match_finish(self):
self.scores = self.channel.get_ordered_scores()
match = self.channel.get_match_data()
key = "max_combo"
if "score" in match["scoring_type"]:
key = "score"
elif "accuracy" == match["scoring_type"]:
key = "accuracy"
self.channel.send_message(self.scores[0]["username"] + " wins with " + match["scoring_type"].replace("v2", "") + ": " + str(round(self.scores[0][key] * 100) / 100))
self.channel.set_host(self.scores[0]["username"])
def on_match_abort(self):
self.on_match_finish()
```
#### File: osu_bot_framework-v3/tools/logger.py
```python
from queue import Queue, Empty
import threading
# thread safe appending to file
class Logger:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.queue = Queue()
self.finished = False
threading.Thread(target=self.internal_writer).start()
def write(self, data):
self.queue.put(data)
def internal_writer(self):
while not self.finished:
try:
data = self.queue.get(True, 1)
except Empty:
continue
try:
f = open(*self.args, **self.kwargs)
f.write(data)
f.close()
except Exception as e:
f = open(*self.args, **self.kwargs)
f.write("-- An error occured here: " + str(e) + " --")
f.close()
self.queue.task_done()
def close(self):
self.queue.join()
self.finished = True
```
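A minimal usage sketch for the logger above, assuming the module is importable as `tools.logger`; the file name is illustrative:
```python
# Constructor arguments are forwarded to open() each time a queued entry is written.
from tools.logger import Logger

log = Logger("bot.log", "a", encoding="utf8")  # "bot.log" is a placeholder path
log.write("-- bot started --\n")               # enqueued; the background thread appends it
log.close()                                    # blocks until the queue drains, then stops the writer
```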
#### File: osu_bot_framework-v3/webapp/controller.py
```python
import json
import socket
import threading
import requests
from tools import crypto
from webapp.ws_server import ws_server
class Controller:
def __init__(self, bot, host="localhost", ws_port=9876, webapp_port=80, on_message_function=None):
self.bot = bot
self.__host = host
if not on_message_function:
on_message_function = self.__on_message
self.__ws = ws_server(host=host, port=ws_port, on_message_function=on_message_function)
self.__webapp_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.__webapp_port = webapp_port
self.__user_num = 20
self.__current_user_profile = {"username": "", "avatar_url": "", "country_code": "gb", "statistics": {"level": {"current": 0}, "global_rank": 0, "pp": 0, "hit_accuracy": 0, "play_count": 0}}
self.__making_room = False
self.crypto = crypto.CryptoWrapper(bot.get_password())
def __on_message(self, conn, msg):
if msg == "é" or msg == "\x03é":
return
if self.bot.is_authenticate():
msg = self.crypto.decrypt(msg)
data = {}
try:
data = json.loads(msg)
if not data["command"] == "update":
self.bot.log("-- webapp sent: " + msg + " --")
except:
if self.bot.is_authenticate():
self.send_message("authenticate", conn)
return
if data["command"] == "exit_handler":
self.bot.exit_handler()
elif data["command"] == "update":
self.update(conn)
elif data["command"] == "set_user_num":
self.__user_num = data["user_num"]
elif data["command"] == "start_match":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.start_match()
elif data["command"] == "abort_match":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.abort_match()
elif data["command"] == "channel_message":
data["message"] = data["message"].strip()
if data["message"]:
channel = self.bot.get_channel(data["channel"])
if channel:
channel.send_message(data["message"])
if channel.is_game():
message_arr = data["message"].lower().split(" ")
if len(message_arr) >= 2:
command = " ".join(message_arr[:2]).strip()
args = message_arr[2:]
if command == "!mp password":
channel.set_invite_link(channel.get_invite_link().replace(channel.get_password(), ""))
if args:
channel._password = args[0]
channel.set_invite_link(channel.get_invite_link() + args[0])
else:
channel._password = ""
elif command == "!mp size":
if args:
channel._size = int(args[0])
elif command == "!abort" and channel.in_progress():
if channel.get_logic()["on_match_abort"]:
x = threading.Thread(target=channel.get_logic()["on_match_abort"])
x.setDaemon(True)
x.start()
self.bot.log("-- on match abort method executed --")
elif data["command"] == "personal_message":
if data["message"]:
self.bot.send_personal_message(data["channel"], data["message"])
elif data["command"] == "make_room":
self.__making_room = True
self.bot.make_room(title=data["title"], password=data["password"], game_mode=data["game_mode"], scoring_type=data["scoring_type"], team_type=data["team_type"], logic_profile=data["logic_profile"], invite_list=data["invite_list"], beatmapID=data["beatmapID"], size=data["size"])
self.__making_room = False
elif data["command"] == "join":
self.bot.join(data["channel"])
elif data["command"] == "part":
self.bot.part(data["channel"])
elif data["command"] == "set_logic_profile":
channel = self.bot.get_channel(data["channel"])
if channel:
channel.implement_logic_profile(data["profile"])
elif data["command"] == "close_room":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.close_room()
elif data["command"] == "set_game_mode":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_game_mode(data["game_mode"])
elif data["command"] == "set_team_type":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_team_type(data["team_type"])
elif data["command"] == "set_scoring_type":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_scoring_type(data["scoring_type"])
elif data["command"] == "set_mods":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_mods(data["mods"])
elif data["command"] == "set_map_status":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_map_status(data["map_status"])
elif data["command"] == "set_host":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_host(data["username"])
elif data["command"] == "kick_user":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.kick_user(data["username"])
elif data["command"] == "set_title":
if data["title"]:
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_title(data["title"])
elif data["command"] == "add_broadcast":
channel = self.bot.get_channel(data["channel"])
if channel:
channel.add_broadcast(data["message"], data["secs"])
elif data["command"] == "del_broadcast":
channel = self.bot.get_channel(data["channel"])
if channel:
channel.del_broadcast(data["broadcast_id"])
elif data["command"] == "add_player_blacklist":
if data["global"]:
self.bot.add_player_blacklist(data["username"])
else:
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.add_player_blacklist(data["username"])
elif data["command"] == "del_player_blacklist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.del_player_blacklist(data["username"])
elif data["command"] == "del_global_player_blacklist":
self.bot.del_player_blacklist(data["username"])
elif data["command"] == "del_artist_whitelist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.del_artist_whitelist(data["artist"])
elif data["command"] == "del_artist_blacklist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.del_artist_blacklist(data["artist"])
elif data["command"] == "del_creator_whitelist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.del_beatmap_creator_whitelist(data["creator"])
elif data["command"] == "del_creator_blacklist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.del_beatmap_creator_blacklist(data["creator"])
elif data["command"] == "add_artist_whitelist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.add_artist_whitelist(data["artist"])
elif data["command"] == "add_artist_blacklist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.add_artist_blacklist(data["artist"])
elif data["command"] == "add_creator_whitelist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.add_beatmap_creator_whitelist(data["creator"])
elif data["command"] == "add_creator_blacklist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.add_beatmap_creator_blacklist(data["creator"])
elif data["command"] == "del_beatmap_blacklist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.del_beatmap_blacklist(data["beatmapID"])
elif data["command"] == "del_beatmap_whitelist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.del_beatmap_whitelist(data["beatmapID"])
elif data["command"] == "add_beatmap_blacklist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.add_beatmap_blacklist(data["beatmapID"])
elif data["command"] == "add_beatmap_whitelist":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.add_beatmap_whitelist(data["beatmapID"])
elif data["command"] == "set_advanced_options":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_beatmap_checker(data["beatmap_checker"])
channel.set_allow_convert(data["allow_convert"])
channel.set_allow_unsubmitted(data["allow_unsubmitted"])
channel.maintain_title(data["maintain_title"])
channel.maintain_password(data["maintain_password"])
channel.maintain_size(data["maintain_size"])
channel.start_on_players_ready(data["autostart"])
channel.set_autostart_timer(True, data["autostart_timer"])
channel.set_welcome_message(data["welcome_message"])
channel.auto_download(data["auto_download"], data["auto_download_path"], data["auto_open"], data["download_video"])
self.bot.set_osu_directory(data["osu_directory"])
self.bot.chimu.set_redownload(data["redownload_owned_beatmaps"])
elif data["command"] == "set_password":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_password(data["password"])
elif data["command"] == "fetch_user_profile":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
if self.__current_user_profile["username"] != data["username"]:
self.__current_user_profile = self.bot.fetch_user_profile(data["username"])
self.__current_user_profile["country_code"] = self.__current_user_profile["country_code"].lower()
elif data["command"] == "set_ar_range":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_ar_range(data["range"])
elif data["command"] == "set_od_range":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_od_range(data["range"])
elif data["command"] == "set_cs_range":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_cs_range(data["range"])
elif data["command"] == "set_hp_range":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_hp_range(data["range"])
elif data["command"] == "set_bpm_range":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_bpm_range(data["range"])
elif data["command"] == "set_length_range":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_length_range(data["range"])
elif data["command"] == "set_diff_range":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_diff_range(data["range"])
elif data["command"] == "set_size":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.set_size(data["size"])
elif data["command"] == "import_config":
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.import_config(data["url"])
elif data["command"] == "clone_channel":
channel1 = self.bot.get_channel(data["channel1"])
channel2 = self.bot.get_channel(data["channel2"])
self.bot.clone_channel(channel1, channel2)
elif data["command"] == "refresh_logic_profiles":
self.bot.refresh_logic_profiles()
elif data["command"] == "del_logic_profile":
self.bot.del_logic_profile(data["profile"])
elif data["command"] == "download_logic_profile":
self.bot.logic_profile_download(data["url"])
elif data["command"] == "get_logic_profile_link":
self.bot.get_logic_profile_link(data["profile"])
elif data["command"] == "authenticate":
self.send_message("success", conn)
if "channel" in data:
channel = self.bot.get_channel(data["channel"])
if channel and channel.is_game():
channel.get_config_link()
def start(self, running=False):
if not running:
x = threading.Thread(target=self.start, args=(True,))
x.setDaemon(True)
x.start()
else:
# start websocket server
self.__ws.listen()
# start webapp server
self.__webapp_sock.bind((self.__host, self.__webapp_port))
self.__webapp_sock.listen()
# self.__update_loop()
self.bot.log("-- Webapp server started at http://" + self.__host + ":" + str(self.__webapp_port) + "/ --")
if not self.bot.verbose:
print("-- Webapp server started at http://" + self.__host + ":" + str(self.__webapp_port) + "/ --")
ws_host = self.__ws.get_host()
if ws_host == "0.0.0.0":
try:
ws_host = requests.get('https://checkip.amazonaws.com').text.strip()
except:
pass
while True:
try:
conn, addr = self.__webapp_sock.accept()
conn.recv(1024)
# header
text = 'HTTP/1.0 200 OK\n'
text += 'Content-Type: text/html\n\n'
f = open("webapp/index.html", "r", encoding="utf8")
text += f.read()
f.close()
text = text.replace("ws://localhost:9876", "ws://" + ws_host + ":" + str(self.__ws.get_port()), 1)
try:
conn.sendall(text.encode())
except ConnectionAbortedError:
pass
conn.close()
except OSError:
return
def send_message(self, message, conn=None):
if not conn:
for conn in self.__ws.get_clients():
if self.bot.is_authenticate() and not message == "authenticate" and not message == "success":
message = self.crypto.encrypt(message)
self.__ws.send(conn, message)
else:
if self.bot.is_authenticate() and not message == "authenticate" and not message == "success":
message = self.crypto.encrypt(message)
self.__ws.send(conn, message)
def update(self, conn=None):
data = {"channels": {}}
channels = self.bot.get_channels().copy()
for channel in channels:
data["channels"][channel] = channels[channel].get_attributes()
data["channels"][channel]["total_users"] = len(data["channels"][channel]["users"])
data["channels"][channel]["users"] = data["channels"][channel]["users"][:self.__user_num]
if "mp_" in channel:
data["channels"][channel]["host"] = channels[channel].get_host()
else:
data["channels"][channel]["host"] = ""
data["channels"][channel]["in_progress"] = False
data["channels"][channel]["slots"] = {int(data["channels"][channel]["users"].index(user)): {"username": user} for user in data["channels"][channel]["users"]}
if "commands" in data["channels"][channel]:
del data["channels"][channel]["commands"]
data["pm"] = self.bot.get_personal_message_log()
data["logic_profiles"] = list(self.bot.get_logic_profiles().keys())
data["logic_profile_links"] = self.bot.get_logic_profile_links()
data["current_user_profile"] = self.__current_user_profile
data["bot_username"] = self.bot.get_username()
data["redownload_owned_beatmaps"] = self.bot.chimu.is_redownload()
data["osu_directory"] = self.bot.get_osu_directory()
data["making_room"] = self.__making_room
data["global_player_blacklist"] = self.bot.get_player_blacklist()
self.send_message(json.dumps(data), conn)
def set_ws_port(self, port):
self.__ws.set_port(port)
def set_webapp_port(self, port):
self.__webapp_port = port
def stop(self):
self.__ws.stop()
self.__webapp_sock.close()
def get_host(self):
return self.__host
``` |
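As a rough wiring sketch (not taken from the repository), the controller can be started as below; `bot` is assumed to be an already-constructed bot instance from this framework that provides the methods the controller calls (`get_password`, `log`, `get_channels`, and so on):
```python
# Hypothetical wiring sketch; `bot` is assumed to come from the surrounding framework.
from webapp.controller import Controller

controller = Controller(bot, host="0.0.0.0", ws_port=9876, webapp_port=8080)
controller.start()   # serves the web app and websocket on a daemon thread
controller.update()  # pushes the current bot/channel state to all connected clients
```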
{
"source": "jrandall/thrifty-builder",
"score": 2
} |
#### File: jrandall/thrifty-builder/setup.py
```python
from setuptools import setup, find_packages
from thriftybuilder.meta import VERSION, DESCRIPTION, PACKAGE_NAME, EXECUTABLE_NAME
try:
from pypandoc import convert
def read_markdown(file: str) -> str:
return convert(file, "rst")
except ImportError:
def read_markdown(file: str) -> str:
return open(file, "r").read()
setup(
name=PACKAGE_NAME,
version=VERSION,
packages=find_packages(exclude=["tests"]),
install_requires=open("requirements.txt", "r").readlines(),
url="https://github.com/wtsi-hgi/thrifty-builder",
license="MIT",
description=DESCRIPTION,
long_description=read_markdown("README.md"),
entry_points={
"console_scripts": [
f"{EXECUTABLE_NAME}={PACKAGE_NAME}.cli:entrypoint"
]
},
zip_safe=True
)
```
#### File: thrifty-builder/thriftybuilder/configuration.py
```python
import json
import re
from json import JSONEncoder, JSONDecoder
from typing import Iterable, Dict, Callable
import os
import yaml
from hgijson import JsonPropertyMapping, MappingJSONEncoderClassBuilder, MappingJSONDecoderClassBuilder
from jinja2 import Template
from thriftybuilder.build_configurations import DockerBuildConfiguration
from thriftybuilder.containers import BuildConfigurationContainer
from thriftybuilder.storage import ChecksumStorage, DiskChecksumStorage, ConsulChecksumStorage, MemoryChecksumStorage
DOCKER_PROPERTY = "docker"
DOCKER_IMAGES_PROPERTY = "images"
DOCKER_IMAGE_NAME_PROPERTY = "name"
DOCKER_IMAGE_DOCKERFILE_PROPERTY = "dockerfile"
DOCKER_IMAGE_CONTEXT_PROPERTY = "context"
DOCKER_IMAGE_TAGS_PROPERTY = "tags"
DOCKER_IMAGE_ALWAYS_UPLOAD_PROPERTY = "always_upload"
DOCKER_REGISTRIES_PROPERTY = "registries"
DOCKER_REGISTRY_URL = "url"
DOCKER_REGISTRY_USERNAME = "username"
DOCKER_REGISTRY_PASSWORD = "password"
CHECKSUM_STORAGE_PROPERTY = "checksum_storage"
CHECKSUM_STORAGE_TYPE_PROPERTY = "type"
CHECKSUM_STORAGE_TYPE_VALUE_MAP = {
DiskChecksumStorage: "local",
ConsulChecksumStorage: "consul",
MemoryChecksumStorage: "stdio"
}
CHECKSUM_STORAGE_TYPE_LOCAL_PATH_PROPERTY = "path"
CHECKSUM_STORAGE_TYPE_CONSUL_DATA_KEY_PROPERTY = "key"
CHECKSUM_STORAGE_TYPE_CONSUL_LOCK_KEY_PROPERTY = "lock"
CHECKSUM_STORAGE_TYPE_CONSUL_URL_PROPERTY = "url"
CHECKSUM_STORAGE_TYPE_CONSUL_TOKEN_PROPERTY = "token"
class DockerRegistry:
"""
Docker registry.
"""
@staticmethod
def default_repository_location_formatter(docker_registry: "DockerRegistry", image_name: str) -> str:
builder = [docker_registry.url]
if docker_registry.namespace is not None:
builder.append(docker_registry.namespace)
builder.append(image_name)
return "/".join(builder)
def __init__(self, url: str, namespace: str=None, username: str=None, password: str=None,
repository_location_formatter: Callable[["DockerRegistry", str], str]
=default_repository_location_formatter.__func__):
"""
Constructor.
:param url: URL of the docker registry (the protocol prefix, if present, is stripped)
:param namespace: namespace of repository
:param username: username of upload user
:param password: password of upload user
:param repository_location_formatter: used to form the repository location for image upload
"""
self.url = re.sub(".*//", "", url)
self.username = username
self.password = password
self.namespace = namespace
self.repository_location_formatter = repository_location_formatter
def get_repository_location(self, image_name: str) -> str:
return self.repository_location_formatter(self, image_name)
def __str__(self):
return json.dumps({"url": self.url, "namespace": self.namespace, "username": self.username,
"password": self.password})
class Configuration:
"""
Build configuration.
"""
def __init__(self, docker_build_configurations: BuildConfigurationContainer[DockerBuildConfiguration]=None,
docker_registries: Iterable[DockerRegistry]=(), checksum_storage: ChecksumStorage=None):
self.docker_build_configurations = docker_build_configurations if docker_build_configurations is not None \
else BuildConfigurationContainer[DockerBuildConfiguration]()
self.docker_registries = list(docker_registries)
self.checksum_storage = checksum_storage if checksum_storage is not None else MemoryChecksumStorage()
def read_configuration(location: str) -> Configuration:
"""
Reads the configuration file in the given location.
:param location: location of the configuration file
:return: parsed configuration from file
"""
location = _process_path(location)
with open(location, "r") as file:
file_context = file.read()
rendered_file_contents = Template(file_context).render(env=os.environ)
raw_configuration = yaml.load(rendered_file_contents)
# Pre-process to convert relative paths to absolute
paths_relative_to = os.path.abspath(os.path.dirname(location))
if CHECKSUM_STORAGE_TYPE_LOCAL_PATH_PROPERTY in raw_configuration.get(CHECKSUM_STORAGE_PROPERTY, {}):
path = raw_configuration[CHECKSUM_STORAGE_PROPERTY][CHECKSUM_STORAGE_TYPE_LOCAL_PATH_PROPERTY]
raw_configuration[CHECKSUM_STORAGE_PROPERTY][CHECKSUM_STORAGE_TYPE_LOCAL_PATH_PROPERTY] = _process_path(
path, paths_relative_to)
raw_docker_images = raw_configuration.get(DOCKER_PROPERTY, {}).get(DOCKER_IMAGES_PROPERTY, [])
for raw_docker_image in raw_docker_images:
raw_docker_image[DOCKER_IMAGE_DOCKERFILE_PROPERTY] = _process_path(
raw_docker_image[DOCKER_IMAGE_DOCKERFILE_PROPERTY], paths_relative_to)
if DOCKER_IMAGE_CONTEXT_PROPERTY in raw_docker_image:
raw_docker_image[DOCKER_IMAGE_CONTEXT_PROPERTY] = _process_path(
raw_docker_image[DOCKER_IMAGE_CONTEXT_PROPERTY], paths_relative_to)
return ConfigurationJSONDecoder().decode_parsed(raw_configuration)
def _process_path(path: str, path_relative_to: str=os.getcwd()) -> str:
"""
Processes the given path.
:param path: path to process
:param path_relative_to: path to make given path relative to if it is relative
:return: absolute path
"""
path = os.path.expanduser(path)
return os.path.join(path_relative_to, path) if not os.path.isabs(path) else path
_disk_checksum_storage_mappings = [
JsonPropertyMapping(CHECKSUM_STORAGE_TYPE_LOCAL_PATH_PROPERTY, "storage_file_location", "storage_file_location")
]
DiskChecksumStorageJSONEncoder = MappingJSONEncoderClassBuilder(
DiskChecksumStorage, _disk_checksum_storage_mappings).build()
DiskChecksumStorageJSONDecoder = MappingJSONDecoderClassBuilder(
DiskChecksumStorage, _disk_checksum_storage_mappings).build()
_consul_checksum_storage_mappings = [
JsonPropertyMapping(CHECKSUM_STORAGE_TYPE_CONSUL_DATA_KEY_PROPERTY, "data_key", "data_key"),
JsonPropertyMapping(CHECKSUM_STORAGE_TYPE_CONSUL_LOCK_KEY_PROPERTY, "lock_key", "lock_key", optional=True),
JsonPropertyMapping(CHECKSUM_STORAGE_TYPE_CONSUL_URL_PROPERTY, "url", "url", optional=True),
JsonPropertyMapping(CHECKSUM_STORAGE_TYPE_CONSUL_TOKEN_PROPERTY, "token", "token", optional=True)
]
ConsulChecksumStorageJSONEncoder = MappingJSONEncoderClassBuilder(
ConsulChecksumStorage, _consul_checksum_storage_mappings).build()
ConsulChecksumStorageJSONDecoder = MappingJSONDecoderClassBuilder(
ConsulChecksumStorage, _consul_checksum_storage_mappings).build()
class ChecksumStorageJSONDecoder(JSONDecoder):
def decode(self, obj_as_json, **kwargs):
parsed_json = super().decode(obj_as_json)
if parsed_json[CHECKSUM_STORAGE_TYPE_PROPERTY] == CHECKSUM_STORAGE_TYPE_VALUE_MAP[MemoryChecksumStorage]:
return MemoryChecksumStorage()
return {
CHECKSUM_STORAGE_TYPE_VALUE_MAP[DiskChecksumStorage]: DiskChecksumStorageJSONDecoder(),
CHECKSUM_STORAGE_TYPE_VALUE_MAP[ConsulChecksumStorage]: ConsulChecksumStorageJSONDecoder()
}[parsed_json[CHECKSUM_STORAGE_TYPE_PROPERTY]].decode(obj_as_json)
class ChecksumStorageJSONEncoder(JSONEncoder):
def default(self, obj):
if isinstance(obj, MemoryChecksumStorage):
encoded = {}
else:
encoded = {
DiskChecksumStorage: DiskChecksumStorageJSONEncoder(),
ConsulChecksumStorage: ConsulChecksumStorageJSONEncoder()
}[type(obj)].default(obj)
encoded.update({
CHECKSUM_STORAGE_TYPE_PROPERTY: CHECKSUM_STORAGE_TYPE_VALUE_MAP[type(obj)]
})
return encoded
_docker_build_configuration_mappings = [
JsonPropertyMapping(DOCKER_IMAGE_NAME_PROPERTY,
object_constructor_parameter_name="image_name",
object_property_getter=lambda obj: obj.identifier),
JsonPropertyMapping(DOCKER_IMAGE_DOCKERFILE_PROPERTY, "dockerfile_location", "dockerfile_location"),
JsonPropertyMapping(DOCKER_IMAGE_CONTEXT_PROPERTY, "context", "context", optional=True),
JsonPropertyMapping(DOCKER_IMAGE_TAGS_PROPERTY, "tags", "tags", optional=True, collection_factory=set),
JsonPropertyMapping(DOCKER_IMAGE_ALWAYS_UPLOAD_PROPERTY, "always_upload", "always_upload", optional=True)
]
DockerBuildConfigurationJSONEncoder = MappingJSONEncoderClassBuilder(
DockerBuildConfiguration, _docker_build_configuration_mappings).build()
DockerBuildConfigurationJSONDecoder = MappingJSONDecoderClassBuilder(
DockerBuildConfiguration, _docker_build_configuration_mappings).build()
_docker_registry_mappings = [
JsonPropertyMapping(DOCKER_REGISTRY_URL, "url", "url"),
JsonPropertyMapping(DOCKER_REGISTRY_USERNAME, "username", optional=True),
JsonPropertyMapping(DOCKER_REGISTRY_PASSWORD, "password", optional=True)
]
DockerRegistryJSONEncoder = MappingJSONEncoderClassBuilder(DockerRegistry, _docker_registry_mappings).build()
DockerRegistryJSONDecoder = MappingJSONDecoderClassBuilder(DockerRegistry, _docker_registry_mappings).build()
_configuration_mappings = [
JsonPropertyMapping(
DOCKER_IMAGES_PROPERTY, "docker_build_configurations", "docker_build_configurations",
collection_factory=BuildConfigurationContainer, parent_json_properties=[DOCKER_PROPERTY],
encoder_cls=DockerBuildConfigurationJSONEncoder, decoder_cls=DockerBuildConfigurationJSONDecoder),
JsonPropertyMapping(
DOCKER_REGISTRIES_PROPERTY, "docker_registries", "docker_registries", parent_json_properties=[DOCKER_PROPERTY],
encoder_cls=DockerRegistryJSONEncoder, decoder_cls=DockerRegistryJSONDecoder, optional=True),
JsonPropertyMapping(
CHECKSUM_STORAGE_PROPERTY, "checksum_storage", "checksum_storage", encoder_cls=ChecksumStorageJSONEncoder,
decoder_cls=ChecksumStorageJSONDecoder, optional=True)
]
ConfigurationJSONEncoder = MappingJSONEncoderClassBuilder(Configuration, _configuration_mappings).build()
ConfigurationJSONDecoder = MappingJSONDecoderClassBuilder(Configuration, _configuration_mappings).build()
```
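For orientation, a configuration equivalent to what `read_configuration` produces can also be assembled directly in Python; this is only a sketch, and the image, paths, and registry details are placeholders:
```python
from thriftybuilder.build_configurations import DockerBuildConfiguration
from thriftybuilder.configuration import Configuration, DockerRegistry
from thriftybuilder.containers import BuildConfigurationContainer
from thriftybuilder.storage import MemoryChecksumStorage

# Placeholder values; the Dockerfile path is assumed to exist on disk.
build = DockerBuildConfiguration("example-image", "images/example/Dockerfile",
                                 context="images/example", tags=["latest"])
registry = DockerRegistry("registry.example.com", namespace="my-team",
                          username="uploader", password="not-a-real-password")
configuration = Configuration(
    docker_build_configurations=BuildConfigurationContainer([build]),
    docker_registries=[registry],
    checksum_storage=MemoryChecksumStorage())
```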
#### File: thrifty-builder/thriftybuilder/storage.py
```python
import json
import os
from abc import ABCMeta, abstractmethod
from copy import copy
from typing import Optional, Dict, Mapping, Type
from urllib.parse import urlparse
from thriftybuilder.common import MissingOptionalDependencyError
class ChecksumRetriever(metaclass=ABCMeta):
"""
Retriever of mappings between configurations, identified by ID, and checksums.
"""
@abstractmethod
def get_checksum(self, configuration_id: str) -> Optional[str]:
"""
Gets the checksum associated to the given configuration ID.
:param configuration_id: the ID of the configuration
:return: the associated checksum or `None` if none stored
"""
@abstractmethod
def get_all_checksums(self) -> Dict[str, str]:
"""
Gets all of the identifier -> checksum mappings.
:return: all stored mappings
"""
class ChecksumStorage(ChecksumRetriever, metaclass=ABCMeta):
"""
Store of mappings between configurations, identified by ID, and checksums.
"""
@abstractmethod
def set_checksum(self, configuration_id: str, checksum: str):
"""
Sets the checksum associated to the given configuration ID.
:param configuration_id: the ID of the configuration
:param checksum: the checksum associated to the configuration
"""
def __init__(self, configuration_checksum_mappings: Mapping[str, str]=None):
if configuration_checksum_mappings is not None:
self.set_all_checksums(configuration_checksum_mappings)
def __str__(self) -> str:
return json.dumps(self.get_all_checksums(), sort_keys=True)
def __hash__(self) -> hash:
return hash(str(self))
def set_all_checksums(self, configuration_checksum_mappings: Mapping[str, str]):
"""
Sets all of the checksums from the given id-checksum mappings.
:param configuration_checksum_mappings: id-checksum mappings
"""
for configuration_id, checksum in configuration_checksum_mappings.items():
self.set_checksum(configuration_id, checksum)
class MemoryChecksumStorage(ChecksumStorage):
"""
In-memory storage for configuration -> checksum mappings.
"""
def __init__(self, configuration_checksum_mappings: Mapping[str, str]=None):
self._data: Dict[str, str] = {}
super().__init__(configuration_checksum_mappings)
def get_checksum(self, configuration_id: str) -> Optional[str]:
return self._data.get(configuration_id, None)
def get_all_checksums(self) -> Dict[str, str]:
return copy(self._data)
def set_checksum(self, configuration_id: str, checksum: str):
self._data[configuration_id] = checksum
class DoubleSourceChecksumStorage(ChecksumStorage):
"""
Checksum storage with a primary store, used by default, and a secondary checksum source that is consulted
when a checksum is not found in the primary store.
"""
def __init__(self, local_checksum_storage: ChecksumStorage, external_checksum_retriever: ChecksumRetriever):
"""
Constructor.
:param local_checksum_storage: local checksum storage (takes precedence over external source)
:param external_checksum_retriever: external checksum retrieval source (for when checksum is not in local
storage)
"""
super().__init__()
self.primary_checksum_storage = local_checksum_storage
self.secondary_checksum_retriever = external_checksum_retriever
def get_checksum(self, configuration_id: str) -> Optional[str]:
primary_checksum = self.primary_checksum_storage.get_checksum(configuration_id)
if primary_checksum is not None:
return primary_checksum
return self.secondary_checksum_retriever.get_checksum(configuration_id)
def get_all_checksums(self) -> Dict[str, str]:
return {**self.secondary_checksum_retriever.get_all_checksums(),
**self.primary_checksum_storage.get_all_checksums()}
def set_checksum(self, configuration_id: str, checksum: str):
self.primary_checksum_storage.set_checksum(configuration_id, checksum)
class DiskChecksumStorage(ChecksumStorage):
"""
On-disk storage for configuration -> checksum mappings.
This storage was created to quickly get persistence - concurrent access is unsafe!
"""
def __init__(self, storage_file_location: str, *args, **kwargs):
self.storage_file_location = storage_file_location
super().__init__(*args, **kwargs)
def get_checksum(self, configuration_id: str) -> Optional[str]:
return self.get_all_checksums().get(configuration_id, None)
def get_all_checksums(self) -> Dict[str, str]:
if not os.path.exists(self.storage_file_location):
return {}
with open(self.storage_file_location, "r") as file:
return json.load(file)
def set_checksum(self, configuration_id: str, checksum: str):
configuration = None
if not os.path.exists(self.storage_file_location):
configuration = {}
with open(self.storage_file_location, "a+") as file:
file.seek(0)
if configuration is None:
configuration = json.load(file)
file.seek(0)
file.truncate()
configuration[configuration_id] = checksum
file.write(json.dumps(configuration))
class ConsulChecksumStorage(ChecksumStorage):
"""
Consul storage for configuration -> checksum mappings.
"""
CONSUL_HTTP_TOKEN_ENVIRONMENT_VARIABLE = "CONSUL_HTTP_TOKEN"
CONSUL_SESSION_LOCK_DEFAULT_TIMEOUT = 120
TEXT_ENCODING = "utf-8"
_IMPORT_MISSING_ERROR_MESSAGE = "To use Consul storage, please install the requirements in " \
"`consul-requirements.txt`"
@staticmethod
def _load_consul_class() -> Type:
"""
Loads the Consul class at run time (optional requirement).
:return: the Consul class
:raises MissingOptionalDependencyError: if a required dependency is not installed
"""
try:
from consul import Consul
except ImportError as e:
raise MissingOptionalDependencyError(ConsulChecksumStorage._IMPORT_MISSING_ERROR_MESSAGE) from e
return Consul
@staticmethod
def _load_consul_lock_manager() -> Type:
"""
Loads the ConsulLockManager class at run time (optional requirement).
:return: the ConsulLockManager class
:raises MissingOptionalDependencyError: if a required dependency is not installed
"""
try:
from consullock.managers import ConsulLockManager
except ImportError as e:
raise MissingOptionalDependencyError(ConsulChecksumStorage._IMPORT_MISSING_ERROR_MESSAGE) from e
return ConsulLockManager
@property
def url(self) -> str:
return self._consul_client.http.base_uri
@property
def token(self) -> str:
return self._consul_client.token
def __init__(self, data_key: str, lock_key: str, url: str=None, token: str=None, consul_client=None,
configuration_checksum_mappings: Mapping[str, str] = None):
Consul = ConsulChecksumStorage._load_consul_class()
ConsulLockManager = ConsulChecksumStorage._load_consul_lock_manager()
if url is not None and consul_client is not None:
raise ValueError("Cannot use both `url` and `consul_client`")
self.data_key = data_key
self.lock_key = lock_key
consul_client_kwargs: Dict = {}
if url is not None:
parsed_url = urlparse(url)
consul_client_kwargs["host"] = parsed_url.hostname
consul_client_kwargs["port"] = parsed_url.port
consul_client_kwargs["scheme"] = parsed_url.scheme if len(parsed_url.scheme) > 0 else "http"
self._consul_client = consul_client if consul_client is not None else Consul(**consul_client_kwargs)
if token is None:
token = os.environ.get(ConsulChecksumStorage.CONSUL_HTTP_TOKEN_ENVIRONMENT_VARIABLE, None)
if token is not None:
# Work around for https://github.com/cablehead/python-consul/issues/170
self._consul_client.token = token
self._consul_client.http.session.headers.update({"X-Consul-Token": token})
self._lock_manager = ConsulLockManager(
consul_client=self._consul_client,
session_ttl_in_seconds=ConsulChecksumStorage.CONSUL_SESSION_LOCK_DEFAULT_TIMEOUT)
super().__init__(configuration_checksum_mappings)
def get_checksum(self, configuration_id: str) -> Optional[str]:
return self.get_all_checksums().get(configuration_id)
def get_all_checksums(self) -> Dict[str, str]:
value = self._consul_client.kv.get(self.data_key)[1]
if value is None:
return {}
value = value["Value"].decode(ConsulChecksumStorage.TEXT_ENCODING)
return json.loads(value)
def set_checksum(self, configuration_id: str, checksum: str):
with self._lock_manager.acquire(self.lock_key):
value = self.get_all_checksums()
value[configuration_id] = checksum
self._consul_client.kv.put(self.data_key, json.dumps(value, sort_keys=True))
def set_all_checksums(self, configuration_checksum_mappings: Mapping[str, str]):
with self._lock_manager.acquire(self.lock_key):
value = self.get_all_checksums()
value.update(configuration_checksum_mappings)
self._consul_client.kv.put(self.data_key, json.dumps(value, sort_keys=True))
```
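A short sketch of the fallback behaviour of `DoubleSourceChecksumStorage`, using in-memory stores in place of a real external retriever (the IDs and checksums are made up):
```python
from thriftybuilder.storage import DoubleSourceChecksumStorage, MemoryChecksumStorage

local = MemoryChecksumStorage()
remote = MemoryChecksumStorage({"config-1": "abc123"})   # stands in for e.g. a Consul-backed retriever
storage = DoubleSourceChecksumStorage(local, remote)

assert storage.get_checksum("config-1") == "abc123"      # not in the primary store, so the secondary is consulted
storage.set_checksum("config-1", "def456")               # writes only touch the primary store
assert local.get_all_checksums() == {"config-1": "def456"}
assert remote.get_checksum("config-1") == "abc123"
```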
#### File: thriftybuilder/tests/_common.py
```python
import dockerfile
import os
import unittest
from abc import ABCMeta
from tempfile import mkdtemp, NamedTemporaryFile
from typing import List, Dict, Optional, Tuple, Iterable
import docker
import shutil
import yaml
from consul import Consul
from docker.errors import ImageNotFound, NullResource, NotFound
from useintest.modules.consul import ConsulServiceController, ConsulDockerisedService
from useintest.services.builders import DockerisedServiceControllerTypeBuilder
from useintest.services.models import DockerisedService
from thriftybuilder.build_configurations import DockerBuildConfiguration
from thriftybuilder.configuration import ConfigurationJSONEncoder, Configuration
DOCKERFILE_PATH = "Dockerfile"
FROM_DOCKER_COMMAND = "FROM"
RUN_DOCKER_COMMAND = "RUN"
ADD_DOCKER_COMMAND = "ADD"
COPY_DOCKER_COMMAND = "COPY"
_RANDOM_NAME = object()
# To avoid a nasty circular dependency, DO NOT move this import up
from thriftybuilder.tests._examples import name_generator, EXAMPLE_FROM_IMAGE_NAME
def create_docker_setup(
*, commands: Iterable[str]=None, context_files: Dict[str, Optional[str]]=None,
image_name: str=_RANDOM_NAME, tags: List[str]=None, always_upload: bool=False,
from_image_name: str=EXAMPLE_FROM_IMAGE_NAME) -> Tuple[str, DockerBuildConfiguration]:
"""
Creates a Docker setup.
:param commands: commands to put in the Dockerfile. If `None` and `from_image_name` is set, FROM will be set
:param context_files: dictionary where the key is the name of the context file and the value is its content
:param image_name: name of the image to setup a build configuration for
:param tags: list of strings to tag the built image with
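:param always_upload: value to set for `always_upload` on the created build configuration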
:param from_image_name: the image that the setup one is based off (FROM added to commands if not `None`)
:return: tuple where the first element is the directory that acts as the context and the second is the associated
build configuration
"""
if from_image_name is not None:
from_command = f"{FROM_DOCKER_COMMAND} {from_image_name}"
if commands is None:
commands = (from_command, )
else:
commands = (from_command, *commands)
parsed_commands = dockerfile.parse_string("\n".join(commands))
if len([command.cmd for command in parsed_commands if command.cmd.lower() == FROM_DOCKER_COMMAND.lower()]) != 1:
raise ValueError(f"Exactly one \"{FROM_DOCKER_COMMAND}\" command is expected: {commands}")
context_files = context_files if context_files is not None else {}
image_name = image_name if image_name != _RANDOM_NAME else f"{name_generator()}"
temp_directory = mkdtemp()
dockerfile_location = os.path.join(temp_directory, DOCKERFILE_PATH)
with open(dockerfile_location, "w") as file:
for command in commands:
file.write(f"{command}\n")
for location, value in context_files.items():
absolute_location = os.path.join(temp_directory, location)
os.makedirs(os.path.dirname(absolute_location), exist_ok=True)
with open(absolute_location, "w") as file:
if value is None:
value = ""
file.write(value)
return temp_directory, DockerBuildConfiguration(image_name, dockerfile_location, tags=tags, always_upload=always_upload)
class TestWithDockerBuildConfiguration(unittest.TestCase, metaclass=ABCMeta):
"""
Superclass for a test case that uses Docker build configurations.
"""
def setUp(self):
super().setUp()
self.docker_client = docker.from_env()
self._setup_locations: List[str] = []
self.images_to_delete: List[str] = []
def tearDown(self):
super().tearDown()
for location in self._setup_locations:
shutil.rmtree(location)
docker_client = docker.from_env()
# Nasty OO to avoid multiple-inheritance method invocation ordering problems
if isinstance(self, TestWithDockerRegistry):
additional: List[str] = []
for identifier in self.images_to_delete:
additional.append(f"{self.registry_location}/{identifier}")
self.images_to_delete.extend(additional)
for identifier in self.images_to_delete:
try:
docker_client.images.remove(identifier)
except (ImageNotFound, NullResource):
pass
docker_client.close()
self.docker_client.close()
def create_docker_setup(self, **kwargs) -> Tuple[str, DockerBuildConfiguration]:
"""
See `create_docker_setup`.
"""
setup_location, build_configuration = create_docker_setup(**kwargs)
self._setup_locations.append(setup_location)
self.images_to_delete.append(build_configuration.identifier)
return setup_location, build_configuration
def create_dependent_docker_build_configurations(self, number: int) -> List[DockerBuildConfiguration]:
configurations = []
for i in range(number):
image_name = name_generator(f"{i}-")
from_image_name = EXAMPLE_FROM_IMAGE_NAME if i == 0 else configurations[i - 1].identifier
_, configuration = self.create_docker_setup(image_name=image_name, from_image_name=from_image_name)
configurations.append(configuration)
return configurations
class TestWithConsulService(unittest.TestCase, metaclass=ABCMeta):
"""
Base class for tests that use a Consul service.
"""
@property
def consul_service(self) -> ConsulDockerisedService:
if self._consul_service is None:
self._consul_service = self._consul_controller.start_service()
return self._consul_service
@property
def consul_client(self) -> Consul:
if self._consul_client is None:
self._consul_client = self.consul_service.create_consul_client()
return self._consul_client
def setUp(self):
self._consul_controller = ConsulServiceController()
self._consul_service = None
self._consul_client = None
super().setUp()
def tearDown(self):
if self._consul_service is not None:
self._consul_controller.stop_service(self._consul_service)
class TestWithDockerRegistry(unittest.TestCase, metaclass=ABCMeta):
"""
Base class for tests that use a (local) Docker registry.
"""
_RegistryServiceController = DockerisedServiceControllerTypeBuilder(
repository="registry",
tag="2",
name="_RegistryServiceController",
start_detector=lambda log_line: "listening on" in log_line,
ports=[5000]).build()
@property
def registry_location(self) -> str:
return f"{self._registry_service.host}:{self._registry_service.port}"
@property
def _registry_service(self) -> DockerisedService:
if self._docker_registry_service is None:
self._docker_registry_service = self._registry_controller.start_service()
return self._docker_registry_service
def setUp(self):
self._registry_controller = TestWithDockerRegistry._RegistryServiceController()
self._docker_registry_service = None
super().setUp()
def tearDown(self):
super().tearDown()
if self._docker_registry_service is not None:
self._registry_controller.stop_service(self._docker_registry_service)
def is_uploaded(self, configuration: DockerBuildConfiguration) -> bool:
# Note: change to context manager if `DockerClient` gets support for one in the future
if len(configuration.tags) == 0:
return False
docker_client = docker.from_env()
try:
for tag in configuration.tags:
try:
docker_client.images.pull(f"{self.registry_location}/{configuration.name}", tag=tag)
except NotFound:
return False
return True
finally:
docker_client.close()
class TestWithConfiguration(unittest.TestCase, metaclass=ABCMeta):
"""
Base class for tests that use a configuration.
"""
def setUp(self):
super().setUp()
self._file_configuration_locations: List[str] = []
def tearDown(self):
super().tearDown()
for location in self._file_configuration_locations:
os.remove(location)
def configuration_to_file(self, configuration: Configuration) -> str:
"""
Writes the given configuration to a temp file.
:param configuration: the configuration to write to file
:return: location of the written file
"""
temp_file = NamedTemporaryFile(delete=False)
self._file_configuration_locations.append(temp_file.name)
file_configuration_as_json = ConfigurationJSONEncoder().default(configuration)
with open(temp_file.name, "w") as file:
yaml.dump(file_configuration_as_json, file, default_style="\"")
return temp_file.name
```
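Outside of a test case, the helper above can be exercised roughly as follows (assuming the package and its test dependencies are installed; the commands and file names are arbitrary examples):
```python
# Creates a throwaway build context directory plus a matching build configuration.
from thriftybuilder.tests._common import create_docker_setup

context_dir, build_configuration = create_docker_setup(
    commands=("RUN echo hello",),                 # a FROM line is prepended automatically
    context_files={"data.txt": "example data"},   # arbitrary example context file
    tags=["latest"])
print(build_configuration.identifier, context_dir)
```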
#### File: thriftybuilder/tests/_examples.py
```python
from uuid import uuid4
from thriftybuilder.tests._common import RUN_DOCKER_COMMAND
EXAMPLE_IMAGE_NAME = "hello-world-test"
EXAMPLE_FROM_IMAGE_NAME = "alpine"
EXAMPLE_IMAGE_NAME_1 = "example-1"
EXAMPLE_IMAGE_NAME_2 = "example-2"
EXAMPLE_FILE_NAME_1 = "example-1"
EXAMPLE_FILE_CONTENTS_1 = "testing1"
EXAMPLE_FILE_NAME_2 = "example-2"
EXAMPLE_FILE_CONTENTS_2 = "testing2"
EXAMPLE_RUN_COMMAND = f"{RUN_DOCKER_COMMAND} echo test"
EXAMPLE_1_CONFIGURATION_ID = "example-configuration-id-1"
EXAMPLE_1_CHECKSUM = "c02696b94a1787cdbe072931225d4dbc"
EXAMPLE_2_CONFIGURATION_ID = "example-configuration-id-2"
EXAMPLE_2_CHECKSUM = "f9f601085a99e4e1531bdad52771084b"
EXAMPLE_1_CONSUL_KEY = "example-key-1"
EXAMPLE_2_CONSUL_KEY = "example-key-2"
def name_generator(identifier: str= "") -> str:
"""
Generates a unique name.
:param identifier: identifier to add to the name
:return: the generated name
"""
return f"thrifty-builder-test-{identifier}{uuid4()}"
```
#### File: thriftybuilder/tests/test_build_configurations.py
```python
import os
import unittest
import tempfile
from thriftybuilder.meta import PACKAGE_NAME
from thriftybuilder.build_configurations import DockerBuildConfiguration, _ADD_DOCKER_COMMAND, \
_COPY_DOCKER_COMMAND, DOCKER_IGNORE_FILE
from thriftybuilder.containers import BuildConfigurationContainer
from thriftybuilder.tests._common import TestWithDockerBuildConfiguration, TestWithConfiguration, DOCKERFILE_PATH
from thriftybuilder.tests._examples import EXAMPLE_IMAGE_NAME, EXAMPLE_FROM_IMAGE_NAME, EXAMPLE_FILE_NAME_1
from thriftybuilder.configuration import Configuration, read_configuration
_EXAMPLE_TAG_1 = "example-tag-1"
_EXAMPLE_TAG_2 = "example-tag-2"
class TestBuildConfigurationContainer(TestWithDockerBuildConfiguration):
"""
Tests for `BuildConfigurationContainer`.
"""
def setUp(self):
super().setUp()
_, self.configuration = self.create_docker_setup()
self.container = BuildConfigurationContainer[DockerBuildConfiguration]()
def test_setup_with_items(self):
configurations = [self.create_docker_setup(image_name=f"{PACKAGE_NAME}-{i}")[1] for i in range(5)]
container = BuildConfigurationContainer(configurations)
self.assertCountEqual(configurations, container)
def test_len(self):
length = 5
self.container.add_all([self.create_docker_setup(image_name=f"{PACKAGE_NAME}-{i}")[1] for i in range(length)])
self.assertEqual(length, len(self.container))
def test_index_when_not_added(self):
_, default = self.create_docker_setup()
try:
self.container[self.configuration.identifier]
except KeyError:
pass
def test_index(self):
self.container.add(self.configuration)
self.assertEqual(self.configuration, self.container[self.configuration.identifier])
def test_get_when_not_added(self):
_, default = self.create_docker_setup()
self.assertEqual(default, self.container.get(self.configuration.identifier, default=default))
def test_get(self):
self.container.add(self.configuration)
self.assertEqual(self.configuration, self.container.get(self.configuration.identifier))
def test_add_when_not_added(self):
self.container.add(self.configuration)
self.assertCountEqual([self.configuration], self.container)
def test_add_when_added(self):
_, configuration_2 = self.create_docker_setup(image_name=self.configuration.identifier)
self.container.add(self.configuration)
self.container.add(configuration_2)
self.assertCountEqual([configuration_2], self.container)
def test_add_all(self):
configurations = [self.create_docker_setup(image_name=f"{PACKAGE_NAME}-{i}")[1] for i in range(5)]
self.container.add_all(configurations)
self.assertCountEqual(configurations, self.container)
def test_remove_when_not_added(self):
self.assertRaises(KeyError, self.container.remove, self.configuration)
def test_remove(self):
self.container.add(self.configuration)
assert len(self.container) == 1 and self.container[self.configuration.identifier] == self.configuration
self.container.remove(self.configuration)
self.assertEqual(0, len(self.container))
class TestDockerBuildConfiguration(TestWithDockerBuildConfiguration):
"""
Tests for `DockerBuildConfiguration`.
"""
def test_identifier(self):
_, configuration = self.create_docker_setup(image_name=EXAMPLE_IMAGE_NAME)
self.assertEqual(EXAMPLE_IMAGE_NAME, configuration.identifier)
def test_invalid_identifier(self):
with self.assertRaises(ValueError):
self.create_docker_setup(image_name=f"{EXAMPLE_IMAGE_NAME}:")
def test_requires(self):
_, configuration = self.create_docker_setup(from_image_name=EXAMPLE_FROM_IMAGE_NAME)
self.assertCountEqual([EXAMPLE_FROM_IMAGE_NAME], configuration.requires)
def test_used_files_when_none_added(self):
_, configuration = self.create_docker_setup()
self.assertCountEqual([], configuration.used_files)
def test_used_files_when_one_add(self):
context_directory, configuration = self.create_docker_setup(
commands=(f"{_ADD_DOCKER_COMMAND} {EXAMPLE_FILE_NAME_1} /example", ),
context_files={EXAMPLE_FILE_NAME_1: None})
used_files = (os.path.relpath(file, start=context_directory) for file in configuration.used_files)
self.assertCountEqual([EXAMPLE_FILE_NAME_1], used_files)
def test_used_files_when_add_directory(self):
directory = "test"
example_file_paths = [f"{directory}/{suffix}" for suffix in ["a", "b", "c/d/e", "c/d/f"]]
context_directory, configuration = self.create_docker_setup(
commands=(f"{_ADD_DOCKER_COMMAND} {directory} /example", ),
context_files={file_path: None for file_path in example_file_paths})
used_files = (os.path.relpath(file, start=context_directory) for file in configuration.used_files)
self.assertCountEqual(example_file_paths, used_files)
def test_used_files_when_multiple_add(self):
example_file_paths = ["a", "b", "c/d"]
context_directory, configuration = self.create_docker_setup(
commands=[f"{_ADD_DOCKER_COMMAND} {file_path} /{file_path}" for file_path in example_file_paths],
context_files={file_path: None for file_path in example_file_paths})
used_files = (os.path.relpath(file, start=context_directory) for file in configuration.used_files)
self.assertCountEqual(example_file_paths, used_files)
def test_used_files_when_multiple_add_and_copy(self):
example_add_file_paths = ("a", "b", "c/d", "e/f/g")
copy_add_commands = []
for i in range(len(example_add_file_paths)):
command = _ADD_DOCKER_COMMAND if i % 2 == 0 else _COPY_DOCKER_COMMAND
copy_add_commands.append(f"{command} {example_add_file_paths[i]} /{example_add_file_paths[i]}")
context_directory, configuration = self.create_docker_setup(
commands=copy_add_commands,
context_files={file_path: None for file_path in example_add_file_paths})
used_files = (os.path.relpath(file, start=context_directory) for file in configuration.used_files)
self.assertCountEqual(example_add_file_paths, used_files)
def test_from_image_name(self):
_, configuration = self.create_docker_setup(from_image_name=EXAMPLE_FROM_IMAGE_NAME)
self.assertEqual(EXAMPLE_FROM_IMAGE_NAME, configuration.from_image)
def test_dockerfile_location(self):
context_location, configuration = self.create_docker_setup()
self.assertEqual(os.path.join(context_location, DOCKERFILE_PATH), configuration.dockerfile_location)
def test_always_upload_false(self):
context_location, configuration = self.create_docker_setup(always_upload=False)
self.assertFalse(configuration.always_upload)
def test_always_upload_true(self):
context_location, configuration = self.create_docker_setup(always_upload=True)
self.assertTrue(configuration.always_upload)
def test_context(self):
context_location, configuration = self.create_docker_setup()
self.assertEqual(context_location, configuration.context)
def test_get_ignored_files_when_no_ignore_file(self):
_, configuration = self.create_docker_setup()
self.assertEqual(0, len(configuration.get_ignored_files()))
def test_get_ignored_files_when_ignore_file(self):
ignore_file_patterns = (".abc", "abc", "*.tmp", "all/tmp/*")
files_to_ignore = (".abc", "abc", "test/abc", "test/test/abc", "test/test/.abc", "test/test/this.tmp",
"all/tmp/files")
other_files = ("test/abc.abc", "other")
_, configuration = self.create_docker_setup(context_files=dict(
**{file_name: None for file_name in files_to_ignore},
**{file_name: None for file_name in other_files},
**{DOCKER_IGNORE_FILE: "\n".join(ignore_file_patterns)}))
self.assertCountEqual((f"{configuration.context}/{file_name}" for file_name in files_to_ignore),
configuration.get_ignored_files())
def test_tags(self):
tags = ["version", "latest"]
other_tag = "other"
_, configuration = self.create_docker_setup(image_name=f"{EXAMPLE_IMAGE_NAME}:{other_tag}", tags=tags)
self.assertSetEqual(set(tags + [other_tag]), configuration.tags)
def test_default_tag(self):
default_tag = "latest" # FIXME maybe not hard code this value?
_, configuration = self.create_docker_setup(image_name=f"{EXAMPLE_IMAGE_NAME}")
self.assertEqual(1, len(configuration.tags))
self.assertEqual(default_tag, list(configuration.tags)[0])
class TestDockerBuildConfigurationWithConfiguration(TestWithDockerBuildConfiguration, TestWithConfiguration):
def test_full_docker_build_configuration(self):
context_location, conf = self.create_docker_setup()
docker_build_config = DockerBuildConfiguration(image_name="example-image-name", dockerfile_location=conf.dockerfile_location, context=context_location, tags=["{{ env['EXAMPLE_TAG_1'] }}","{{ env['EXAMPLE_TAG_2'] }}","example-non-templated-tag"], always_upload=True)
configuration = Configuration(docker_build_configurations=BuildConfigurationContainer[DockerBuildConfiguration]([docker_build_config,]))
configuration_location = self.configuration_to_file(configuration)
os.environ["EXAMPLE_TAG_1"] = _EXAMPLE_TAG_1
os.environ["EXAMPLE_TAG_2"] = _EXAMPLE_TAG_2
configuration = read_configuration(configuration_location)
self.assertEqual(1, len(configuration.docker_build_configurations))
docker_build_config = list(configuration.docker_build_configurations)[0]
self.assertEqual("example-image-name", docker_build_config.name)
self.assertEqual("example-image-name", docker_build_config.identifier)
self.assertEqual(conf.dockerfile_location, docker_build_config.dockerfile_location)
self.assertEqual(context_location, docker_build_config.context)
self.assertSetEqual(set([_EXAMPLE_TAG_1, _EXAMPLE_TAG_2, "example-non-templated-tag"]), docker_build_config.tags)
self.assertTrue(docker_build_config.always_upload)
def test_full_docker_build_configuration_from_string(self):
context_location_1, conf_1 = self.create_docker_setup()
context_location_2, conf_2 = self.create_docker_setup()
with tempfile.NamedTemporaryFile(delete=False) as config_file:
config_file.write(f"""
"checksum_storage":
"type": "stdio"
"docker":
"images":
- "always_upload": true
"context": "{context_location_1}"
"dockerfile": "{conf_1.dockerfile_location}"
"name": "example-image-name-1"
"tags":
- "example-non-templated-tag"
- "{{{{ env['EXAMPLE_TAG_1'] }}}}"
- "always_upload": false
"context": "{context_location_2}"
"dockerfile": "{conf_2.dockerfile_location}"
"name": "example-image-name-2"
"tags":
- "{{{{ env['EXAMPLE_TAG_2'] }}}}"
- "example-non-templated-tag"
"registries": []
""".encode())
configuration_location = config_file.name
config_file.close()
os.environ["EXAMPLE_TAG_1"] = _EXAMPLE_TAG_1
os.environ["EXAMPLE_TAG_2"] = _EXAMPLE_TAG_2
configuration = read_configuration(configuration_location)
self.assertEqual(2, len(configuration.docker_build_configurations))
docker_build_config_1 = configuration.docker_build_configurations.get("example-image-name-1")
self.assertEqual("example-image-name-1", docker_build_config_1.name)
self.assertEqual("example-image-name-1", docker_build_config_1.identifier)
self.assertEqual(conf_1.dockerfile_location, docker_build_config_1.dockerfile_location)
self.assertEqual(context_location_1, docker_build_config_1.context)
self.assertSetEqual(set([_EXAMPLE_TAG_1, "example-non-templated-tag"]), docker_build_config_1.tags)
self.assertTrue(docker_build_config_1.always_upload)
docker_build_config_2 = configuration.docker_build_configurations.get("example-image-name-2")
self.assertEqual("example-image-name-2", docker_build_config_2.name)
self.assertEqual("example-image-name-2", docker_build_config_2.identifier)
self.assertEqual(conf_2.dockerfile_location, docker_build_config_2.dockerfile_location)
self.assertEqual(context_location_2, docker_build_config_2.context)
self.assertSetEqual(set([_EXAMPLE_TAG_2, "example-non-templated-tag"]), docker_build_config_2.tags)
self.assertFalse(docker_build_config_2.always_upload)
if __name__ == "__main__":
unittest.main()
```
#### File: thriftybuilder/tests/test_storage.py
```python
import os
import unittest
from abc import ABCMeta, abstractmethod
from tempfile import NamedTemporaryFile
from thriftybuilder.storage import ChecksumStorage, MemoryChecksumStorage, DiskChecksumStorage, ConsulChecksumStorage, \
DoubleSourceChecksumStorage
from thriftybuilder.tests._common import TestWithConsulService
from thriftybuilder.tests._examples import EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM, EXAMPLE_2_CONFIGURATION_ID, \
EXAMPLE_2_CHECKSUM, EXAMPLE_1_CONSUL_KEY, EXAMPLE_2_CONSUL_KEY
class _TestChecksumStorage(unittest.TestCase, metaclass=ABCMeta):
"""
Tests for `ChecksumStorage` subclasses.
"""
@abstractmethod
def create_storage(self) -> ChecksumStorage:
"""
Creates storage manager to be tested.
:return: the created storage manager
"""
def setUp(self):
super().setUp()
self.storage = self.create_storage()
def test_get_when_not_set(self):
self.assertIsNone(self.storage.get_checksum(EXAMPLE_1_CONFIGURATION_ID))
def test_get_when_multiple(self):
self.storage.set_checksum("other", "value")
self.storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.assertEqual(EXAMPLE_1_CHECKSUM, self.storage.get_checksum(EXAMPLE_1_CONFIGURATION_ID))
def test_get_all_checksums_when_none(self):
self.assertEqual(0, len(self.storage.get_all_checksums()))
def test_get_all_checksums(self):
self.storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.storage.set_checksum(EXAMPLE_2_CONFIGURATION_ID, EXAMPLE_2_CHECKSUM)
self.assertEqual({EXAMPLE_1_CONFIGURATION_ID: EXAMPLE_1_CHECKSUM, EXAMPLE_2_CONFIGURATION_ID: EXAMPLE_2_CHECKSUM},
self.storage.get_all_checksums())
def test_set_when_not_set(self):
self.storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.assertEqual({EXAMPLE_1_CONFIGURATION_ID: EXAMPLE_1_CHECKSUM}, self.storage.get_all_checksums())
def test_set_when_set(self):
self.storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, "old")
self.storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.assertEqual({EXAMPLE_1_CONFIGURATION_ID: EXAMPLE_1_CHECKSUM}, self.storage.get_all_checksums())
def test_set_all_checksums(self):
self.storage.set_all_checksums(
{EXAMPLE_1_CONFIGURATION_ID: EXAMPLE_1_CHECKSUM, EXAMPLE_2_CONFIGURATION_ID: EXAMPLE_2_CHECKSUM})
self.assertEqual(EXAMPLE_1_CHECKSUM, self.storage.get_checksum(EXAMPLE_1_CONFIGURATION_ID))
self.assertEqual(EXAMPLE_2_CHECKSUM, self.storage.get_checksum(EXAMPLE_2_CONFIGURATION_ID))
class TestMemoryChecksumStorage(_TestChecksumStorage):
"""
Tests for `MemoryChecksumStorage`.
"""
def create_storage(self) -> ChecksumStorage:
return MemoryChecksumStorage()
class TestDoubleSourceChecksumStorage(_TestChecksumStorage):
"""
Tests for `DoubleSourceChecksumStorage`.
"""
def setUp(self):
self.local_storage = MemoryChecksumStorage()
self.remote_storage = MemoryChecksumStorage()
super().setUp()
def create_storage(self) -> ChecksumStorage:
return DoubleSourceChecksumStorage(self.local_storage, self.remote_storage)
def test_get_when_only_in_local(self):
self.local_storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.assertEqual({EXAMPLE_1_CONFIGURATION_ID: EXAMPLE_1_CHECKSUM}, self.storage.get_all_checksums())
def test_get_when_only_in_external(self):
self.remote_storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.assertEqual({EXAMPLE_1_CONFIGURATION_ID: EXAMPLE_1_CHECKSUM}, self.storage.get_all_checksums())
def test_get_when_in_local_and_external(self):
self.local_storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.remote_storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_2_CHECKSUM)
self.assertEqual(EXAMPLE_1_CHECKSUM, self.storage.get_checksum(EXAMPLE_1_CONFIGURATION_ID))
self.assertEqual({EXAMPLE_1_CONFIGURATION_ID: EXAMPLE_1_CHECKSUM}, self.storage.get_all_checksums())
def test_set_only_affects_local(self):
self.storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.assertEqual({EXAMPLE_1_CONFIGURATION_ID: EXAMPLE_1_CHECKSUM}, self.local_storage.get_all_checksums())
self.assertEqual({}, self.remote_storage.get_all_checksums())
class TestDiskChecksumStorage(_TestChecksumStorage):
"""
Tests for `DiskChecksumStorage`.
"""
def setUp(self):
self._temp_file = NamedTemporaryFile().name
super().setUp()
def tearDown(self):
if os.path.exists(self._temp_file):
os.remove(self._temp_file)
def create_storage(self) -> ChecksumStorage:
return DiskChecksumStorage(self._temp_file)
class TestConsulChecksumStorage(_TestChecksumStorage, TestWithConsulService):
"""
Tests for `ConsulChecksumStorage`.
"""
def create_storage(self) -> ChecksumStorage:
return ConsulChecksumStorage(EXAMPLE_1_CONSUL_KEY, EXAMPLE_2_CONSUL_KEY, consul_client=self.consul_client)
def test_create_init_with_explicit_configuration(self):
storage = ConsulChecksumStorage(EXAMPLE_1_CONSUL_KEY, EXAMPLE_2_CONSUL_KEY,
url=self.consul_client.http.base_uri, token=self.consul_client.token)
storage.set_checksum(EXAMPLE_1_CONFIGURATION_ID, EXAMPLE_1_CHECKSUM)
self.assertEqual(EXAMPLE_1_CHECKSUM, storage.get_checksum(EXAMPLE_1_CONFIGURATION_ID))
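# Remove the abstract base test class from the module namespace so unittest does not collect and run it directly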
del _TestChecksumStorage
if __name__ == "__main__":
unittest.main()
``` |
{
"source": "jrandiny/greedy-24",
"score": 3
} |
#### File: jrandiny/greedy-24/gui.py
```python
import pygame
import time
import os, sys, math, pygame, pygame.font, pygame.image
from pygame.locals import Rect
import utils
import random
import processor.greedy5 as processor
import abc
'''Image credit
Intro screen - https://pbs.twimg.com/media/DlE5j74XsAANQDX.jpg
Main Screen - http://goldwakepress.org/data/Gaming-Tutorial-Blackjack-Casino-Game-Tutorial.jpg
End Screen - https://gifer.com/en/9qR4
Card image - https://www.daniweb.com/attachments/0/Cards_gif.zip
'''
#global var
state = 1
#colors
gray = (128,128,128)
white = (255,255,255)
silver= (192,192,192)
navy =(0,0,128)
black =(0,0,0)
cyan=(0,255,255)
aqua =(127,255,212)
#text welcome
class textWavey:
def __init__(self, font, message, fontcolor, amount=10):
self.base = font.render(message, 0, fontcolor)
self.steps = range(0, self.base.get_width(), 2)
self.amount = amount
self.size = self.base.get_rect().inflate(0, amount).size
self.offset = 0.0
def animate(self):
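        # Blit the pre-rendered text in 2px-wide vertical strips, shifting each strip along a cosine wave to create the wavy effect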
s= pygame.Surface(self.size)
s.set_colorkey((0,0,0))
height = self.size[1]
self.offset += 0.1
for step in self.steps:
src = Rect(step, 0, 2, height)
dst = src.move(0, math.cos(self.offset + step*.02)*self.amount)
s.blit(self.base, dst, src)
return s
class screen(abc.ABC):
@abc.abstractmethod
def loop(self):
pass
@abc.abstractmethod
def eventLoop(self):
pass
class introScreen(screen):
def __init__(self):
bigfont = pygame.font.SysFont(None, 60)
self.renderer = textWavey(bigfont, 'Welcome..', white, 14)
def switchToMain(self):
global state
state = 2
def loop(self):
x = (width*0.001)
y = (height*0.001)
jokerImg = pygame.image.load('assets/joker1.png')
screen.blit(jokerImg, (x,y))
text = self.renderer.animate()
screen.blit(text, (300, 400))
button('Start',350,480,100,50,gray,silver,self.switchToMain)
def eventLoop(self):
pass
class mainScreen(screen):
def __init__(self,deck):
self.prog = -99
self.cardHover = False
self.repickShape = True
self.lastDeck = deck
self.lastDeckSym = []
self.ekspresi = ""
self.poin = 0
self.cardX = 350
self.cardY = 200
self.eks = pygame.image.load('assets/eks.png')
self.score =pygame.image.load('assets/score.png')
self.poinI = pygame.image.load('assets/poin.png')
self.cardIBack1 = pygame.image.load('assets/back1.png').convert()
self.cardIBack2 = pygame.image.load('assets/back2.png').convert()
self.jackImg = pygame.image.load('assets/kartu.png')
self.endI = pygame.image.load("assets/end.gif")
def switchToEnd(self):
global state
state = 3
def updateParam(self,deck,poin, ekspresi):
self.poin = poin
self.ekspresi = ekspresi
self.lastDeck = deck
self.repickShape = True
self.prog = 0
    #for picking cards from the deck
def pick_Card(self,deck,total):
if len(deck)>0:
out = utils.pick4card(deck)
poin,ekspresi= processor.calculate(out[0])
total+=poin
print(out[0])
return [False,out[1],total,ekspresi]
else:
return [True,[],total,None]
def animateSys(self,prog):
targetPosX = [-300,-100,100,300]
targetPosY = 200
originPosX = 350
originPosY = 200
if (len(self.lastDeck)>0):
if(self.repickShape):
self.lastDeckSym = ['S','H','D','C']
for i in range(1,4):
shape = ['S','H','D','C']
s = random.choice(shape)
self.lastDeckSym[i] = s
self.repickShape = False
for i in range(1,5):
x = originPosX + (targetPosX[i-1]) * (prog/100.0)
y = originPosY + (targetPosY) * (prog/100.0)
card(x,y,self.lastDeck[i-1],self.lastDeckSym[i-1])
else:
x = (width*0.001)
y = (height*0.001)
screen.blit(self.jackImg, (x,y))
button('Reshuffle',350,300,100,50,cyan,aqua,game_loop)
button('Exit ?',350,500,100,50,cyan,aqua,self.switchToEnd)
def loop(self):
x = (width*0.001)
y = (height*0.001)
cardX = 350 # x coordinate of card
cardY = 200 # y coordinate of card
screen.blit(self.jackImg, (x,y))
if self.cardHover:
cardI = self.cardIBack2
else:
cardI = self.cardIBack1
screen.blit(cardI,(cardX,cardY))
screen.blit(self.eks,(150,550))
screen.blit(self.score,(100,50))
screen.blit(self.poinI,(450,50))
poin_s=utils.countScore(self.ekspresi)
message_display(self.ekspresi,400,575)
message_display(str(round(poin_s,2)),500,70)
message_display(str(round(self.poin,2)),150,65)
if(self.prog!=-99):
if(self.prog<100):
self.prog+=1
self.animateSys(self.prog)
def eventLoop(self):
mouse = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
if self.cardX+50 > mouse[0] > self.cardX and self.cardY+80 > mouse[1] > self.cardY:
self.cardHover = True
if click[0]==1 :
_,deck,poin,ekspresi=self.pick_Card(self.lastDeck,self.poin)
self.updateParam(deck,poin,ekspresi)
else:
self.cardHover = False
class endingScreen(screen):
def __init__(self):
self.endI = pygame.image.load("assets/end.gif")
def switchToM(self):
global state
state = 2
def loop(self):
x = (width*0.001)
y = (height*0.001)
screen.blit(self.endI, (x,y))
button('Bye',150,330,100,50,white,cyan,quit)
button('Play again',450,330,100,50,white,cyan,self.switchToM)
def eventLoop(self):
pass
def text_objects(text, font):
textSurface = font.render(text, True, black)
return textSurface, textSurface.get_rect()
#for drawing a card
def card (x,y,nilai,s):
nilai=str(nilai)
cardImg =pygame.image.load('assets/'+s+nilai+'.gif')
screen.blit(cardImg, (x,y))
#for displaying text
def message_display(text,x,y):
a = pygame.font.SysFont('Times New Roman',35)
TextSurf, TextRect = text_objects(text,a)
TextRect.center =(x,y)
screen.blit(TextSurf, TextRect)
#start button or other buttons
def button(msg,x,y,w,h,ic,ac,action=None):
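    # Draw a button rectangle: use the active colour (ac) and trigger the action on left click while hovered, otherwise use the inactive colour (ic)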
mouse = pygame.mouse.get_pos()
click = pygame.mouse.get_pressed()
if x+w > mouse[0] > x and y+h > mouse[1] > y:
pygame.draw.rect(screen, ac,(x,y,w,h))
if click[0] == 1 and action != None:
action()
else:
pygame.draw.rect(screen, ic,(x,y,w,h))
smallText = pygame.font.SysFont("comicsansms",20)
textSurf, textRect = text_objects(msg, smallText)
textRect.center = ( (x+(w/2)), (y+(h/2)) )
screen.blit(textSurf, textRect)
def game_loop():
global state
deck = utils.getNewDeck()
intro = introScreen()
main = mainScreen(deck)
end = endingScreen()
screenObj = [intro, main, end]
while 1:
screenObj[state-1].loop()
for event in pygame.event.get():
if event.type == pygame.QUIT:
quit()
screenObj[state-1].eventLoop()
pygame.display.flip()
if __name__ == '__main__':
    #initialise the pygame window
pygame.font.init()
(width, height) = (800, 600)
screen = pygame.display.set_mode((width, height))
pygame.display.flip()
pygame.display.set_caption("24 game")
game_loop()
```
#### File: greedy-24/processor/greedy2.py
```python
import math
import utils
def getName():
return "<NAME>"
def calculate(param):
input = list(param)
    listoperations = [] #list of the operations used to combine the numbers
listnumbers = input
num = []
    listnumbers.sort(reverse=True) #sort the numbers in descending order
for i in range(4):
num.append(str(listnumbers[i]))
i=0
target = 24
cek = True
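    # Greedy heuristic: multiply while the current number is at most sqrt of the remaining target (and the next number is not 1), add otherwise, and subtract once the target has been overshot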
while (i<3):
m = listnumbers[i]
n = listnumbers[i+1]
if (target>=0):
if(m <= ((math.sqrt(target))) and n!=1):
if (n!=1):
listoperations.append('*')
listnumbers[i+1] = (m*n)
else:
listoperations.append('+')
if(cek==True):
target = target -m-n
else:
target = target -n
cek = False
else:
listoperations.append('+')
if(cek==True):
target = target -m-n
else:
target = target -n
cek = False
else:
listoperations.append('-')
if(cek==True):
target = target -m+n
else:
target = target +n
cek = False
i = i+1
listoperations.append('')
data = list(zip(num,listoperations))
for k in range(len(data)):
data[k] = ''.join(data[k])
data = ''.join(data)
return [utils.countScore(data),data]
```
#### File: greedy-24/processor/greedy6.py
```python
import utils
def getName():
return "<NAME>"
def calculate(param):
inputNum = list(param)
inputNum.sort(reverse = True)
input = []
for num in inputNum:
input.append(str(num))
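    # Each pair is [left target, right target, operator], chosen so that applying the operator to the two targets yields 24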
pairs = [[8,3,'*'],[6,4,'*'],[12,2,'*'],[25,1,'-'],[24,0,'+'],[23,1,'+'],[26,2,'-'],[27,3,'-'],[22,2,'+'],[21,3,'+']]
    posiblePick= [[[1,2],[3,4]],[[1,3],[2,4]],[[1,4],[2,3]]]  # the three ways to split the four cards into two pairs
lowestDelta = 99
lowestConfLeft = ""
lowestConfRight = ""
lowestConfCenter = ""
print(input)
for pair in pairs:
for pick in posiblePick:
for swap in [True,False]:
if(swap):
pick.reverse()
for internalSwap in [True,False]:
if(internalSwap):
pick[0].reverse()
pick[1].reverse()
for opr in ['+','-','*','/']:
for opr2 in ['+','-','*','/']:
deltaLeft = abs(eval(input[pick[0][0]-1] + opr + input[pick[0][1]-1])-pair[0])
deltaRight = abs(eval(input[pick[1][0]-1] + opr2 + input[pick[1][1]-1])-pair[1])
if((deltaLeft+deltaRight)<lowestDelta):
lowestConfLeft = "("+input[pick[0][0]-1] + opr + input[pick[0][1]-1]+")"
lowestConfRight = "("+input[pick[1][0]-1] + opr2 + input[pick[1][1]-1]+")"
lowestConfCenter = pair[2]
lowestDelta = deltaLeft+deltaRight
maxExpr = lowestConfLeft+lowestConfCenter+lowestConfRight
return [utils.countScore(maxExpr),maxExpr]
``` |
{
"source": "jrandiny/pam-android-fingerprint",
"score": 3
} |
#### File: pam-android-fingerprint/Desktop/main.py
```python
import requests
import pyotp
import ConfigParser
DEFAULT_USER = "nobody"
CONFIG_LOC = "/home/joshua/Dev/pam-android-fingerprint/Desktop/config"
def pam_sm_authenticate(pamh, flags, argv):
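    # PAM entry point: read the server URL and TOTP secret from the config, confirm the phone's fingerprint server is reachable, then accept the login if the TOTP token returned after a fingerprint scan verifies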
print "Preparing to authenticate using fingerprint"
config = ConfigParser.ConfigParser()
config.readfp(open(CONFIG_LOC, 'r'))
server_url = config.get('SERVER', 'url')
secret = config.get('SECRET', 'secret')
try:
user = pamh.get_user(None)
except pamh.exception, e:
return e.pam_result
if user == None:
pamh.user = DEFAULT_USER
totp = pyotp.TOTP(secret)
print "Requesting fingerprint from " + server_url
try:
r = requests.get(server_url + "/identity", timeout=1)
content = r.json()
except:
print "Phone not connected"
return pamh.PAM_AUTH_ERR
if content["identity"] != "Fingerprint Server":
print "Phone not connected"
return pamh.PAM_AUTH_ERR
try:
r = requests.get(server_url + "/token", timeout=30)
content = r.json()
except:
print "Connection error"
return pamh.PAM_AUTH_ERR
if totp.verify(content["token"]):
print "Authenticated using fingerprint"
return pamh.PAM_SUCCESS
else:
print "Authentication failed"
return pamh.PAM_AUTH_ERR
def pam_sm_setcred(pamh, flags, argv):
return pamh.PAM_SUCCESS
def pam_sm_acct_mgmt(pamh, flags, argv):
return pamh.PAM_SUCCESS
def pam_sm_open_session(pamh, flags, argv):
return pamh.PAM_SUCCESS
def pam_sm_close_session(pamh, flags, argv):
return pamh.PAM_SUCCESS
def pam_sm_chauthtok(pamh, flags, argv):
return pamh.PAM_SUCCESS
``` |
{
"source": "jrandj/FPL-draft-picker",
"score": 3
} |
#### File: jrandj/FPL-draft-picker/ConsolidatedData.py
```python
import pandas as pd
from Team import Team
from OfficialAPIData import OfficialAPIData
from ProjectionsData import ProjectionsData
class ConsolidatedData:
"""
    A class that consolidates player data from the Official Fantasy Premier League API with projections data from Fantasy Football Scout.
Attributes
----------
teamName : str
The name of the team.
leagueID : sequence
The unique identifier for the league.
fantasyFootballScoutUsername : str
The username used to authenticate to Fantasy Football Scout.
fantasyFootballScoutPassword : str
The password used to authenticate to Fantasy Football Scout.
officialAPIData : object
An instance of OfficialAPIData.
projectionsData : object
An instance of ProjectionsData.
teamID : object
        The unique identifier for the team.
nextGameWeek : str
The upcoming game week.
Methods
-------
get_formations()
Return team formations in descending order with the highest scoring at the top.
add_total_points_to_players()
Add the total projected scores to the players object.
add_candidates_to_players_based_on_projections()
Find candidates who have a better six game projection than existing players in the team and add the list
to the players object.
get_teamID_from_teamName()
Gets the unique identifier for the team from the teamName.
"""
def __init__(self, fantasyFootballScoutUsername, fantasyFootballScoutPassword, teamName, leagueID):
self.teamName = teamName
self.leagueID = leagueID
self.fantasyFootballScoutUsername = fantasyFootballScoutUsername
        self.fantasyFootballScoutPassword = fantasyFootballScoutPassword
# OfficialAPIData.__init__(self, self.leagueID)
# ProjectionsData.__init__(self, self.fantasyFootballScoutUsername, self.fantasyFootballScoutPassword)
self.officialAPIData = OfficialAPIData(self.leagueID)
self.projectionsData = ProjectionsData(self.fantasyFootballScoutUsername, self.fantasyFootballScoutPassword)
self.teamID = self.get_teamID_from_teamName()
self.add_candidates_to_players_based_on_projections()
self.nextGameWeek = self.projectionsData.sixGameProjections[0].columns.values[-8]
@staticmethod
def get_formations(team, nextGameWeekHeader):
"""Return team formations in descending order with the highest scoring at the top.
Parameters
----------
team : dict
The JSON containing player data from a team.
nextGameWeekHeader : string
The key for the projected points of the upcoming game week.
Raises
------
"""
formations = [{'GKP': 1, 'DEF': 5, 'MID': 3, 'FWD': 2, 'Score': 0},
{'GKP': 1, 'DEF': 5, 'MID': 4, 'FWD': 1, 'Score': 0},
{'GKP': 1, 'DEF': 5, 'MID': 2, 'FWD': 3, 'Score': 0},
{'GKP': 1, 'DEF': 4, 'MID': 3, 'FWD': 3, 'Score': 0},
{'GKP': 1, 'DEF': 4, 'MID': 5, 'FWD': 1, 'Score': 0},
{'GKP': 1, 'DEF': 4, 'MID': 4, 'FWD': 2, 'Score': 0},
{'GKP': 1, 'DEF': 3, 'MID': 5, 'FWD': 2, 'Score': 0},
{'GKP': 1, 'DEF': 3, 'MID': 4, 'FWD': 3, 'Score': 0}]
player_index = 0
total_points = 0
current_formation = {'GKP': 0, 'DEF': 0, 'MID': 0, 'FWD': 0}
team.sort(key=lambda x: (x['position_name'], -x[nextGameWeekHeader]))
for formation in formations:
team_copy = team.copy()
while current_formation != formation and len(team_copy) > player_index:
current_player = team_copy[player_index]
# This approach assumes the team is sorted by projected points in the next game week
if Team.add_player_to_formation(current_player, current_formation, formation):
total_points += current_player[nextGameWeekHeader]
del team_copy[player_index]
player_index = 0
else:
player_index = player_index + 1
formation['Score'] = round(total_points, 2)
total_points = 0
player_index = 0
current_formation = {'GKP': 0, 'DEF': 0, 'MID': 0, 'FWD': 0}
formations.sort(key=lambda x: (-x['Score']))
return formations
def add_total_points_to_players(self):
"""Add the total projected scores to the players object.
Parameters
----------
Raises
------
"""
df = pd.DataFrame.from_dict(self.officialAPIData.players['elements'])
# Left join fplPlayerData onto season projections using a key of player name, team name and position name.
# We need to drop duplicates because the projections data does not have additional data to ensure a 1:1 join.
df1 = df.merge(self.projectionsData.seasonProjections[0], how='left',
left_on=['web_name_clean', 'team_name', 'position_name'],
right_on=['Name', 'Team', 'Pos'], indicator='merge_status_season').drop_duplicates(
subset=['id']).drop(columns=['Name', 'Team', 'Pos', 'FPL Price', 'Mins', 'G', 'A', 'CS', 'Bonus', 'YC'])
d1 = df1.to_dict(orient='records')
for i in range(len(d1)):
self.officialAPIData.players['elements'][i]['merge_status_season'] = d1[i]['merge_status_season']
self.officialAPIData.players['elements'][i]['FPL_Pts'] = d1[i]['FPL Pts']
return
def add_candidates_to_players_based_on_projections(self):
"""Find candidates who have a better six game projection than existing players in the team and add the list
to the players object.
Parameters
----------
Raises
------
"""
df = pd.DataFrame.from_dict(self.officialAPIData.players['elements'])
sixGameProjection = self.projectionsData.sixGameProjections[0].columns.values[-2]
nextGameWeek = self.projectionsData.sixGameProjections[0].columns.values[-8]
nextGameWeekPlusOne = self.projectionsData.sixGameProjections[0].columns.values[-7]
nextGameWeekPlusTwo = self.projectionsData.sixGameProjections[0].columns.values[-6]
nextGameWeekPlusThree = self.projectionsData.sixGameProjections[0].columns.values[-5]
nextGameWeekPlusFour = self.projectionsData.sixGameProjections[0].columns.values[-4]
nextGameWeekPlusFive = self.projectionsData.sixGameProjections[0].columns.values[-3]
# Left join fplPlayerData onto six game projections using a key of player name, team name and position name.
# We need to drop duplicates because the projections data does not have additional data to ensure a 1:1 join.
df1 = df.merge(self.projectionsData.sixGameProjections[0], how='left',
left_on=['web_name_clean', 'team_name', 'position_name'],
right_on=['Name', 'Team', 'Pos'], indicator='merge_status_six_game').drop_duplicates(
subset=['id'])
d1 = df1.to_dict(orient='records')
for i in range(len(d1)):
candidates = {}
candidates_this_gw = {}
ict_index_candidates = {}
self.officialAPIData.players['elements'][i][sixGameProjection] = d1[i][sixGameProjection]
self.officialAPIData.players['elements'][i][nextGameWeek] = d1[i][nextGameWeek]
self.officialAPIData.players['elements'][i][nextGameWeekPlusOne] = d1[i][nextGameWeekPlusOne]
self.officialAPIData.players['elements'][i][nextGameWeekPlusTwo] = d1[i][nextGameWeekPlusTwo]
self.officialAPIData.players['elements'][i][nextGameWeekPlusThree] = d1[i][nextGameWeekPlusThree]
self.officialAPIData.players['elements'][i][nextGameWeekPlusFour] = d1[i][nextGameWeekPlusFour]
self.officialAPIData.players['elements'][i][nextGameWeekPlusFive] = d1[i][nextGameWeekPlusFive]
self.officialAPIData.players['elements'][i]['merge_status_six_game'] = d1[i]['merge_status_six_game']
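            # For players owned by this team, collect unselected and available players in the same position with a better six-game projection, a better next-gameweek projection, or a higher ICT index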
if d1[i]['selected'] == self.teamID:
for j in range(len(d1)):
if (d1[j][sixGameProjection] > d1[i][sixGameProjection]) and (d1[i]['Pos'] == d1[j]['Pos']) and \
(d1[j]['selected'] == 'No') and (d1[j]['available'] == 'Yes'):
candidates[d1[j]['web_name']] = d1[j][sixGameProjection]
if (d1[j][nextGameWeek] > d1[i][nextGameWeek]) and (d1[i]['Pos'] == d1[j]['Pos']) and \
(d1[j]['selected'] == 'No') and (d1[j]['available'] == 'Yes'):
candidates_this_gw[d1[j]['web_name']] = d1[j][nextGameWeek]
if (float(d1[j]['ict_index']) > float(d1[i]['ict_index'])) and (d1[i]['Pos'] == d1[j]['Pos']) and \
(d1[j]['selected'] == 'No') and (d1[j]['available'] == 'Yes'):
ict_index_candidates[d1[j]['web_name']] = float(d1[j]['ict_index'])
sorted_candidates = sorted(candidates.items(), key=lambda x: x[1], reverse=True)
sorted_candidates_this_gw = sorted(candidates_this_gw.items(), key=lambda x: x[1], reverse=True)
ict_index_candidates = sorted(ict_index_candidates.items(), key=lambda x: x[1], reverse=True)
self.officialAPIData.players['elements'][i]['candidates'] = sorted_candidates
self.officialAPIData.players['elements'][i]['candidates_this_gw'] = sorted_candidates_this_gw
self.officialAPIData.players['elements'][i]['ict_index_candidates'] = ict_index_candidates
return
def get_teamID_from_teamName(self):
"""Gets the unique identifier for the team from the teamName.
Parameters
----------
Raises
------
SystemExit:
If the teamName cannot be found in the leagueID.
"""
found = 0
for i in self.officialAPIData.league['league_entries']:
if i['entry_name'] == self.teamName:
teamID = i['entry_id']
found = 1
if found == 0:
print("Team " + self.teamName + " not found in league " + self.leagueID + ".")
raise SystemExit()
return teamID
```
#### File: jrandj/FPL-draft-picker/fpldraft.py
```python
import pandas as pd
import argparse
import time
from ConsolidatedData import ConsolidatedData
from Fixture import Fixture
from Team import Team
pd.options.mode.chained_assignment = None # default='warn'
class Draft:
"""
The top level class for the FPL-Draft application.
Attributes
----------
leagueID : sequence
The unique identifier for the league.
teamName : str
The name of the team.
fantasyFootballScoutUsername : str
The username used to authenticate to Fantasy Football Scout.
fantasyFootballScoutPassword : str
The password used to authenticate to Fantasy Football Scout.
consolidatedData : object
An instance of ConsolidatedData.
fixture : object
An instance of Fixture.
team : object
An instance of Team.
Methods
-------
parse_input()
Parse the user input.
write_results()
Write the results to file.
"""
def __init__(self):
self.leagueID, self.teamName, self.fantasyFootballScoutUsername, self.fantasyFootballScoutPassword = \
self.parse_input()
self.consolidatedData = ConsolidatedData(self.fantasyFootballScoutUsername, self.fantasyFootballScoutPassword,
self.teamName,
self.leagueID)
self.fixture = Fixture(self.consolidatedData)
self.team = Team(self.teamName, self.consolidatedData.teamID, self.consolidatedData)
@staticmethod
def parse_input():
"""Parse the user input.
Parameters
----------
Raises
------
"""
ap = argparse.ArgumentParser()
ap.add_argument("-leagueID", required=True, help="The minileague")
ap.add_argument("-teamName", required=True, help="The team")
ap.add_argument("-fantasyFootballScoutUsername", required=True, help="Username for Fantasy Football Scout")
ap.add_argument("-fantasyFootballScoutPassword", required=True, help="Password for Fantasy Football Scout")
args = vars(ap.parse_args())
leagueID = args.get('leagueID')
teamName = args.get('teamName')
fantasyFootballScoutUsername = args.get('fantasyFootballScoutUsername')
fantasyFootballScoutPassword = args.get('fantasyFootballScoutPassword')
return leagueID, teamName, fantasyFootballScoutUsername, fantasyFootballScoutPassword
def write_results(self):
"""Write the results to file.
Parameters
----------
Raises
------
"""
f = open("fpldraft-results-" + time.strftime("%Y%d%m-%H%M%S") + ".html", "w", encoding='utf-8')
f.write(self.fixture.representation)
f.write(self.team.candidates_representation)
f.write(self.team.formations_representation)
f.close()
def main():
draft = Draft()
draft.write_results()
if __name__ == "__main__":
main()
```
#### File: jrandj/FPL-draft-picker/Team.py
```python
import math
from collections import OrderedDict
from tabulate import tabulate
class Team:
"""
A class that represents a team.
Attributes
----------
teamName : str
The name of the team.
teamID : str
The unique identifier of the team.
consolidatedData : object
An instance of ConsolidatedData.
playersInTeam : sequence
A list of the players in the team.
formations : sequence
A list of the possible formations for the team and their projected points total in the next fixture.
    candidates_representation: str
A subset of players with six game projections added.
formations_representation: str
A representation of the formations of the team.
Methods
-------
get_players_for_team()
Return the players in the team.
get_formations_for_team()
Return team formations in descending order with the highest scoring at the top.
add_player_to_formation
Attempt to add a player to a formation.
generate_candidates_representation():
Generate a representation of the candidates.
generate_formations_representation():
Generate a representation of the formations of the team.
"""
def __init__(self, teamName, teamID, consolidatedData):
self.teamName = teamName
self.teamID = teamID
self.consolidatedData = consolidatedData
self.playersInTeam = self.get_players_for_team(self.teamID, self.consolidatedData)
self.formations = self.get_formations_for_team(self.playersInTeam, self.consolidatedData)
self.candidates_representation = self.generate_candidates_representation()
self.formations_representation = self.generate_formations_representation()
@staticmethod
def get_players_for_team(teamID, consolidatedData):
"""Return the players in the team.
Parameters
----------
teamID : str
The unique identifier of the team.
consolidatedData : object
An instance of ConsolidatedData.
Raises
------
"""
team = []
for i in range(len(consolidatedData.officialAPIData.players['elements'])):
if consolidatedData.officialAPIData.players['elements'][i]['selected'] == teamID:
team.append(consolidatedData.officialAPIData.players['elements'][i])
return team
@staticmethod
def get_formations_for_team(playersInTeam, consolidatedData):
"""Return team formations in descending order with the highest scoring at the top.
Parameters
----------
playersInTeam : sequence
A list of the players in the team.
consolidatedData : object
An instance of ConsolidatedData.
Raises
------
"""
formations = [{'GKP': 1, 'DEF': 5, 'MID': 3, 'FWD': 2, 'Score': 0},
{'GKP': 1, 'DEF': 5, 'MID': 4, 'FWD': 1, 'Score': 0},
{'GKP': 1, 'DEF': 5, 'MID': 2, 'FWD': 3, 'Score': 0},
{'GKP': 1, 'DEF': 4, 'MID': 3, 'FWD': 3, 'Score': 0},
{'GKP': 1, 'DEF': 4, 'MID': 5, 'FWD': 1, 'Score': 0},
{'GKP': 1, 'DEF': 4, 'MID': 4, 'FWD': 2, 'Score': 0},
{'GKP': 1, 'DEF': 3, 'MID': 5, 'FWD': 2, 'Score': 0},
{'GKP': 1, 'DEF': 3, 'MID': 4, 'FWD': 3, 'Score': 0}]
player_index = 0
total_points = 0
current_formation = {'GKP': 0, 'DEF': 0, 'MID': 0, 'FWD': 0}
nextGameWeek = consolidatedData.projectionsData.sixGameProjections[0].columns.values[-8]
playersInTeam.sort(key=lambda x: (x['position_name'], -x[nextGameWeek]))
for formation in formations:
team_copy = playersInTeam.copy()
while current_formation != formation and len(team_copy) > player_index:
current_player = team_copy[player_index]
# This approach assumes the team is sorted by projected points in the next game week
if Team.add_player_to_formation(current_player, current_formation, formation):
total_points += current_player[nextGameWeek]
del team_copy[player_index]
player_index = 0
else:
player_index = player_index + 1
formation['Score'] = round(total_points, 2)
total_points = 0
player_index = 0
current_formation = {'GKP': 0, 'DEF': 0, 'MID': 0, 'FWD': 0}
formations.sort(key=lambda x: (-x['Score']))
return formations
@staticmethod
def add_player_to_formation(current_player, current_formation, formation):
"""Attempt to add a player to a formation.
Parameters
----------
current_player : dict
The proposed player.
current_formation : dict
The current formation.
formation : dict
The current formation for which the player is proposed.
Raises
------
"""
player_added = True
if current_player['position_name'] == 'GKP' and current_formation.get('GKP') + 1 <= formation.get('GKP'):
current_formation['GKP'] = current_formation['GKP'] + 1
elif current_player['position_name'] == 'DEF' and current_formation.get('DEF') + 1 <= formation.get('DEF'):
current_formation['DEF'] = current_formation['DEF'] + 1
elif current_player['position_name'] == 'MID' and current_formation.get('MID') + 1 <= formation.get('MID'):
current_formation['MID'] = current_formation['MID'] + 1
elif current_player['position_name'] == 'FWD' and current_formation.get('FWD') + 1 <= formation.get('FWD'):
current_formation['FWD'] = current_formation['FWD'] + 1
else:
player_added = False
return player_added
def generate_candidates_representation(self):
"""Generate a representation of the candidates.
Parameters
----------
Raises
------
"""
printListPoints = []
printListIctIndex = []
sixGameProjectionHeader = self.consolidatedData.projectionsData.sixGameProjections[0].columns.values[-2]
nextGameWeekHeader = self.consolidatedData.projectionsData.sixGameProjections[0].columns.values[-8]
for i in self.playersInTeam:
printDictPoints = OrderedDict((k, i[k]) for k in (
'web_name', 'team_name', 'position_name', sixGameProjectionHeader, nextGameWeekHeader, 'candidates',
'candidates_this_gw'))
printListPoints.append(printDictPoints)
printDictIctIndex = OrderedDict(
(k, i[k]) for k in ('web_name', 'team_name', 'position_name', 'ict_index', 'ict_index_candidates'))
printListIctIndex.append(printDictIctIndex)
sortedPrintListPoints = sorted(printListPoints, key=lambda x: (x['position_name'], -x[sixGameProjectionHeader]))
sortedPrintListIctIndex = sorted(printListIctIndex, key=lambda x: (x['position_name'], -float(x['ict_index'])))
# print(tabulate(sortedPrintListPoints, headers="keys", tablefmt="github"))
# print(tabulate(sortedPrintListIctIndex, headers="keys", tablefmt="github"))
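        # Data-quality summary: active players, players whose merge with the Fantasy Football Scout projections failed, and merged players with no valid six-game projection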
expected_results = [i for i in self.consolidatedData.officialAPIData.players['elements'] if i['status'] != 'u']
failed_merge = [i for i in self.consolidatedData.officialAPIData.players['elements'] if
i['merge_status_six_game'] != 'both' and i['status'] != 'u']
no_projections = [i for i in self.consolidatedData.officialAPIData.players['elements'] if
math.isnan(i[sixGameProjectionHeader]) and i['status'] != 'u' and i[
'merge_status_six_game'] == 'both']
failed_merge_player_info = [
[i["web_name_clean"], i["team_name"], i["position_name"], i["merge_status_six_game"]]
for i in failed_merge]
no_projections_player_info = [
[i["web_name_clean"], i["team_name"], i["position_name"], i["merge_status_six_game"]]
for i in no_projections]
candidates_representation = str(
tabulate(sortedPrintListPoints, headers="keys", tablefmt="html", stralign="left", numalign="left",
colalign="left") + "<br>" +
tabulate(sortedPrintListIctIndex, headers="keys", tablefmt="html", stralign="left", numalign="left",
colalign="left") +
"<br>" + str(len(expected_results))
+ " active players from the Official Fantasy Premier League API have been matched to "
+ str(len(expected_results) - len(failed_merge) - len(no_projections))
+ " Fantasy Football Scout six game projections."
+ "<br>" + "The following merge failures occurred between the official Fantasy Premier League API and "
"the Fantasy Football Scout six game projections: "
+ str(failed_merge_player_info)
+ "<br> The following players were matched but have an invalid Fantasy Football Scout six game projection: "
+ str(no_projections_player_info)) + "<br>"
return candidates_representation
def generate_formations_representation(self):
"""Generate a representation of the formations of the team.
Parameters
----------
Raises
------
"""
formations_representation = "Formations and their scores: " + str(
sorted(self.formations, key=lambda x: (x['Score']), reverse=True)) + "<br>"
return formations_representation
``` |
{
"source": "jrandj/trading_strategy",
"score": 4
} |
#### File: jrandj/trading_strategy/Result.py
```python
import pandas as pd
import numpy as np
import config
class Result:
"""
A class used to represent a Result.
Attributes
----------
ticker : sequence
The stock ticker.
data : dataframe
The historical data associated with the ticker.
strategy : Strategy
An instance of the Strategy class.
buy_transactions: sequence
List of buy transactions.
sell_transactions: sequence
List of sell transactions.
buy_transaction_equity: sequence
List of equity values corresponding to the buy transactions.
sell_transaction_equity: sequence
List of equity values corresponding to the sell transactions.
Performance : Performance
An instance of the Performance class.
    transactions : numeric
        The total number of buy and sell transactions.
Methods
-------
performance_as_dict()
Returns the performance results in a dictionary.
tech_indicators()
Augments the data attribute with columns for technical indicators.
buy_and_sell_signals()
Calculate signals where they can be vectorised.
trade()
Enters and exit positions based on buy/sell signals.
calculate_returns()
Calculate returns after the trade method has been executed.
print_results()
Print the performance results to the console.
"""
def __init__(self, ticker, strategy, raw_data):
self.ticker = ticker
self.data = raw_data
self.strategy = strategy
self.tech_indicators()
self.buy_and_sell_signals()
self.buy_transactions, self.sell_transactions, self.buy_transaction_equity, self.sell_transaction_equity = self.trade()
self.Performance = self.calculate_returns()
self.transactions = len(self.buy_transactions + self.sell_transactions)
self.print_results()
def performance_as_dict(self):
"""Returns the performance results in a dictionary.
Parameters
----------
Raises
------
"""
return {'ticker': self.ticker, 'strategy': "Strategy(" + str(self.strategy.required_profit) + ", " + str(
self.strategy.required_pct_change_min) + ", " + str(self.strategy.required_pct_change_max) + ", " + str(
self.strategy.required_volume) + ")",
'annualised_return': self.Performance.annualised_return,
'annualised_return_ref': self.Performance.annualised_return_ref,
'end_date': self.Performance.end_date,
'end_price': self.Performance.end_price,
'gain': self.Performance.gain,
'gain_ref': self.Performance.gain_ref,
'start_date': self.Performance.start_date,
'start_price': self.Performance.start_price}
def tech_indicators(self):
"""Augments the data attribute with columns for technical indicators.
Parameters
----------
Raises
------
"""
self.data = self.data.assign(close_MA_50=self.data[["close"]].ewm(span=50).mean())
self.data = self.data.assign(close_MA_200=self.data[["close"]].ewm(span=200).mean())
self.data = self.data.assign(volume_MA_20=self.data[["volume"]].rolling(20).mean())
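        # Flag candidate buy days: daily close change within the configured band and volume above the required multiple of the 20-day average volume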
self.data = self.data.assign(
price_change_buy=self.data['close'].pct_change().between(self.strategy.required_pct_change_min,
self.strategy.required_pct_change_max))
self.data = self.data.assign(
volume_change_buy=(self.data["volume"] > self.strategy.required_volume * self.data["volume_MA_20"]))
# Money Flow Index (MFI)
typical_price = (self.data["high"] + self.data["low"] + self.data["close"]) / 3
money_flow = typical_price * self.data["volume"]
delta = money_flow - money_flow.shift(1)
delta = pd.Series([0 if np.isnan(x) else x for x in delta])
positive_money_flow = pd.Series([x if x > 0 else 0 for x in delta])
negative_money_flow = pd.Series([abs(x) if x < 0 else 0 for x in delta])
positive_money_flow_sum = positive_money_flow.rolling(window=14).sum().values
negative_money_flow_sum = negative_money_flow.rolling(window=14).sum().values
with np.errstate(divide='ignore', invalid='ignore'):
money_ratio = positive_money_flow_sum / negative_money_flow_sum
money_flow_index = 100 - 100 / (1 + money_ratio)
self.data = self.data.assign(MFI=money_flow_index)
# Relative Strength Index (RSI)
delta = self.data["close"] - self.data["close"].shift(1)
delta = pd.Series([0 if np.isnan(x) else x for x in delta])
up = pd.Series([x if x > 0 else 0 for x in delta])
down = pd.Series([abs(x) if x < 0 else 0 for x in delta])
with np.errstate(divide='ignore', invalid='ignore'):
rs = up.rolling(window=14).mean().values / down.rolling(window=14).mean().values
relative_strength_index = 100 - 100 / (1 + rs)
self.data = self.data.assign(RSI=relative_strength_index)
# Stochastic Oscillator
stochastic_oscillator = pd.Series(
(self.data["close"] - self.data["close"].rolling(window=14, center=False).min()) / (
self.data["close"].rolling(window=14, center=False).max() - self.data["close"].rolling(window=14,
center=False).min()))
stochastic_oscillator = 100 * stochastic_oscillator.rolling(window=3).mean()
self.data = self.data.assign(STO=stochastic_oscillator)
# Bollinger Bands
rolling_mean = self.data[["close"]].ewm(span=50).mean()
rolling_std = self.data[["close"]].ewm(span=50).std()
self.data = self.data.assign(BB_upper=rolling_mean + (rolling_std * 2))
self.data = self.data.assign(BB_lower=rolling_mean - (rolling_std * 2))
return
def buy_and_sell_signals(self):
"""Calculate signals where they can be vectorised.
Generation of sell signal requires iterating through the data which is done in the trade method.
Parameters
----------
Raises
------
"""
self.data = self.data.assign(buy_signal=np.nan, sell_signal=np.nan, buy_signal_date=np.nan,
sell_signal_date=np.nan)
buy_prices = self.data["close"].iloc[np.where(self.data["volume_change_buy"] & self.data["price_change_buy"])]
buy_dates = self.data["date"].iloc[np.where(self.data["volume_change_buy"] & self.data["price_change_buy"])]
self.data = self.data.assign(buy_signal=buy_prices)
self.data = self.data.assign(buy_signal_date=buy_dates)
return
def trade(self):
"""Enters and exit positions based on buy/sell signals.
Parameters
----------
Raises
------
"""
buy_transactions, buy_transaction_equity, sell_transactions, sell_transaction_equity = ([] for i in range(4))
        open_long_position, buy_and_hold, buy_and_hold_shares, shares = (0, 0, 0, 0)
buy_and_hold_position_array, open_long_position_array, strategy_equity_array, buy_and_hold_equity_array = (
np.full(len(self.data["close"].values), np.nan) for i in range(4))
# Create buy signal and buy signal dates without NaN or NaT (NaN and NaT inclusive arrays required for plots)
buy_signal_array_nonan = self.data["buy_signal"].values[~np.isnan(self.data["buy_signal"].values)]
buy_signal_array_dates_nonat = self.data["buy_signal_date"].values[
~np.isnat(self.data["buy_signal_date"].values)]
j = 0
cash = config.cash
buy_and_hold_cash = config.buy_and_hold_cash
for i in range(0, len(self.data["close"].values)):
# Handle buy
if np.isfinite(self.data["buy_signal"].values[i]):
if not open_long_position:
open_long_position = self.data["close"].values[i]
shares = (1 - config.transaction_fee) * (cash / open_long_position)
cash = 0
buy_transactions.append(pd.to_datetime(self.data["date"].values[i]).strftime("%d-%m-%Y"))
buy_transaction_equity.append(round(shares * self.data["close"].values[i] + cash, 2))
if not buy_and_hold:
buy_and_hold_shares = ((1 - config.transaction_fee) * buy_and_hold_cash) / \
self.data["close"].values[i]
buy_and_hold_cash = 0
buy_and_hold = 1
# Handle sell
elif (j < len(buy_signal_array_nonan) and self.data["date"].values[i] > buy_signal_array_dates_nonat[j] and
self.data["close"].values[
i] > self.strategy.required_profit *
buy_signal_array_nonan[j]):
# Need to offset the index which is based on the original dataframe with all tickers
self.data.at[self.data.index[0] + i, "sell_signal"] = self.data["close"].values[i]
self.data.at[self.data.index[0] + i, "sell_signal_date"] = pd.to_datetime(self.data["date"].values[i])
if open_long_position:
j = j + 1
cash = (1 - config.transaction_fee) * shares * self.data["close"].values[i]
shares = 0
open_long_position = 0
sell_transactions.append(pd.to_datetime(self.data["date"].values[i]).strftime("%d-%m-%Y"))
sell_transaction_equity.append(round(shares * self.data["close"].values[i] + cash, 2))
# Record open positions
open_long_position_array[i] = self.data["close"].values[i] if open_long_position else 0
buy_and_hold_position_array[i] = self.data["close"].values[i] if buy_and_hold else 0
# Record equity
buy_and_hold_equity_array[i] = buy_and_hold_shares * buy_and_hold_position_array[
i] + buy_and_hold_cash
strategy_equity_array[i] = shares * open_long_position_array[i] + cash
self.data.sell_signal_date = self.data.sell_signal_date.astype("datetime64[ns]", copy=False)
self.data = self.data.assign(strategy_equity=strategy_equity_array,
buy_and_hold_equity=buy_and_hold_equity_array,
open_long_position=open_long_position_array,
buy_and_hold_position=buy_and_hold_position_array)
return buy_transactions, sell_transactions, buy_transaction_equity, sell_transaction_equity
def calculate_returns(self):
"""Calculate returns after the trade method has been executed.
Parameters
----------
Raises
------
"""
# Calculate returns using strategies and buy and hold
date_index_long = np.isfinite(self.data["open_long_position"])
date_index_buy_and_hold = np.isfinite(self.data["buy_and_hold_position"])
# Handle case where there is no long position
if self.data["date"][date_index_long].empty:
performance = Performance(0, 0, 0, 0, 0, 0, 0, 0)
return performance
else:
start_date = self.data["date"][date_index_long].iloc[0]
start_date_ref = self.data["date"][date_index_buy_and_hold].iloc[0]
start_price = self.data["strategy_equity"][date_index_long].iloc[0]
start_price_ref = self.data["buy_and_hold_equity"][date_index_buy_and_hold].iloc[0]
end_date = self.data["date"][date_index_long].iloc[-1]
end_date_ref = self.data["date"][date_index_buy_and_hold].iloc[-1]
end_price = self.data["strategy_equity"][date_index_long].iloc[-1]
end_price_ref = self.data["buy_and_hold_equity"][date_index_buy_and_hold].iloc[-1]
# Compute annualised returns
delta = 1 + (end_date - start_date).days
delta_ref = 1 + (end_date_ref - start_date_ref).days
annualised_return = 100 * (((end_price / start_price) ** (365 / delta)) - 1)
annualised_return_ref = 100 * (((end_price_ref / start_price_ref) ** (365 / delta_ref)) - 1)
gain = end_price / start_price
gain_ref = end_price_ref / start_price_ref
performance = Performance(annualised_return, annualised_return_ref, start_price, start_date, end_price,
end_date, gain, gain_ref)
return performance
def print_results(self):
"""Print the performance results to the console.
Parameters
----------
Raises
------
"""
print(str(self.ticker) + " Strategy Annual Return: " + str(self.Performance.annualised_return) + "%" + "\n" +
str(self.ticker) + " Buy Signals: " + str(
[pd.to_datetime(i).strftime("%d-%m-%Y") for i in self.data["buy_signal_date"].tolist() if
not pd.isna(i)]) + "\n" +
str(self.ticker) + " Buy Transactions: " + str(self.buy_transactions) + "\n" +
str(self.ticker) + " Buy Transaction Equity: " + str(self.buy_transaction_equity) + "\n" +
str(self.ticker) + " Position Start Date: " + str(
pd.to_datetime(self.Performance.start_date).strftime("%d-%m-%Y")) + "\n" +
str(self.ticker) + " Position Equity Start: " + str(self.Performance.start_price) + "\n" +
str(self.ticker) + " Sell Signals: " + str(
[pd.to_datetime(i).strftime("%d-%m-%Y") for i in self.data["sell_signal_date"].tolist() if
not pd.isna(i)]) + "\n" +
str(self.ticker) + " Sell Transactions: " + str(self.sell_transactions) + "\n" +
str(self.ticker) + " Sell Transaction Equity: " + str(self.sell_transaction_equity) + "\n" +
str(self.ticker) + " Position End Date: " + str(
pd.to_datetime(self.Performance.end_date).strftime("%d-%m-%Y")) + "\n" +
str(self.ticker) + " Position Equity End: " + str(self.Performance.end_price) + "\n" +
str(self.ticker) + " Buy and Hold Annual Return: " + str(
self.Performance.annualised_return_ref) + "%" + "\n" +
str(self.ticker) + " Strategy Gain: " + str(self.Performance.gain) + "\n" +
str(self.ticker) + " Buy and Hold Gain: " + str(self.Performance.gain))
return
class Performance:
"""
A class used to hold the performance for the Result.
Attributes
----------
annualised_return : numeric
The annualised return based on equity changes following the buy and sell transactions (based on the trading
strategy) in the trade method.
annualised_return_ref : numeric
The annualised return based on equity changes following the buy and hold transactions in the trade method.
start_price : numeric
The equity at the start of the strategy.
start_date : numeric
The date at the start of the strategy.
end_price : numeric
The equity at the end of the strategy.
end_date : numeric
The date at the end of the strategy.
gain : numeric
The raw gain (i.e. not annualised) based on equity changes following the buy and sell transactions (based on
the trading strategy) in the trade method.
gain_ref : numeric
The raw gain (i.e. not annualised) based on equity changes following the buy and hold transactions
in the trade method.
Methods
-------
"""
def __init__(self, annualised_return, annualised_return_ref, start_price, start_date, end_price, end_date, gain,
gain_ref):
self.annualised_return = np.round(annualised_return, 2)
self.annualised_return_ref = np.round(annualised_return_ref, 2)
self.start_price = np.round(start_price, 2)
self.start_date = start_date
self.end_price = np.round(end_price, 2)
self.end_date = end_date
self.gain = np.round(gain, 2)
self.gain_ref = np.round(gain_ref, 2)
return
```
#### File: jrandj/trading_strategy/Strategy.py
```python
class Strategy:
"""
A class used to represent a Strategy.
Attributes
----------
required_profit : numeric
The profit multiple required to exit a position.
required_pct_change_min : numeric
The price change lower bound to generate a buy signal.
required_pct_change_max : numeric
The price change upper bound to generate a buy signal.
required_volume : numeric
The required multiple of the 20D MA volume to generate a buy signal.
Methods
-------
"""
def __init__(self, required_profit, required_pct_change_min, required_pct_change_max, required_volume):
"""
Parameters
----------
required_profit : numeric
The profit multiple required to exit a position.
required_pct_change_min : numeric
The price change lower bound to generate a buy signal.
required_pct_change_max : numeric
The price change upper bound to generate a buy signal.
required_volume : numeric
The required multiple of the 20D MA volume to generate a buy signal.
"""
self.required_profit = required_profit
self.required_pct_change_min = required_pct_change_min
self.required_pct_change_max = required_pct_change_max
self.required_volume = required_volume
``` |
{
"source": "jrandolph13/Multi-Blockchain-Wallet-in-Python",
"score": 2
} |
#### File: jrandolph13/Multi-Blockchain-Wallet-in-Python/wallet.py
```python
import os
import subprocess
import json
from dotenv import load_dotenv
from constants import *
from bit import Key, PrivateKey, PrivateKeyTestnet
from bit.network import NetworkAPI
from bit import *
from web3 import Web3
from eth_account import Account
# Load and set environment variables
load_dotenv()
mnemonic=os.getenv("mnemonic","ridge burst wash eager infant old vocal coast describe wood stove recall")
print(mnemonic)
# Import constants.py and necessary functions from bit and web3
BTC = 'btc'
ETH = 'eth'
BTCTEST = 'btc-test'
w3 = Web3(Web3.HTTPProvider("HTTP://127.0.0.1:7545"))
w3.eth.getBalance("0x50eE8d6D03f36B893E6483453FBf81FCEbd39857")
from web3.gas_strategies.time_based import medium_gas_price_strategy
w3.eth.setGasPriceStrategy(medium_gas_price_strategy)
# Create a function called `derive_wallets`
def derive_wallets(mnemonic, coin, numderive):
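    # Shell out to the hd-wallet-derive PHP tool to derive child keys for the given coin from the mnemonic and return the parsed JSON key list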
print("Inside function")
command = f'php ~/hd-wallet-derive/hd-wallet-derive.php -g --mnemonic="{mnemonic}" --numderive="{numderive}" --coin="{coin}" --format=json'
p = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
(output, err) = p.communicate()
p_status=p.wait()
keys = json.loads(output)
print(keys)
return keys
# Create a dictionary object called coins to store the output from `derive_wallets`.
coins = ["eth", "btc-test", "btc"]
numderive = 3
keys = {}
for coin in coins:
keys[coin]= derive_wallets(mnemonic, coin, numderive=3)
eth_PrivateKey = keys ["eth"][0]["privkey"]
btc_PrivateKey = keys ["btc-test"][0]["privkey"]
print(json.dumps(eth_PrivateKey, indent=4, sort_keys=True))
print(json.dumps(btc_PrivateKey, indent=4, sort_keys=True))
print(json.dumps(keys, indent=4, sort_keys=True))
# Create a function called `priv_key_to_account` that converts privkey strings to account objects.
def priv_key_to_account(coin, priv_key):
if coin == ETH:
return Account.privateKeyToAccount(priv_key)
if coin == BTCTEST:
return PrivateKeyTestnet(priv_key)
eth_acc = priv_key_to_account(ETH, eth_PrivateKey)
btc_acc = priv_key_to_account (BTCTEST, btc_PrivateKey)
# Create a function called `create_tx` that creates an unsigned transaction appropriate metadata.
def create_tx(coin, account, recipient, amount):
global trx_data
if coin ==ETH:
gasEstimate = w3.eth.estimateGas(
{"from": account.address, "to": recipient, "value": amount}
)
trx_data = {
"to": recipient,
"from": account.address,
"value": amount,
"gasPrice": w3.eth.gasPrice,
"gas": gasEstimate,
"nonce": w3.eth.getTransactionCount(account.address)
}
return trx_data
if coin ==BTCTEST:
return PrivateKeyTestnet.prepare_transaction(account.address, [(recipient, amount, BTC)])
# Create a function called `send_tx` that calls `create_tx`, signs and sends the transaction.
def send_tx(coin, account, recipient, amount):
if coin == "eth":
tx_eth = create_tx(coin,account, recipient, amount)
sign = account.signTransaction(tx_eth)
result = w3.eth.sendRawTransaction(sign.rawTransaction)
print(result.hex())
return result.hex()
else:
trx_btctest= create_tx(coin,account,recipient,amount)
sign_trx_btctest = account.sign_transaction(trx_btctest)
from bit.network import NetworkAPI
NetworkAPI.broadcast_tx_testnet(sign_trx_btctest)
return sign_trx_btctest
``` |
{
"source": "jrandson/data-structures",
"score": 4
} |
#### File: data-structures/linked-lists/double_end_queue_test.py
```python
import unittest
from double_end_queue import DoubleEndQueue
class TestDoubleEndQueue(unittest.TestCase):
def test_initialization(self):
content = []
d_queue = DoubleEndQueue(content)
self.assertEqual(content,d_queue.get_content())
content = [1,2,4,3,6]
d_queue = DoubleEndQueue(content)
self.assertEqual(content,d_queue.get_content())
def test_add_first(self):
d_queue = DoubleEndQueue()
d_queue.add_first(8)
self.assertEqual([8],d_queue.get_content())
content = [3,6]
d_queue = DoubleEndQueue(content)
d_queue.add_first(10)
self.assertEqual([10,3,6],d_queue.get_content())
def test_add_last(self):
d_queue = DoubleEndQueue()
d_queue.add_last(8)
self.assertEqual([8],d_queue.get_content())
content = [3,6]
d_queue = DoubleEndQueue(content)
d_queue.add_last(10)
self.assertEqual([3,6,10],d_queue.get_content())
def test_delete_first(self):
content = [3,6]
d_queue = DoubleEndQueue(content)
d_queue.delete_first()
self.assertEqual([6],d_queue.get_content())
d_queue = DoubleEndQueue()
d_queue.delete_first()
self.assertEqual([],d_queue.get_content())
def test_delete_last(self):
content = [3,6]
d_queue = DoubleEndQueue(content)
d_queue.delete_last()
self.assertEqual([3],d_queue.get_content())
d_queue = DoubleEndQueue()
d_queue.delete_last()
self.assertEqual([],d_queue.get_content())
    def test_first(self):
d_queue = DoubleEndQueue()
self.assertEqual(None,d_queue.first())
content = [3,6]
d_queue = DoubleEndQueue(content)
self.assertEqual(3,d_queue.first())
def test_last(self):
d_queue = DoubleEndQueue()
self.assertEqual(None,d_queue.last())
content = [3,6]
d_queue = DoubleEndQueue(content)
self.assertEqual(6,d_queue.last())
def test_is_empty(self):
content = [3,6]
d_queue = DoubleEndQueue(content)
d_queue.delete_first()
self.assertEqual(False,d_queue.is_empty())
d_queue.delete_first()
self.assertEqual(True,d_queue.is_empty())
def test_len(self):
pass
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jrandson/desafio-radix",
"score": 4
} |
#### File: desafio-radix/notebook/Desafio Radix.py
```python
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
#p1_data_test_df = pd.read_csv('p1_data_test.csv',header=0)
df = pd.read_csv('../p1_data_train.csv',header=0)
print len(df)
pct = int(len(df)*0.5)
print pct
new_df = df[df.index > pct]
new_df
# In[2]:
def get_outliers_index(df, columns, gama = 1.5):
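    # Approximate Q1 and Q3 with the medians of the lower and upper halves, then flag rows more than gama * IQR outside the quartiles as outliers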
index_to_drop = []
for column in columns:
q2 = df[column].median()
q3 = df[df[column] > q2][column].median()
q1 = df[df[column] < q2][column].median()
IQR = q3 - q1
index_to_drop += list(df[(df[column] > q3 + gama*IQR) | (df[column] < q1 - gama*IQR)][column].index.values)
return list(np.unique(index_to_drop))
# In[4]:
df.head()
index_to_drop = get_outliers_index(df,['Temp1','Temp2','Temp3','Temp4'])
print df.shape
print len(index_to_drop)
print index_to_drop
df = df.drop(df.index[index_to_drop])
print df.shape
# In[ ]:
data = {'name': ['Jason', 'Molly', 'Tina', 'Jake', 'Amy'],
'year': [2012, 2012, 2013, 2014, 2014],
'reports': [4, 24, 31, 2, 3]}
df = pd.DataFrame(data, index = ['Cochice', 'Pima', 'Santa Cruz', 'Maricopa', 'Yuma'])
df
df.drop(df.index[[0,1,2]])
# In[192]:
``` |
{
"source": "jrandson/Introduction-to-data-science",
"score": 3
} |
#### File: Introduction-to-data-science/3th Assigment/scripts.py
```python
df['Data'] = ['December', 'januany',' June']
#broadcast
df['Delivered'] = True
df.reset_index
df['Date'] = Series(['...'])
df.where(df['SUMLEVEL'] == 50)
.dropna()
.set_index(['STNAME','CTYNAME'])
.rename(columns=[])
print(df.drop(df[df['Quantity'] == 0].index).rename(columns={'Weight': 'Weight (oz.)'}))
df.rename(columns={...})
df.drop(df[mask])
df['STNAME'].unique()
df.groupby('STNAME')
#applies a function to the values of a specific column
df.groupby(['STNAME']).agg({'CENSUS2010POP': np.average})
#applying a function throughout the data frame
print(df.groupby('Category').apply(lambda df,a,b: sum(df[a] * df[b]), 'Weight (oz.)', 'Quantity'))
# Or alternatively without using a lambda:
# def totalweight(df, w, q):
# return sum(df[w] * df[q])
#
# print(df.groupby('Category').apply(totalweight, 'Weight (oz.)', 'Quantity'))
df.set_index('STNAME').groupby(level=0)['CENSUS2010POP'].agg({'AVG':np.average,'sum':np.sum})
#agg => aggregation function
#Scales
#create categories
df['Grades'].astype['category'].head()
s = pd.Series(['Low', 'Low', 'High', 'Medium', 'Low', 'High', 'Low'])
s.astype('category', categories=['Low', 'Medium', 'High'], ordered=True)
df = pd.read_csv('census.csv')
df = df[df['USMLEV'] == 50]
df.set_index('STNAME').groupby(level=0)['CENSUS2010POP'].agg({'avg': np.average})
pd.cut(df['avg'],3)
#bin the values into 3 groups
s = pd.Series([168, 180, 174, 190, 170, 185, 179, 181, 175, 169, 182, 177, 180, 171])
pd.cut(s, 3)
# You can also add labels for the sizes [Small < Medium < Large].
pd.cut(s, 3, labels=['Small', 'Medium', 'Large'])
#Pivot tables
#summarizing data in a dataframe for a particular purpose
#makes heavy use of aggregation functions
df = pd.read_csv('cars.csv')
df.head()
df.pivot_table(values='kw', index='YEAR', columns='Make', aggfunc=np.mean)
print(pd.pivot_table(Bikes, index=['Manufacturer','Bike Type']))
#Data functionality
``` |
{
"source": "jrandson/RememberTheCheese",
"score": 2
} |
#### File: jrandson/RememberTheCheese/tests.py
```python
import datetime
from django.utils import timezone
from .models import Task,SubTask
from django.core.urlresolvers import reverse
from django.test import Client
from django.test.utils import setup_test_environment
from django.test import TestCase
from .forms import UserForm
from django.contrib.auth.models import User
class TaskMethodTest(TestCase):
def setup(self):
pass
def test_description_should_not_be_blank(self):
self.assertEqual(None, None)
def test_shoud_rate_task_completed(self):
task = Task.objects.create(description = 'task teste')
subtask1 = SubTask.objects.create(task = task)
subtask2 = SubTask(task = task)
subtask2.save()
subtask3 = SubTask(task = task)
subtask3.save()
self.assertEqual(0, task.get_pct_finished())
subtask1.finished = 1
subtask1.save()
self.assertEqual(100*round(1.0/3,2), task.get_pct_finished())
subtask2.finished = 1
subtask2.save()
self.assertEqual(100*round(2.0/3, 2), task.get_pct_finished())
subtask3.finished = 1
subtask3.save()
self.assertEqual(100 , task.get_pct_finished())
def test_show_friendly_deadline(self):
task = Task.objects.create(
description = 'task friendly deadline',
deadline = timezone.now()
)
self.assertEqual('Today', task.get_deadline())
task = Task.objects.create(
description = 'task friendly deadline',
deadline= timezone.now() + datetime.timedelta(days = 1)
)
self.assertEqual('Tomorrow', task.get_deadline())
task = Task.objects.create(
description = 'task friendly deadline',
deadline= timezone.now() - datetime.timedelta(days = 1)
)
self.assertEqual('Yesterday', task.get_deadline())
task = Task.objects.create(
description = 'task friendly deadline',
deadline= timezone.now() - datetime.timedelta(days = 2)
)
self.assertEqual('2 days ago', task.get_deadline())
task = Task.objects.create(
description = 'task friendly deadline',
deadline= timezone.now() + datetime.timedelta(days = 2)
)
self.assertEqual('In 2 days', task.get_deadline())
task.deadline = timezone.now() + datetime.timedelta(days = 4)
task.save()
self.assertEqual((timezone.now() + datetime.timedelta(days = 4)).date(), task.get_deadline().date())
def test_dead_line_is_today(self):
task = Task.objects.create(description="task",deadline=timezone.now())
self.assertEqual(True,task.is_for_today())
task.deadline = timezone.now() + datetime.timedelta(days = 1)
task.save()
self.assertEqual(False,task.is_for_today())
task.deadline = timezone.now() - datetime.timedelta(days = 1)
task.save()
self.assertEqual(False,task.is_for_today())
task.deadline = timezone.now() + datetime.timedelta(days = 10)
task.save()
self.assertEqual(False,task.is_for_today())
task.deadline = timezone.now() - datetime.timedelta(days = 12)
task.save()
self.assertEqual(False,task.is_for_today())
def test_id_for_today(self):
task = Task.objects.create(description = 'task', deadline= timezone.now() + datetime.timedelta(days=1))
self.assertEqual(False,task.is_for_today())
task = Task.objects.create(description = 'task', deadline= timezone.now())
self.assertEqual(True,task.is_for_today())
task = Task.objects.create(description = 'task', deadline= timezone.now() - datetime.timedelta(days=1))
self.assertEqual(False,task.is_for_today())
def test_is_late(self):
task = Task.objects.create(description = 'task', deadline= timezone.now() - datetime.timedelta(days=1))
self.assertEqual(True, task.is_late())
task = Task.objects.create(description = 'task', deadline= timezone.now())
self.assertEqual(False, task.is_late())
task = Task.objects.create(description = 'task', deadline= timezone.now() + datetime.timedelta(days=1))
self.assertEqual(False, task.is_late())
class TasksViewsTest(TestCase):
base_url = 'http://localhost:8000/rememberTheCheese/'
def setUp(self):
self.client = Client()
self.task1 = Task.objects.create(description = 'integration task ')
self.subtask1 = SubTask.objects.create(
            description = 'integration subtask',
task = self.task1
)
def test_index_should_be_ok(self):
response = self.client.get(self.base_url)
self.assertEqual(200,response.status_code)
def test_should_create_a_valid_task(self):
response = self.client.post(
self.base_url + 'create_task/'
)
self.assertEqual(200,response.status_code)
def test_should_not_create_a_invalid_task(self):
qtd_tasks_before = Task.objects.all().count()
response = self.client.post(self.base_url + 'create_task/', {'description' : ''})
self.assertEqual(200,response.status_code)
#self.assertEqual(self.base_url, response['Location'])
qtd_tasks_after = Task.objects.all().count()
self.assertEqual(qtd_tasks_before, qtd_tasks_after)
    def test_should_not_create_a_blank_subtask(self):
task = Task.objects.create()
subtask = SubTask.objects.create(task = task)
response = self.client.get(self.base_url+'detail_task/subtask', {'subtask_id' : subtask.id})
self.assertEqual(404, response.status_code)
def test_create_task_should_be_ok(self):
response = self.client.get(self.base_url+ 'create_task/')
self.assertEqual(200, response.status_code)
def test_update_task_should_be_ok(self):
task = Task.objects.create(description = 'teste')
subtask = SubTask.objects.create(description = 'teste', task = task)
response = self.client.get(self.base_url+'update_task/' + str(task.id))
self.assertEqual(301, response.status_code)
def test_detail_task_should_be_ok(self):
task = Task.objects.create(description = 'teste')
subtask = SubTask.objects.create(description = 'teste', task = task)
response = self.client.get(self.base_url+'detail_task/' + str(task.id))
self.assertEqual(301,response.status_code)
def test_task_for_today(self):
task1 = Task.objects.create(description='task 1', deadline = timezone.now() + datetime.timedelta(days=1))
task2 = Task.objects.create(description='task 2', deadline = timezone.now() - datetime.timedelta(days=1))
task_for_today = task1.get_tasks_for_today()
response = self.client.get('http://localhost/rememberTheCheese/today/')
self.assertEqual(200, response.status_code)
self.assertEqual(0,len(response.context['tasks']))
task3 = Task.objects.create(description='task 3', deadline = timezone.now())
self.assertEqual(True, task3.is_for_today())
response = self.client.get('http://localhost/rememberTheCheese/today/')
self.assertEqual(1, len(response.context['tasks']))
def test_should_create_user(self):
data = {
'username' : 'username',
"email" : '<EMAIL>',
"password": '<PASSWORD>',
"first_name" : 'foo',
"last_name" : 'bar',
}
form = UserForm({})
self.assertEqual(False,form.is_valid())
form = UserForm(data)
self.assertEqual(True,form.is_valid())
form.save()
self.assertEqual({},form.errors)
data = {
'username' : 'username',
"email" : '<EMAIL>',
"password": '<PASSWORD>',
"first_name" : 'foo',
"last_name" : 'bar',
}
form = UserForm({})
self.assertEqual(False,form.is_valid())
form = UserForm(data)
self.assertEqual(False,form.is_valid())
form.save()
self.assertEqual({},form.errors)
#response = self.client.get(self.base_url+'update_task/' + str(task.id))
``` |
{
"source": "jrandson/Testes-python",
"score": 3
} |
#### File: Testes-python/recursao/test_recursao.py
```python
import unittest
from recursao import find
from recursao import revert_sequence
from recursao import power
from recursao import get_max
from recursao import get_min
from recursao import produto
from recursao import log
from recursao import is_palindrome
class test_recursao(unittest.TestCase):
lista = [12, 20, 26, 29, 32, 39, 49, 50, 51, 53, 57, 58, 59, 62, 69, 79, 80, 85, 95]
    def test_number_exist(self):
self.assertEqual(find(self.lista, 50, 0, 18),7)
    def test_number_border(self):
self.assertEqual(find(self.lista, 12, 0, 18),0)
self.assertEqual(find(self.lista, 95, 0, 18),18)
#---------------------------------------------------------
    def test_revert_sequence(self):
self.assertEqual(revert_sequence('a'),'a')
self.assertEqual(revert_sequence('ab'),'ba')
self.assertEqual(revert_sequence('abc'),'cba')
self.assertEqual(revert_sequence('abcd'),'dcba')
self.assertEqual(revert_sequence('abcde'),'edcba')
def test_power(self):
self.assertEqual(power(1,0),1)
self.assertEqual(power(2,1),2)
self.assertEqual(power(2,3),8)
def test_get_max(self):
self.assertEqual(get_max([10],1),10)
self.assertEqual(get_max([10,12],2),12)
self.assertEqual(get_max([10,15,11],3),15)
self.assertEqual(get_max([10,15,11,20,1,23,19,8],8),23)
def test_get_min(self):
self.assertEqual(get_min([10],1),10)
self.assertEqual(get_min([10,12],2),10)
self.assertEqual(get_min([10,15,11],3),10)
self.assertEqual(get_min([10,15,11,20,1,23,19,8],8),1)
def test_produto(self):
self.assertEqual(produto(2,1),2)
self.assertEqual(produto(2,3),6)
def test_log(self):
self.assertEqual(log(1,2),0)
self.assertEqual(log(2,2),1)
self.assertEqual(log(4,2),2)
def test_is_palindrome(self):
self.assertEqual(is_palindrome('a',0,0),True)
self.assertEqual(is_palindrome('ab',0,1),False)
self.assertEqual(is_palindrome('aa',0,1),True)
self.assertEqual(is_palindrome('aba',0,2),True)
self.assertEqual(is_palindrome('abb',0,2),False)
self.assertEqual(is_palindrome('radebar',0,6),False)
self.assertEqual(is_palindrome('racecar',0,6),True)
self.assertEqual(is_palindrome('abcddcba',0,7),True)
if __name__ == '__main__':
unittest.main()
```
#### File: Testes-python/tic-tac-toe/tictactoe.py
```python
class TicTacToe:
def __init__(self):
self._board = [[' '] * 3 for i in range(3)]
self._player = 'X'
def get_current_player(self):
return self._player
    def mark(self, i, j):
        is_in_border = (0 <= i <= 2) and (0 <= j <= 2)
        if not is_in_border:
            raise ValueError('Invalid board position')
        position_is_free = self._board[i][j] == ' '
        if not position_is_free:
            raise ValueError('Position already occupied')
        game_is_over = self.winner() is not None or self.border_is_full()
        if game_is_over:
            raise ValueError('Game is already complete')
        self._board[i][j] = self._player
        if self._player == 'X':
            self._player = 'O'
        else:
            self._player = 'X'
        print(self)
def border_is_full(self):
board = self._board
for i in range(3):
for j in range(3):
if board[i][j] == ' ':
return False
return True
    def _is_win(self, mark):
        board = self._board
        return (mark == board[0][0] == board[0][1] == board[0][2] or  # rows
                mark == board[1][0] == board[1][1] == board[1][2] or
                mark == board[2][0] == board[2][1] == board[2][2] or
                mark == board[0][0] == board[1][0] == board[2][0] or  # columns
                mark == board[0][1] == board[1][1] == board[2][1] or
                mark == board[0][2] == board[1][2] == board[2][2] or
                mark == board[0][0] == board[1][1] == board[2][2] or  # diagonals
                mark == board[2][0] == board[1][1] == board[0][2])
def winner(self):
for mark in 'XO':
if self._is_win(mark):
return mark
return None
def __str__(self):
rows = [' | '.join(self._board[r]) for r in range(3)]
return '\n---------\n'.join(rows)
game = TicTacToe()
while True:
    mark_str = input("Player " + game.get_current_player() + " (x, y): ")
mark_int = []
for r in mark_str:
if not r == ' ':
mark_int.append(int(r))
if len(mark_int) >= 2:
break
    game.mark(mark_int[0], mark_int[1])
if game.winner() is not None or game.border_is_full():
break
if game.winner() is not None:
    print("Winner: " + game.winner())
```
#### File: Testes-python/word_cloud/main.py
```python
text = open('assay1.txt')
top_words = {}
# words to ignore
def add_top_words(item):
    ignore_pt = ['e','a','as','de','do','da','dos','das',
                 'no','nos','o','os','as','um','uma','como',
                 'na','nas', 'para', 'com', 'que','em']
ignore_en = ['and','to','a','an', 'the','for', 'from','in','of','on','this','these','that','is','are']
ignore = ignore_pt + ignore_en
if not item in ignore:
if item in top_words.keys():
top_words[item] += 1
else:
top_words[item] = 1
def get_words_line(line):
    for item in line:
add_top_words(item)
for line in text:
if not line == '':
words_line = line.split()
get_words_line(words_line)
text.close()
#show the occurrence of the words
threshold = 3
i = 0
for key in sorted(top_words, key=top_words.get, reverse=True):
    if top_words[key] >= threshold:
        print(str(i) + ": " + key + ": " + str(top_words[key]) + " times")
        i += 1
``` |
{
"source": "jrandson/try-youtube-dl",
"score": 3
} |
#### File: try-youtube-dl/testes/main.py
```python
import youtube_dl
from subprocess import call
import gzip
import zipfile, os, fnmatch
import shutil
from os import path, listdir
class System():
    def __init__(self):
pass
    def split_full_path(self, full_path):
        # recurse on the parent directory and concatenate the path components
        parent, name = path.split(full_path)
        if parent in ("", full_path):
            return name
        else:
            return self.split_full_path(parent) + name
def get_extension(self,file_name):
return path.splitext(file_name)[1]
def exists(self,path_dir):
return path.exists(path_dir)
def write_file(self):
a = open('file.txt','a')
a.write('\n and another')
a.close()
def createfile(self):
content = "Lots of content here"
with gzip.open('file.gz', 'wb') as f:
            f.write(content.encode())
def get_content_dir(self,path_dir = None):
if path_dir == None:
path_dir = '.'
if self.exists(path_dir):
return listdir(path_dir)
else:
return []
def get_content_from_current_directory(self):
return listdir('.')
def get_full_path(self, dir_path):
for path_name in listdir(dir_path):
            print(path.join(dir_path, path_name))
def print_tree(self, dir_path):
for name in os.listdir(dir_path):
full_path = os.path.join(dir_path, name)
            print(full_path)
if path.isdir(full_path):
self.print_tree(full_path)
def get_size(self, file_name):
return path.getsize(file_name)
def rename_copy_remove(self):
shutil.move(file_name, new_name)
shutil.copy(source, destine)
        os.remove(file_name)
        os.mkdir('dir_name')
        #create many directories at once
        os.makedirs('dir_parent/another_dir/more_one/dir_name')
        #the dir must be empty first
        os.rmdir('dir_name')
        #danger: removes everything
shutil.rmtree('some_dir')
class YoutubeDl():
def __init__(self):
self.syst = System()
def download_music_from_url(self, url):
os.mkdir('tmp')
os.chdir('tmp')
command = "youtube-dl --extract-audio --audio-format mp3 "
command += url + " -c"
print "downloading " + url
call(command.split(), shell=False)
content = self.syst.get_content_dir()
if len(content) > 1:
file_name = self.compress_all_mp3_files()
shutil.move(file_name,'..')
else:
shutil.copy(content[0],'..')
os.chdir('..')
shutil.rmtree('tmp')
def download_musics_from_file_list(self, file_list):
try:
if not self.syst.exists(file_list):
                raise FileNotFoundError('File not found')
f = open(file_list,'r')
for url in f:
if url :
command = "youtube-dl --extract-audio --audio-format mp3 "
command += url + " -c"
print "downloading " + url
call(command.split(), shell=False)
f.close()
        except OSError as e:
            print('An error has occurred', e)
def compress_all_mp3_files(self, directory =None):
if directory == None:
directory = '.'
file_name = 'donwload.zip'
musics= zipfile.ZipFile( file_name, 'w', zipfile.ZIP_DEFLATED )
contents = self.syst.get_content_dir(directory)
for content in contents:
if self.syst.get_extension(content) == '.mp3':
path_content = path.join(directory, content)
                print(path_content)
musics.write(path_content)
musics.close()
return file_name
def compress_all_files(self, directory = None):
if directory == None:
directory = '.'
file_name = 'download.zip'
contents = self.syst.get_content_dir(directory)
musics= zipfile.ZipFile(file_name, 'w', zipfile.ZIP_DEFLATED )
for content in contents:
path_content = path.join(directory,content)
            print(path_content)
musics.write(path_content)
musics.close()
return file_name
def zip_file(self, file_name):
zip_file = zipfile.ZipFile( 'zip_file.zip', 'w', zipfile.ZIP_DEFLATED )
zip_file .write(file_name)
zip_file .close()
syst = System()
ytd = YoutubeDl()
ytd.download_music_from_url('https://www.youtube.com/watch?v=2oHyOe1tlC8&list=PL8n8VJudkeOEOXYEhwlJDZUfJiNgGhmQb')
#ytd.download_musics_from_file_list('lista.txt')
``` |
{
"source": "jranek/EVI",
"score": 2
} |
#### File: evi/tools/evaluate.py
```python
import evi
import pandas as pd
import numpy as np
import scipy
import harmonypy as hm
from sklearn.preprocessing import MinMaxScaler
def compute_lisi(adata, basis, batch_key, perplexity):
X = adata.obsm[basis]
metadata = pd.DataFrame(adata.obs[batch_key].values, columns = [batch_key])
lisi = hm.compute_lisi(X, metadata, [batch_key], perplexity)
return lisi
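# Minimal usage sketch (assumptions, not from the repo): given an AnnData object `adata`
# with a PCA embedding in adata.obsm['X_pca'] and a batch column in adata.obs['batch'],
#   lisi = compute_lisi(adata, basis='X_pca', batch_key='batch', perplexity=30)
# returns one LISI score per cell, quantifying how well batches mix locally.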
def corr_dist(adata_batch, adata, batch_label, batch_key):
spliced_b = pd.DataFrame(adata_batch.layers['spliced'].todense(), index = adata_batch.obs_names, columns = adata_batch.var_names)
unspliced_b = pd.DataFrame(adata_batch.layers['unspliced'].todense(), index = adata_batch.obs_names, columns = adata_batch.var_names)
spliced_i = pd.DataFrame(adata.layers['spliced'].todense(), index = adata.obs_names, columns = adata.var_names)
unspliced_i = pd.DataFrame(adata.layers['unspliced'].todense(), index = adata.obs_names, columns = adata.var_names)
b = np.where(adata_batch.obs[batch_key] == batch_label)[0]
corr_list = []
for i in range(0, len(adata_batch.var_names)):
df_b = pd.concat([spliced_b.iloc[b, i], unspliced_b.iloc[b, i]], axis = 1)
cellind = df_b.iloc[np.where(df_b.sum(axis = 1) != 0)[0], :].index
df_b = df_b.loc[cellind]
mat_b = np.array(df_b.values)
df_i = pd.concat([spliced_i.iloc[:, i], unspliced_i.iloc[:, i]], axis = 1)
df_i = df_i.loc[cellind]
mat_i = np.array(df_i.values)
rho, pval = scipy.stats.spearmanr(scipy.spatial.distance.pdist(mat_b), scipy.spatial.distance.pdist(mat_i))
corr_list.append(rho)
return corr_list
def average_dataset_metric(df = None, m_order = None, metric = None, palette = None, figsize = None, save = False, filename = None):
#computes ranked aggregate scores by min-max scaling, then taking the mean across datasets
m = df[np.isin(df.index, m_order)]
scaler = MinMaxScaler()
m_ranked = pd.DataFrame(scaler.fit_transform(m), index = m.index, columns = m.columns)
m_ranked = m_ranked.reindex(m_order)
mean_metrics = pd.DataFrame(m_ranked.mean(1), columns = [metric])
nplots = len(m_ranked.columns)
evi.pl.ranked_barplot(df = m_ranked, figsize = figsize, y = m_ranked.index, save = save, palette = palette, filename = filename, nplots = nplots)
return mean_metrics
``` |
{
"source": "jrapin/noseexample",
"score": 4
} |
#### File: noseexample/codes/_core.py
```python
import numpy as np
def _digits_to_number(digits):
"""Returns a number from a sequence of digits
Example
-------
_digits_to_number([4, 3, 6])
>>>> 436
"""
return sum(val * 10**k for k, val in enumerate(digits[::-1]))
class Code(object):
"""4 digits code object
"""
def __init__(self, number):
# raise an exception if the input is incorrect (make it as explicit as possible)
if not isinstance(number, int):
raise TypeError("Expected an int but got an %s instead" % type(number))
# check that it has 4 digits
if number > 9999:
raise ValueError("Number %s has more than 4 digits" % number)
# save the number as a "private" variable (variables starting by "_" are considered private)
self._number = number
@property # this makes digits act as an attribute instead of a method
def digits(self):
# convert number as a string, iterate on the letters, and reconvert each digit as a integer
last_digits = [int(x) for x in "%s" % self._number]
# pad with 0 for the remaining digits
return np.array([0] * (4 - len(last_digits)) + last_digits) # np.array allows for easy math operation
    # overload the + operator. Same thing would be done with * using __mul__, etc...
def __add__(self, other):
if not isinstance(other, Code):
other = Code(other)
digits = (self.digits + other.digits) % 10
        return Code(int(_digits_to_number(digits)))  # cast back to a plain int for the __init__ type check
def get_positive_shift(self):
"""Computes the positive difference between one digit and the next in the code sequence.
Example
-------
code = Code(1439)
code.get_positive_shift()
>>>> [1, 3, 9, 6]
"""
# compute the differences (diff),
# take the modulo toget the positive shift (a difference of -2 is equivalent to a difference of 8)
# stack the first digit
return np.hstack((self.digits[0], np.diff(self.digits) % 10))
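# Minimal usage sketch (illustration only, not part of the original module):
# addition is digit-wise modulo 10, so Code(1439) + Code(672) yields Code(1001).
if __name__ == "__main__":
    code = Code(1439) + Code(672)
    print(code.digits)                # [1 0 0 1]
    print(code.get_positive_shift())  # [1 9 0 1]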
``` |
{
"source": "jrapin/qprojects",
"score": 3
} |
#### File: qprojects/qprojects/_game.py
```python
from pathlib import Path
import ujson as json
import numpy as np
from . import _deck
from . import _utils
_BONUS_CARDS = {_deck.Card(c) for c in ["Qh", "Kh"]}
class DefaultPlayer: # pylint: disable=too-many-instance-attributes
"""Player which selects one card randomly at each round.
Note
----
This class is the base class for other players. They should mostly improve
methods "set_reward" and "_propose_card_to_play". A proposition can be rejected if
it proposed an card which could not be played. A random choice is made in this case,
and the acceptation_ratio attribute allows to keep track of how often the propositions
are accepted.
"""
def __init__(self):
self._cards = None
self._initial_cards = None # keep a record of initial cards for each game
self._order = None
self.last_correct = None
self._last_playable_cards = None
self._card_played_count = 0
self._erroneous_selection_count = 0
self._acceptation_queue = _utils.ReplayQueue(1000)
self.reward_sum = 0
self._last_acceptable = _utils.ReplayQueue(1000)
self.reinitialize()
def reinitialize(self):
self._cards = _deck.CardList([])
self._last_playable_cards = None
self._initial_cards = _deck.CardList([]) # keep a record of initial cards for each game
self._order = None
self.last_correct = 32
def get_acceptation_ratio(self):
"""Ratio of card proposition which have been accepted (allowed to play)
"""
return (self._card_played_count - self._erroneous_selection_count) / self._card_played_count
def get_instantaneous_acceptation_ratio(self):
"""Ratio of card proposition which have been accepted (allowed to play)
"""
return np.mean(self._acceptation_queue._data)
def get_mean_acceptable(self):
"""Ratio of card proposition which have been accepted (allowed to play)
"""
return np.mean(self._last_acceptable._data)
@property
def initial_cards(self):
return self._initial_cards
@property
def order(self):
return self._order
@property
def cards(self):
"""Makes a copy, to make sure no modification happens outside
"""
return _deck.CardList(self._cards, self._cards.trump_suit)
def initialize_game(self, order, cards):
"""Initialize a game with order and cards.
Parameters
----------
order: int
the order in which the player will play
cards: list
a list of 8 cards
Note
----
A game can only be initialized if the card list is empty (no ongoing game)
"""
assert not self._cards, "Cannot initialize a new game when card are still at play: {}".format(self._cards)
assert len(cards) == 8, "Wrong number of cards for initialization: {}.".format(self._cards)
self._cards = cards
self.last_correct = 32
self._initial_cards = _deck.CardList(cards)
self._order = order
def _get_playable_cards(self, board):
"""Returns the cards that can be played
"""
if self._cards.trump_suit is None:
self._cards.trump_suit = board.trump_suit
round_cards = board.get_current_round_cards()
return self._cards.get_playable_cards([] if len(round_cards) == 4 else round_cards)
def get_card_to_play(self, board):
"""Returns an acceptable card to play.
Parameter
---------
board: GameBoard
the current board for the game
Returns
-------
Card
an acceptable card to play in the current game
Note
----
        This function makes sure the sent card is acceptable to play. It keeps track of remaining
        cards, and of how often the propositions (from a neural network for instance) were accepted.
Propositions are provided through the "_propose_card_to_play" method.
The playable cards at this round are kept for later use in set_reward.
"""
selected = self._propose_card_to_play(board)
self._last_playable_cards = self._get_playable_cards(board)
if selected is None or selected not in self._last_playable_cards:
#print(np.round(self._get_expectations(board)), len(board.actions))
self._erroneous_selection_count += 1
self._acceptation_queue.append(False)
selected = np.random.choice(self._last_playable_cards)
card_num = len(board.actions)
if self.last_correct >= card_num:
self.last_correct = card_num
self._last_acceptable.append(card_num)
else:
self._acceptation_queue.append(True)
self._cards.remove(selected)
self._card_played_count += 1
return selected
def set_reward(self, board, value): # pylint: disable=unused-argument
"""Function to be called after each action on the board, to provide feedbacks for neural networks
for instance.
Parameter
---------
board: GameBoard
the current board for the game
value: int
the value of the reward
Note
----
This function is called after *each* action (from any player), while get_card_to_play method
is only called when it is the user's time to play.
"""
self.reward_sum += value
def _propose_card_to_play(self, board): # pylint: disable=unused-argument
"""Propose a card to play thanks to an advanced method.
Parameter
---------
board: GameBoard
the current board for the game
Returns
-------
Card
            a card proposition for playing, which may be unacceptable.
Note
----
Implement a technique here.
"""
pass
def initialize_players_cards(players):
"""Initialize players for a new game.
This function sets the player order and its cards.
Parameter
---------
player: list
a list of 4 players.
"""
assert len(players) == 4
# initialize players' cards
cards = _deck.get_full_deck()
np.random.shuffle(cards)
for k, cards in enumerate(_utils.grouper(cards, 8)):
players[k].initialize_game(k, _deck.CardList(cards))
def play_game(board, players, verbose=False):
"""Plays a game, given a board with biddings and initialized players.
Parameters
----------
board: GameBoard
a board, with biddings already performed, but no action
players: list
a list of 4 initialized players, with 8 cards each and given orders
""" # IMPROVEMENT: handle partially played games
# checks
assert board.biddings, "Biddings must have been already performed"
assert not board.actions, "No cards should have already been played"
for k, player in enumerate(players): # make sure the data is correct
assert player._order == min(3, k)
assert len(player.cards) == 8
# game
for _ in range(32):
player_ind = board.next_player
card = players[player_ind].get_card_to_play(board)
points = board.add_played_card(card, verbose=verbose)
for k, player in enumerate(players):
player.set_reward(board, points[k % 2])
return board
class GameBoard:
"""Elements which are visible to all players.
Attributes
----------
actions: list
played cards, as a list of tuples of type (#player, card)
biddings: list
the sequence of biddings, as a list of tuples of type (#player, points, trump_suit)
"""
def __init__(self, actions=None, biddings=None):
self.biddings = [] if biddings is None else [(p, v, _deck._SUIT_CONVERTER.get(s, s)) for p, v, s in biddings]
self.next_player = 0
self.points = np.zeros((2, 32), dtype=int)
self._actions = [] if actions is None else [(p, _deck.Card(c)) for p, c in actions]
self._current_point_sum = 0
self._bonus_players = set()
self._current_point_position = 0 # checking that all cards are counted only once
if self._actions:
self._update_next_player()
self._process_actions_points()
def _as_dict(self):
data = {"actions": [(p, c.tag) for p, c in self.actions],
"biddings": self.biddings}
return data
@property
def actions(self):
return tuple(self._actions) # avoid direct modification
def dump(self, filepath):
"""Dumps a GameBoard to a file
Parameter
---------
filepath: str or Path
path to the file where to save the GameBoard.
"""
data = self._as_dict()
filepath = Path(filepath)
with filepath.open("w") as f:
json.dump(data, f)
@classmethod
def load(cls, filepath):
"""Loads a GameBoard from a file
Parameter
---------
filepath: str or Path
path to the file where the GameBoard is save.
Returns
-------
GameBoard
the loaded GameBoard
"""
filepath = Path(filepath)
with filepath.open("r") as f:
data = json.load(f)
actions = [(p, _deck.Card(c)) for p, c in data["actions"]]
board = cls(actions, [tuple(b) for b in data["biddings"]])
return board
def add_played_card(self, card, verbose=False):
"""Add the next card played.
The player is assumed to be the next_player.
This function saves the action, updates the next player and computes points.
Parameters
----------
card: Card
the card to play
verbose: bool
whether to print a summary after each round
Returns
-------
np.array
            the points earned by each team, as an array of 2 elements
"""
self._actions.append((self.next_player, card))
player = self.next_player
self._update_next_player()
if verbose and not len(self._actions) % 4:
first_player_index = self.actions[-4][0]
print("Round #{} - Player {} starts: {}".format(len(self.actions) // 4, first_player_index,
self.get_current_round_cards().get_round_string()))
return self._process_card_points(len(self.actions) - 1, card, player, self.next_player)
def _update_next_player(self):
"""Updates the next_player attribute to either the following player (inside a round),
or the winner (end of a round).
"""
if len(self._actions) % 4:
self.next_player = (self._actions[-1][0] + 1) % 4
else:
round_cards = _deck.CardList([x[1] for x in self._actions[-4:]], self.trump_suit)
highest = round_cards.get_highest_round_card()
index = round_cards.index(highest)
self.next_player = (self._actions[-4][0] + index) % 4
def _process_card_points(self, index, card, player, next_player):
"""Computes the points earned after a card being played.
This function keeps a record of unaffected points (inside a round), and updates the "points"
attribute.
Returns
-------
np.array
            the points earned by each team, as an array of 2 elements
"""
assert index == self._current_point_position, "Processing card #{} while expecting #{}".format(index, self._current_point_position)
self._current_point_sum += card.get_points(self.trump_suit)
if not (index + 1) % 4: # end of round
self.points[next_player % 2, index] = self._current_point_sum + (10 if index == 31 else 0)
self._current_point_sum = 0
# special reward
if self.trump_suit == "❤" and card in _BONUS_CARDS:
if player in self._bonus_players:
self.points[player % 2, index] += 20
self._bonus_players.add(player)
self._current_point_position += 1
return self.points[:, index]
@property
def trump_suit(self):
"""Selected trump suit for the game
"""
return self.biddings[-1][-1]
def __repr__(self):
return str(self._as_dict())
def assert_equal(self, other):
"""Asserts that the board is identical to the provided other board.
"""
for name in ["biddings", "actions"]:
for k, (element1, element2) in enumerate(zip(getattr(self, name), getattr(other, name))):
if element1 != element2:
raise AssertionError("Discrepency with element #{} of {}: {} Vs {}".format(k, name, element1, element2))
@property
def is_complete(self):
"""Returns whether the game is complete
        The game is considered complete when all 32 cards have been played.
"""
return len(self.actions) == 32
def assert_valid(self):
"""Asserts that the whole sequence is complete and corresponds to a valid game.
"""
assert self.is_complete, "Game is not complete"
assert len({x[1] for x in self.actions}) == 32, "Some cards are repeated"
cards_by_player = list(self.replay_cards_iterator(with_trump_suit=True))
# check the sequence
first_player = 0
for k, round_actions in enumerate(_utils.grouper(self.actions, 4)):
# player order
expected_players = (first_player + np.arange(4)) % 4
players = [rc[0] for rc in round_actions]
np.testing.assert_array_equal(players, expected_players, "Wrong player for round #{}".format(k))
round_cards_list = _deck.CardList([x[1] for x in round_actions], self.trump_suit)
first_player = (first_player + round_cards_list.index(round_cards_list.get_highest_round_card())) % 4
# cards played
for i, (player, card) in enumerate(round_actions):
visible_round = _deck.CardList(round_cards_list[:i], self.trump_suit)
error_msg = "Unauthorized {} played by player {}.".format(card, player)
assert card in cards_by_player[player].get_playable_cards(visible_round), error_msg
cards_by_player[player].remove(card)
# last winner and function check
assert first_player == self.next_player, "Wrong winner of last round"
assert not any(x for x in cards_by_player), "Remaining cards, this function is improperly coded"
def get_current_round_cards(self):
"""Return the cards for the current round (or the round just played if all 4 cards have been played)
"""
end = min(len(self.actions), 32)
start = max(0, ((end - 1) // 4)) * 4
return _deck.CardList([x[1] for x in self.actions[start: end]], self.trump_suit)
def _process_actions_points(self):
"""Computes the sequence of points for both teams, on a complete game.
Returns
-------
np.array
a 2x32 array, with row 0 corresponding to points earned by team #0
            (players #0 and #2) and row 1 to team #1 (players #1 and #3) at
each card played.
Note
----
Only the 20 point bonus can be earned out of the end of a round.
"""
unprocessed = self.actions[self._current_point_position:]
for k, (player, card) in enumerate(unprocessed):
next_player = self.next_player if k + 1 == len(unprocessed) else unprocessed[k + 1][0]
self._process_card_points(self._current_point_position, card, player, next_player)
def replay_cards_iterator(self, with_trump_suit=False):
"""Create a new board with same card initializaton
Parameter
---------
with_trump_suit: bool
whether to set the same trump suit to the yielded Card lists (sets it to None otherwise)
Returns
-------
generator
a generator providing the cards of each player (from a complete game)
"""
assert self.is_complete, "Only finisehed games can be replayed"
cards_by_player = [[] for _ in range(4)]
for p_card in self.actions:
cards_by_player[p_card[0]].append(p_card[1])
return (_deck.CardList(cards, self.trump_suit if with_trump_suit else None) for cards in cards_by_player)
```
#### File: qprojects/qprojects/test_deck.py
```python
from unittest import TestCase
import genty
import numpy as np
from . import _deck
from . import _utils
from ._deck import Card as C
def test_card_equality_and_global_index():
card1 = _deck.Card("K♦")
card2 = _deck.Card("K♦")
assert card1 == card2
assert card1 == "K♦"
assert card1 != None # pylint: disable=singleton-comparison
assert card1 != 3
_ = _deck.Card("Q♦")
card3 = _deck.Card(card2)
assert card3 == card2
np.testing.assert_equal(card1.global_index, 13)
def test_card_hash():
card1 = _deck.Card("K♦")
card2 = _deck.Card("K♦")
card3 = _deck.Card("Q♦")
assert {card1, card2, card3} == {card1, card3}
def test_card_points_suit_and_value():
card = _deck.Card("J♦")
np.testing.assert_equal(card.get_points("♦"), 20)
np.testing.assert_equal(card.get_points("❤"), 2)
np.testing.assert_equal(card.suit, "♦")
np.testing.assert_equal(card.value, "J")
card = _deck.Card("10d")
np.testing.assert_equal(card.value, "10")
np.testing.assert_equal(card.suit, "♦")
np.testing.assert_equal(card.get_points("❤"), 10)
def test_cardlist_wrong_suit():
np.testing.assert_raises(AssertionError, _deck.CardList, [], "x")
def test_get_highest_round_card_empty():
cards = _deck.CardList([], "h")
np.testing.assert_equal(cards.get_highest_round_card(), None)
def test_get_round_string_error():
cards = _deck.CardList([], "h")
np.testing.assert_raises(RuntimeError, cards.get_round_string)
def test_full_deck_and_cardlist_as_array():
deck = _deck.get_full_deck()
np.testing.assert_equal(len(deck), 32)
np.testing.assert_equal(len(set(deck)), 32)
np.testing.assert_array_equal(deck.as_array(), [1 for _ in range(32)])
def test_global_index():
np.random.seed()
global_index = np.random.randint(32)
card = _deck.Card.from_global_index(global_index)
np.testing.assert_equal(card.global_index, global_index, err_msg="Card does not match global_index")
@genty.genty
class DeckTests(TestCase):
@genty.genty_dataset(
no_trump=("♣", "K♦"),
first_trump=("♦", "9♦"),
other_trump=("♠", "J♠"),
)
def test_get_highest_round_card(self, trump_suit, expected):
cards = [C("9♦"), C("K♦"), C("Q♠"), C("J♠"), C("A❤")]
cards = _deck.CardList(cards, trump_suit)
highest_card = cards.get_highest_round_card()
np.testing.assert_equal(highest_card, C(expected))
@genty.genty_dataset(
same_suit=(True, ["8♦", "Q♦"], ["9♦", "K♦"]),
same_suit_trump_by_partner=(True, ["8❤", "Q♦"], ["A❤"]),
same_suit_high_trump_by_partner=(True, ["J❤", "Q♦"], ["A❤", "7❤"]),
no_card=(True, ["7♣", "8♣"], ["A❤", "7❤"]),
no_card_with_lead=(True, ["9♣", "8♣"], ["A❤", "7❤", "9♦", "K♦", "Q♠", "J♠"]),
no_card_with_trump=(True, ["8♣", "8❤"], ["A❤"]),
no_card_with_high_trump=(True, ["8♣", "J❤"], ["A❤", "7❤"]),
no_card_with_trump_lead=(True, ["8♣", "8❤", "9♣"], ["A❤", "9♦", "K♦", "Q♠", "J♠"]),
no_card_no_trump=(False, ["8♣", "8❤"], ["9♦", "K♦", "Q♠", "J♠"]),
first_no_trump=(False, [], ["9♦", "K♦", "Q♠", "J♠"]),
)
def test_get_playable_cards(self, has_trump, round_cards, expected):
trump_suit = "❤"
expected = _deck.CardList(expected, trump_suit=trump_suit)
round_cards = _deck.CardList(round_cards, trump_suit)
hand_cards = _deck.CardList(["9d", "Kd", "Qs", "Js"] + (["Ah", "7h"] if has_trump else []), trump_suit)
playable = hand_cards.get_playable_cards(round_cards)
_utils.assert_set_equal(playable, expected)
# check that the order is deterministic
playable.assert_equal(expected)
@genty.genty_dataset(
no_trump=(["Q♦", "K♠", "9♦", "J♠"], '[ Q♦ ] K♠ 9♦ J♠ '),
with_trump=(["Q♦", "K❤", "9♦", "J♠"], ' Q♦ [ K❤ *] 9♦ J♠ '),
)
def test_get_round_string(self, cards, expected):
trump_suit = "❤"
cards = _deck.CardList(cards, trump_suit)
np.testing.assert_equal(cards.get_round_string(), expected)
@genty.genty_dataset(
same=("♣", ["8♣", "A❤"], None),
other_card=("♣", ["8♣", "Q❤"], AssertionError),
other_trump=("❤", ["8♣", "A❤"], AssertionError),
longer=("♣", ["8♣", "A❤", "Q❤"], AssertionError),
shorter=("♣", ["8♣"], AssertionError),
)
def test_card_list_assert_equal(self, other_trump, other_cards, expected):
round_cards = _deck.CardList(["8♣", "A❤"], "♣")
other = _deck.CardList(other_cards, other_trump)
if expected is None:
round_cards.assert_equal(other)
else:
np.testing.assert_raises(expected, round_cards.assert_equal, other)
```
#### File: jrapin/qprojects/script.py
```python
import numpy as np
import qprojects
import keras
network = qprojects.BasicNetwork(model_filepath="basic_start_17.h5", verbose=0, learning_rate=0.01)
#network = qprojects.BasicNetwork(model_filepath=None, verbose=0, learning_rate=0.01)
network.online_training = False
players = [qprojects.IntelligentPlayer(network) for _ in range(4)]
#learning_rate = 0.000001
learning_rate = 0.00001
#optimizer = keras.optimizers.SGD(lr=learning_rate, nesterov=False)
optimizer = keras.optimizers.RMSprop(lr=learning_rate, clipnorm=1)
network.model.compile(loss=network.output_framework.error, optimizer=optimizer)
def play_a_game(players, verbose=False):
qprojects.initialize_players_cards(players)
board = qprojects.GameBoard([], [(0, 80, np.random.choice([l for l in "hdsc"]))])
qprojects.play_game(board, players, verbose=verbose)
return board
learning_rate = 0.000001 # .001 decreased to .0002
#optimizer = keras.optimizers.SGD(lr=learning_rate, nesterov=False)
optimizer = keras.optimizers.RMSprop(lr=learning_rate) # , clipnorm=1)
network.model.compile(loss=network.output_framework.error, optimizer=optimizer)
num_external_iter = 0
batch_size = 16
while True:
for k in range(50):
if not (k + 1) % 10:
print(num_external_iter + 1, k + 1)
for p in players:
p.reinitialize()
board = play_a_game(players, verbose=False)
print("Acceptation ratio: {}".format(players[0].get_instantaneous_acceptation_ratio()))
print("Last acceptable: {}".format(players[0].get_mean_acceptable()))
num_external_iter += 1
#
policy = qprojects.epoch_policy(max_epoch=5)
cond = next(policy) # prime the policy
while cond:
output = network.fit(epochs=1, batch_size=batch_size)
cond = policy.send((output.history['loss'][-1], output.history['val_loss'][-1]))
# network.model.save("penalty_71.h5")
network.model.save("split_playability_conv_37.h5")
network.model.save("basic_start_19.h5")
# examples
index = np.random.randint(len(network._queue))
data = network._queue._data[index]
print(data[1])
output = network.predict(data[0])
output = network.model.predict(data[0][None, :, :])[0, :, :]
#print(np.round(output[:, None], 1))
np.concatenate([data[1], np.round(output, 1)], axis=1)
network._queue.get_random_selection(15)
errors = [(x, y) for x, y in network._queue._data if np.max(abs(y[:, 0] - network.model.predict(x[None, :, :])[0, :, 0])) > 0.7]
print(len(errors))
index = np.random.randint(len(errors))
data = errors[index]
print(data[1])
output = network.predict(data[0])
output = network.model.predict(data[0][None, :, :])[0, :, :]
#print(np.round(output[:, None], 1))
np.concatenate([data[1], np.round(output, 1)], axis=1)
batch_data = errors
batch_representation, batch_expected = (np.array(x) for x in zip(*batch_data))
X = np.array(batch_representation)
y = np.array(batch_expected)
thresh = int(0.9 * X.shape[0])
X_train, y_train = [x[:thresh, :, :] for x in (X, y)]
X_test, y_test = [x[thresh:, :, :] for x in (X, y)]
epochs = 100
shuffle = True
batch_size = 4
network.model.fit(X_train, y_train, batch_size=batch_size, epochs=epochs, shuffle=shuffle, validation_data=(X_test, y_test))
# TODO: look at cost function (verbose=1)
# TODO: look at outputs
# TODO: offline learning
# TODO: add random composant
# TODO: split model
# print(model.summary())
#from keras.utils.vis_utils import plot_model
#plot_model(model, to_file='model_plot.png', show_shapes=True, show_layer_names=True)
``` |
{
"source": "jrapin/submitit",
"score": 3
} |
#### File: submitit/docs/mnist.py
```python
import functools
import pickle
import time
from pathlib import Path
import numpy as np
from sklearn.datasets import fetch_openml
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.utils import check_random_state
import submitit
class MnistTrainer(submitit.helpers.Checkpointable):
"""
    This shows how to rewrite a monolithic function so that it can handle preemption nicely,
    and not restart from scratch every time it's preempted.
"""
def __init__(self, clf):
# This is the state that will be saved by `checkpoint`
self.train_test = None
self.scaler = None
self.clf = clf
self.trained_clf = False
self.stage = "0"
def __call__(self, train_samples: int, model_path: Path = None):
# `train_samples` and `model_path` will also be saved
log = functools.partial(print, flush=True)
log(f"*** Starting from stage '{self.stage}' ***")
if self.train_test is None:
self.stage = "Data Loading"
t0 = time.time()
log(f"*** Entering stage '{self.stage}' ***")
# Load data from https://www.openml.org/d/554
X, y = fetch_openml("mnist_784", version=1, return_X_y=True)
random_state = check_random_state(0)
permutation = random_state.permutation(X.shape[0])
X = X[permutation]
y = y[permutation]
X = X.reshape((X.shape[0], -1))
# Checkpoint 1: save the train/test splits
X_train, X_test, y_train, y_test = train_test_split(
X, y, train_size=train_samples, test_size=10000
)
self.train_test = X_train, X_test, y_train, y_test
log(f"Loaded data, shuffle and split in {time.time() - t0:.1f}s")
X_train, X_test, y_train, y_test = self.train_test
if self.scaler is None:
self.stage = "Data Cleaning"
t0 = time.time()
log(f"*** Entering stage '{self.stage}' ***")
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)
            # Scaling is actually pretty fast; make it a bit slower to allow preemption to happen here
time.sleep(10)
# Checkpoint 2: save the scaler and the preprocessed data
self.scaler = scaler
self.train_test = X_train, X_test, y_train, y_test
log(f"Scaled the data took {time.time() - t0:.0f}s")
if not self.trained_clf:
self.stage = "Model Training"
t0 = time.time()
log(f"*** Entering stage '{self.stage}' ***")
self.clf.C = 50 / train_samples
self.clf.fit(X_train, y_train)
# Checkpoint 3: mark the classifier as trained
self.trained_clf = True
log(f"Training took {time.time() - t0:.0f}s")
sparsity = np.mean(self.clf.coef_ == 0) * 100
score = self.clf.score(X_test, y_test)
log(f"Sparsity with L1 penalty: {sparsity / 100:.2%}")
log(f"Test score with L1 penalty: {score:.4f}")
if model_path:
self.save(model_path)
return score
def checkpoint(self, *args, **kwargs):
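        # On preemption submitit calls this method; the base Checkpointable implementation
        # returns a DelayedSubmission that resubmits this (pickled) instance with the same
        # arguments, so training resumes from the last completed stage.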
print(f"Checkpointing at stage '{self.stage}'")
return super().checkpoint(*args, **kwargs)
def save(self, model_path: Path):
with open(model_path, "wb") as o:
pickle.dump((self.scaler, self.clf), o, pickle.HIGHEST_PROTOCOL)
def main():
t0 = time.time()
# Cleanup log folder.
# This folder may grow rapidly especially if you have large checkpoints,
# or submit lot of jobs. You should think about an automated way of cleaning it.
folder = Path(__file__).parent / "mnist_logs"
if folder.exists():
for file in folder.iterdir():
file.unlink()
ex = submitit.AutoExecutor(folder)
if ex.cluster == "slurm":
print("Executor will schedule jobs on Slurm.")
else:
print(f"!!! Slurm executable `srun` not found. Will execute jobs on '{ex.cluster}'")
model_path = folder / "model.pkl"
trainer = MnistTrainer(LogisticRegression(penalty="l1", solver="saga", tol=0.1, multi_class="auto"))
# Specify the job requirements.
# Reserving only as much resource as you need ensure the cluster resource are
# efficiently allocated.
ex.update_parameters(mem_gb=1, cpus_per_task=4, timeout_min=5)
job = ex.submit(trainer, 5000, model_path=model_path)
print(f"Scheduled {job}.")
# Wait for the job to be running.
while job.state != "RUNNING":
time.sleep(1)
print("Run the following command to see what's happening")
print(f" less +F {job.paths.stdout}")
# Simulate preemption.
# Tries to stop the job after the first stage.
# If the job is preempted before the end of the first stage, try to increase it.
# If the job is not preempted, try to decrease it.
time.sleep(25)
print(f"preempting {job} after {time.time() - t0:.0f}s")
job._interrupt()
score = job.result()
print(f"Finished training. Final score: {score}.")
print(f"---------------- Job output ---------------------")
print(job.stdout())
print(f"-------------------------------------------------")
assert model_path.exists()
with open(model_path, "rb") as f:
(scaler, clf) = pickle.load(f)
sparsity = np.mean(clf.coef_ == 0) * 100
print(f"Sparsity with L1 penalty: {sparsity / 100:.2%}")
if __name__ == "__main__":
main()
```
#### File: submitit/submitit/conftest.py
```python
import time
from pathlib import Path
import pytest
from .local.local import LocalExecutor
@pytest.fixture()
def executor(tmp_path: Path) -> LocalExecutor:
return LocalExecutor(tmp_path)
@pytest.fixture(params=["a_0", "a 0", 'a"=0"', "a'; echo foo", r"a\=0", r"a\=", "a\n0"])
def weird_tmp_path(request, tmp_path: Path) -> Path:
return tmp_path / request.param
@pytest.fixture()
def fast_forward_clock(monkeypatch):
"""Allows to go in the future."""
clock_time = [time.time()]
monkeypatch.setattr(time, "time", lambda: clock_time[0])
def _fast_forward(minutes: float):
clock_time[0] += minutes * 60
return _fast_forward
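# Usage sketch (assumption, not in the original file): a test that requests this fixture
# can call fast_forward_clock(5) to make time.time() report a value five minutes later
# without actually sleeping.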
``` |
{
"source": "jraporta/Dog",
"score": 4
} |
#### File: jraporta/Dog/dog.py
```python
class Dog:
species = 'caniche'
def __init__(self, name, age):
self.name = name
self.age = age
bambi = Dog("Bambi", 5)
mikey = Dog("Rufus", 6)
blacky = Dog("Fosca", 9)
coco = Dog("Coco", 13)
perla = Dog("Neska", 3)
print("{} is {} and {} is {}.". format(bambi.name, bambi.age, mikey.name, mikey.age))
if bambi.species == "caniche":
print("{0} is a {1}!".format(bambi.name, bambi.species))
def get_biggest_number(*arguments):
    biggest = 0
    for i in arguments:
        if not isinstance(i, int):
            return "Error: Arguments must be integers"
        if i > biggest:
            biggest = i
    return biggest
print('The biggest number among the given is: ' , get_biggest_number(1 , 4 , 5 , -4 , 6 , 123 , 0))
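# Note (illustration, not in the original script): when the arguments are known to be
# integers, Python's built-in max gives the same result:
print('Using the built-in max:', max([1, 4, 5, -4, 6, 123, 0]))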
``` |
{
"source": "jraporta/Segunda_Prueba",
"score": 3
} |
#### File: Segunda_Prueba/helloworld/main.py
```python
import sys
def main(argv=None):
if argv is None:
argv = sys.argv
print ("Hello, world. Esto son pruebas del curso IoT.")
return 0
``` |
{
"source": "jraramhoej/ironhack_final_project",
"score": 2
} |
#### File: ironhack_final_project/application/views.py
```python
from flask import Blueprint, render_template, request, Response
from flask_login import login_required, current_user
from .models import Slack
from . import db
import json
import pandas as pd
from application.helper_functions import send_response_message, get_slack_data, time_series_analysis, network_analysis
views = Blueprint('views', __name__)
# home page with links to other pages
@views.route('/', methods=['GET', 'POST'])
@login_required
def home():
# define slack user id for current user
slack_user_id = current_user.slack_user_id
# load data from database
query = "SELECT * FROM slack WHERE user_id = \"" + slack_user_id + "\";"
# execute query on database from pandas
df = pd.read_sql(query, db.session.bind)
# analyse data
if len(df) != 0:
users = network_analysis(df)
return render_template(
"home.html",
user=current_user,
users=users,
)
else:
return render_template(
"error_page.html",
user=current_user
)
# endpoint for retrieving slack data
@views.route("/slack", methods=["POST"])
def slack():
if request.method == "POST":
# retrieve data from endpoint
data = request.form
# define user
user_id = data.get('user_id')
# define user input text from slash command
text = data.get("text")
# send response message to slack user
send_response_message(user_id)
# save slack data to database
for row in list(get_slack_data(user_id, text).to_records(index=False)):
client_msg_id = Slack.query.filter_by(client_msg_id=row[0]).first()
if client_msg_id is None:
db.session.add(Slack(client_msg_id=row[0], user_id=row[1], reply_users=row[2], user=row[3], text=row[4], ts=row[5]))
db.session.commit()
else:
pass
return Response(), 200
# page for displaying time series data
@views.route('/time_series', methods=['GET', 'POST'])
@login_required
def message_count():
# define slack user id for current user
slack_user_id = current_user.slack_user_id
# load data from database
query = "SELECT * FROM slack WHERE user_id = \"" + slack_user_id + "\";"
# execute query on database from pandas
df = pd.read_sql(query, db.session.bind)
if len(df) != 0:
# modify data for total number of messages time series
time_series = time_series_analysis(df)
# time series data
date_labels = list(time_series["data"].index.strftime("%m-%d-%y"))
over_time_messages = list(time_series["data"]["count"])
# time series prediction
date_labels_pred = list(time_series["predictions"].index.strftime("%m-%d-%y"))
over_time_messages_pred = list(time_series["predictions"])
return render_template(
"message_count.html",
user=current_user,
over_time_messages=json.dumps(over_time_messages),
date_labels =json.dumps(date_labels),
over_time_messages_pred=json.dumps(over_time_messages_pred),
date_labels_pred =json.dumps(date_labels_pred)
)
else:
return render_template(
"error_page.html",
user=current_user
)
@views.route('/graph', methods=['GET', 'POST'])
@login_required
def graph():
# define slack user id for current user
slack_user_id = current_user.slack_user_id
# load data from database
query = "SELECT * FROM slack WHERE user_id = \"" + slack_user_id + "\";"
# execute query on database from pandas
df = pd.read_sql(query, db.session.bind)
if len(df) != 0:
network_analysis(df)
return render_template("graph.html", user=current_user)
else:
return render_template(
"error_page.html",
user=current_user
)
``` |
{
"source": "jrasero/confounds",
"score": 3
} |
#### File: confounds/confounds/harmonize.py
```python
from confounds.base import BaseDeconfound
class Harmonize(BaseDeconfound):
"""
Estimator to transform the input features to harmonize the input features
across a given set of confound variables.
Example methods include:
Scaling (global etc)
Normalization (Quantile, Functional etc)
Surrogate variable analysis
ComBat
"""
def __init__(self):
"""Constructor"""
super().__init__(name='Harmonize')
raise NotImplementedError()
``` |
{
"source": "jraska1/py-algo-complex",
"score": 2
} |
#### File: jraska1/py-algo-complex/main.py
```python
import click
import numpy as np
from scipy.optimize import curve_fit
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
SAMPLE_COUNT = 100
REGRESSION_FUNCTIONS = {
'O(1)': (lambda x, a: a, "{0:6f}"),
'O(log n)': (lambda x, a, b: a + b * np.log2(x), "{0:6f} + {1:6f} * log2(x)"),
'O(n)': (lambda x, a, b: a + b * x, "{0:6f} + {1:6f} * x"),
'O(n log n)': (lambda x, a, b: a + b * x * np.log2(x), "{0:6f} + {1:6f} * x * log2(x)"),
'O(n^2)': (lambda x, a, b: a + b * np.power(x, 2, dtype=float), "{0:6f} + {1:6f} * x^2"),
'O(n^2 log n)': (lambda x, a, b: a + b * np.power(x, 2, dtype=float) * np.log2(x), "{0:6f} + {1:6f} * x^2 * log2(x)"),
'O(n^3)': (lambda x, a, b: a + b * np.power(x, 3, dtype=float), "{0:6f} + {1:6f} * x^3"),
'O(2^n)': (lambda x, a, b: a + b * np.power(2, x, dtype=float), "{0:6f} + {1:6f} * 2^x"),
}
def set_verbose(ctx, param, value):
click.get_current_context().obj['verbose'] = value
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('-x', '--x-value', type=int, help="Independent variable to predict execution time")
@click.option('--sample-count', type=int, default=SAMPLE_COUNT, show_default=True, help="Number of samples used for data normalization")
@click.option('--delimiter', type=str, default=' ', show_default=True, help="Field delimiter character")
@click.option('-v', '--verbose', count=True, callback=set_verbose, expose_value=False, help='To be more verbose')
@click.argument('src', type=click.File('r'))
def cli(x_value, sample_count, delimiter, src):
"""
Algorithm Complexity Checker - tool for estimating complexity of software, computing regression parameters and predicting execution time.
Produced by: DSW - Dwarf Software Workshop
Licence: Apache Licence, version 2.0
"""
verbose(">>> Phase Data Acquisition <<<", level=2)
x_values, y_values = [], []
for line in src:
a, b = line.split(delimiter)
x_values.append(int(a))
y_values.append(float(b))
verbose(f"SOURCE DATA X: {x_values}", level=2)
verbose(f"SOURCE DATA Y: {y_values}", level=2)
complexity = complexity_phase(x_values, y_values, sample_count)
verbose(f"Algorithm Complexity Estimation: {complexity}", level=0)
popt = regression_phase(x_values, y_values, complexity)
verbose(f"Regression Function: {REGRESSION_FUNCTIONS[complexity][1].format(*popt)}", level=0)
if x_value is not None:
verbose(f"Predicted Execution Time: {predict_phase(complexity, x_value, popt):6f}", level=0)
def complexity_phase(x_values, y_values, samples):
"""
Chooses algorithm complexity, which best suites provided data sample.
:param x_values: independent variable representing sample data count
:param y_values: dependent variable representing execution time (usually in seconds)
:param samples: number of samples used for normalization
:return: algorithm complexity label
"""
verbose(">>> Phase Complexity Check <<<", level=2)
x = np.array(x_values)
y = np.array(y_values)
xx = np.linspace(np.min(x), np.max(x), samples, dtype=int)
yy = np.interp(xx, x, y)
min_y = np.min(yy)
max_y = np.max(yy)
norm_x = np.arange(1, samples + 1)
    norm_y = (yy - min_y) / (max_y - min_y)
verbose(f"Normalized X: {norm_x}", level=2)
verbose(f"Normalized Y: {norm_y}", level=2)
complexity = {
'O(1)': (lambda v: np.ones(v.shape), 2.0),
'O(log n)': (lambda v: np.log2(v), np.log2(samples)),
'O(n)': (lambda v: v, samples),
'O(n log n)': (lambda v: v * np.log2(v), samples * np.log2(samples)),
'O(n^2)': (lambda v: np.power(v, 2), np.power(samples, 2)),
'O(n^2 log n)': (lambda v: np.power(v, 2) * np.log2(v), np.power(samples, 2) * np.log2(samples)),
'O(n^3)': (lambda v: np.power(v, 3), np.power(samples, 3)),
'O(2^n)': (lambda v: np.exp2(v), np.exp2(samples)),
}
res = []
for comp, (func, coef) in complexity.items():
z = np.sum(np.power(norm_y - func(norm_x) / coef, 2))
res.append((comp, z))
verbose(f"Least Squares Results: {res}", level=1)
return min(res, key=lambda a: a[1])[0]
def regression_phase(x_values, y_values, label):
"""
Computes regression function parameters.
:param x_values: independent variable representing sample data count
:param y_values: dependent variable representing execution time (usually in seconds)
:param label: complexity label
:return: regression function parameters
"""
verbose(">>> Phase Regression Computing <<<", level=4)
x = np.array(x_values, dtype=float)
y = np.array(y_values, dtype=float)
popt, pcov = curve_fit(REGRESSION_FUNCTIONS[label][0], x, y)
verbose(f"Regression Function Parameters: {popt}", level=1)
verbose(f"Regression Parameters Error: {np.sqrt(np.diag(pcov))}", level=1)
return popt
def predict_phase(label, x, popt):
"""
Evaluates algorithm complexity function for provided variable and computed parameters.
:param label: complexity label
:param x: independent variable
:param popt: complexity function parameters
:return: function evaluation result
"""
verbose(">>> Phase Execution Time Prediction <<<", level=2)
return REGRESSION_FUNCTIONS[label][0](x, *popt)
def verbose(message, *, level=1):
"""
Write a message to stdout, if the verbose flag is set on.
:param message: message to be written
:param level: required level of verbosity
"""
if click.get_current_context().obj.get('verbose', 0) >= level:
print(message)
if __name__ == '__main__':
cli(obj={})
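# Usage sketch (hypothetical data file): given 'timings.txt' with one "n seconds" pair per
# line (space-delimited), the following would estimate the complexity class, print the
# fitted regression function, and predict the run time for n = 1000000:
#   python main.py -v -x 1000000 timings.txt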
``` |
{
"source": "jraska1/py-isac",
"score": 2
} |
#### File: jraska1/py-isac/py-isac.py
```python
import sys
import click
import requests
import json
import re
import base64
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
URL_BASE = "http://localhost:8080/g3/"
API_VERSION_MIN = "4.01.00"
@click.group(context_settings=CONTEXT_SETTINGS)
@click.option('-b', '--base', type=str, default=URL_BASE, show_default=True, help="Service Base URL")
@click.option('-u', '--user', type=str, default='amis', show_default=True, help="User login")
@click.option('-p', '--password', default='<PASSWORD>', show_default=True, type=str, help="User password")
@click.option('--username', type=str, help="Full User name")
@click.option('--pretty', type=bool, default=False, is_flag=True, show_default=True, help="print pretty formatted output")
@click.pass_context
def cli(ctx, base, user, password, username, pretty):
"""
ICZ ISAC Client - tools for calling services through the REST API.
    These tools are intended for testing purposes only.
"""
ctx.ensure_object(dict)
ctx.obj.update({
'base': base.rstrip('/'),
'user': user,
'password': password,
'username': username,
'pretty': pretty,
})
def validate_date(ctx, param, value):
if value and not re.match(r'^\d{4}-\d{2}-\d{2}$', value):
raise click.BadParameter('date parameter should be in format YYYY-MM-DD')
return value
def call_api(url, *, params=None, json=None, headers=None, auth=(), accept_codes=()):
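    """
    POST to the given URL and return the response body as text.
    Returns None when the response status code is listed in accept_codes;
    prints an error and exits on connection problems or other HTTP error
    statuses.
    """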
try:
resp = requests.post(url, data=params, json=json, headers=headers, auth=auth)
if resp.status_code in accept_codes:
return None
resp.raise_for_status()
return resp.text
except requests.exceptions.RequestException as e:
print(f"Connection error: {e}", file=sys.stderr)
sys.exit(1)
@cli.command()
@click.pass_context
def info(ctx):
"""
ISAC Communication Node operational information.
"""
params = {
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/app.json', params=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.pass_context
def config(ctx):
"""
Communication Node Configuration information.
"""
params = {
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/nodeconfig.json', params=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.pass_context
def status(ctx):
"""
Communication Node Status information.
"""
params = {
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/nodestatus.json', params=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.pass_context
def provider(ctx):
"""
HealthCare Provider detail information as a source for editing.
"""
params = {
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/confedit/provider.json', params=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.pass_context
def prodsys(ctx):
"""
Production system detail information.
"""
params = {
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/prodsystem/get.json', params=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.option('--id', type=str, required=True, help="Patient ID - rodne cislo")
@click.option('--firstname', type=str, required=False, help="Patient First Name")
@click.option('--lastname', type=str, required=False, help="Patient Last Name")
@click.pass_context
def patsum(ctx, id, firstname, lastname):
"""
Patient Emergency Information Summary Client.
"""
params = {
'rc': id,
'firstname': firstname,
'lastname': lastname,
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/ec.json', params=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.option('--provoid', type=str, required=True, help="HealthCare Provider OID")
@click.option('--eventid', type=str, required=True, help="Clinical Event ID")
@click.pass_context
def docview(ctx, provoid, eventid):
"""
Patient Clinical Event Documentation View Client.
"""
params = {
'icz': provoid,
'eventId': eventid,
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/DocumentView.json', params=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.option('--rc', type=str, required=True, help="Patient ID - <NAME>")
@click.option('--lastname', type=str, help="Patient Surname")
@click.option('--from', 'fromdate', type=str, default='2000-01-01', show_default=True, callback=validate_date, help="Start date for searching documents")
@click.option('--to', 'todate', type=str, callback=validate_date, help="End date for searching documents")
@click.pass_context
def survey(ctx, rc, lastname, fromdate, todate):
"""
Patient Documentation Survey Client.
"""
params = {
'rc': rc,
'lastname': lastname,
'from': fromdate,
'to': todate,
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/survey.json', json=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.option('--patoid', type=str, required=False, help="Patient OID")
@click.option('--orgoid', type=str, required=True, help="HealthCare Provider OID")
@click.option('--docoid', type=str, required=True, help="Document OID")
@click.option('--bodytype', type=str, required=False, default="text/plain", show_default=True, help="Required MimeType of the Document")
@click.option('-o', '--output', type=click.File(mode='wb'), required=False, help="Write the Document Body to file")
@click.pass_context
def handover(ctx, patoid, orgoid, docoid, bodytype, output):
"""
Patient Documentation Handover Client.
"""
params = {
'patoid': patoid,
'orgoid': orgoid,
'docoid': docoid,
'bodytype': bodytype,
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/handover.json', json=params, auth=(ctx.obj['user'], ctx.obj['password']))
if output:
data = json.loads(data)
if 'body' in data:
body = base64.b64decode(data['body'])
output.write(body)
else:
if ctx.obj['pretty'] and data:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.option('--contype', type=str, required=False, default="application/xml", show_default=True, help="Document Content Type")
@click.option('-i', '--input', 'inputFile', type=click.File(mode='rb'), required=False, help="Read the Document Body from file")
@click.pass_context
def senddoc(ctx, contype, inputFile):
"""
Send Document to other HealthCare Provider.
"""
params = inputFile.read()
headers = {
'Content-Type': contype,
}
data = call_api(ctx.obj['base'] + '/msgstore/senddoc.json', params=params, headers=headers, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
@cli.command()
@click.option('-o', '--output', type=click.File(mode='wb'), required=False, default=sys.stdout, show_default=True, help="Write the Document Body to file")
@click.pass_context
def recvdoc(ctx, output):
"""
Receive Document from other HealthCare Provider.
"""
data = call_api(ctx.obj['base'] + '/msgstore/download', auth=(ctx.obj['user'], ctx.obj['password']), accept_codes=(404, ))
if data:
output.write(data)
@cli.command()
@click.pass_context
def bedfund(ctx):
"""
Bed Fund Survey Client.
"""
params = {
'username': ctx.obj['username'],
}
data = call_api(ctx.obj['base'] + '/beds.json', params=params, auth=(ctx.obj['user'], ctx.obj['password']))
if ctx.obj['pretty']:
print(json.dumps(json.loads(data), indent=4))
else:
print(data)
if __name__ == '__main__':
cli(obj={}) # pylint: disable=unexpected-keyword-arg, no-value-for-parameter
``` |
{
"source": "jrasky/planetlabs-challenge",
"score": 4
} |
#### File: jrasky/planetlabs-challenge/stocks.py
```python
import unittest
def find_profit(prices, window):
"""Given a certain window size and a list of prices, find the highest
profit possible if exactly one share is bought then sold within that
    period. Returns this profit."""
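    # Example (mirrors the unit tests below): with prices
    # [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] and a window of 3, the best trade buys at
    # 1.0 and sells two steps later at 3.0, so the function returns 2.0.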
# back_prices keeps track of previous prices
# this is a copy so we don't have to access prices directly
back_prices = []
# pivot is the lowest price in the window
pivot = None
# next_pivot is the lowest price in the window after pivot
# this is where pivot is moved if it falls out of the window
next_pivot = None
# accumulated maximum profit
profit = 0
# this is the only direct access of prices, and only assumes that an
# __iter__ function is available
for i, price in enumerate(prices):
# add the current price to back_prices
back_prices.append(price)
# trim the back prices list to only be the window length
while len(back_prices) > window + 1:
back_prices.pop(0)
# test to see if we've found a lower pivot
if pivot is None or price < back_prices[pivot - i - 1]:
# set the pivot and the pivot price
pivot = i
            # bump the next_pivot if we've passed it; on the first iteration
            # next_pivot is still None, so fall back to pivot + 1
            next_pivot = pivot + 1 if next_pivot is None else max(next_pivot, pivot + 1)
# test to see if we've found a lower next_pivot
if next_pivot is None or (next_pivot <= i and price <
back_prices[next_pivot - i - 1]):
# set it and the next_price
next_pivot = i
# test to see if the pivot has fallen out of the window
if i - pivot == window:
# move the pivot to the next position
pivot = next_pivot
# set the next_pivot to one after the new pivot
next_pivot = pivot + 1
# update the profit accumulator
profit = max(profit, price - back_prices[pivot - i - 1])
# return the accumulated profit once done
return profit
# pylint: disable=R0904
class StockProfitTests(unittest.TestCase):
"""Unit tests for the find_profit function"""
def test_increase(self):
"""Test an increasing window size with a simple list"""
self.assertEqual(find_profit([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], 2), 1.0)
self.assertEqual(find_profit([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], 3), 2.0)
self.assertEqual(find_profit([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], 4), 3.0)
def test_window_sizes(self):
"""Test various difficult lists with window sizes larger than the best
size possible"""
self.assertEqual(find_profit([1.0, 2.0, 3.0, 1.0, 3.0, 4.0], 5), 3.0)
self.assertEqual(find_profit([7.0, 5.0, 6.0, 4.0, 5.0, 3.0, 4.0, 2.0,
3.0, 1.0], 5), 1.0)
self.assertEqual(find_profit([4.0, 3.0, 2.0, 4.0, 3.0, 1.0, 1.1, 1.2,
1.3, 1.4], 5), 2.0)
def test_shifting(self):
"""Test a growing window, where each increase makes for a different
profit"""
self.assertEqual(find_profit([2.0, 3.0, 1.0, 2.0, 4.0, 5.0, 7.0, 8.0],
2), 2.0)
self.assertEqual(find_profit([2.0, 3.0, 1.0, 2.0, 4.0, 5.0, 7.0, 8.0],
3), 3.0)
self.assertEqual(find_profit([2.0, 3.0, 1.0, 2.0, 4.0, 5.0, 7.0, 8.0],
4), 5.0)
self.assertEqual(find_profit([2.0, 3.0, 1.0, 2.0, 4.0, 5.0, 7.0, 8.0],
5), 6.0)
self.assertEqual(find_profit([2.0, 3.0, 1.0, 2.0, 4.0, 5.0, 7.0, 8.0],
6), 7.0)
if __name__ == "__main__":
unittest.main()
``` |
{
"source": "JRasmusBm/metaborg-python",
"score": 3
} |
#### File: Wython.example/basic/example1.py
```python
def a():
print("a")
print("b")
if True:
print("True!")
if False:
print("False!")
print("c")
a()
``` |
{
"source": "jrast/bincopy",
"score": 2
} |
#### File: bincopy/tests/test_bincopy.py
```python
from __future__ import print_function
import sys
import unittest
import bincopy
from collections import namedtuple
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from unittest.mock import patch
except ImportError:
from mock import patch
class BinCopyTest(unittest.TestCase):
def test_srec(self):
binfile = bincopy.BinFile()
with open('tests/files/in.s19', 'r') as fin:
binfile.add_srec(fin.read())
with open('tests/files/in.s19', 'r') as fin:
self.assertEqual(binfile.as_srec(28, 16), fin.read())
binfile = bincopy.BinFile()
with open('tests/files/empty_main.s19', 'r') as fin:
binfile.add_srec(fin.read())
with open('tests/files/empty_main.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(padding=b'\x00'), fin.read())
# Add and overwrite the data.
binfile = bincopy.BinFile()
binfile.add_srec_file('tests/files/empty_main_rearranged.s19')
binfile.add_srec_file('tests/files/empty_main_rearranged.s19',
overwrite=True)
with open('tests/files/empty_main.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(padding=b'\x00'), fin.read())
with self.assertRaises(bincopy.Error) as cm:
binfile.add_srec_file('tests/files/bad_crc.s19')
self.assertEqual(
str(cm.exception),
"expected crc '25' in record "
"S2144002640000000002000000060000001800000022, but got '22'")
def test_bad_srec(self):
# Pack.
with self.assertRaises(bincopy.Error) as cm:
bincopy.pack_srec('q', 0, 0, '')
self.assertEqual(str(cm.exception),
"expected record type 0..3 or 5..9, but got 'q'")
# Unpack too short record.
with self.assertRaises(bincopy.Error) as cm:
bincopy.unpack_srec('')
self.assertEqual(str(cm.exception), "record '' too short")
# Unpack bad first character.
with self.assertRaises(bincopy.Error) as cm:
bincopy.unpack_srec('T0000011')
self.assertEqual(str(cm.exception),
"record 'T0000011' not starting with an 'S'")
# Unpack bad type.
with self.assertRaises(bincopy.Error) as cm:
bincopy.unpack_srec('S.0200FF')
self.assertEqual(str(cm.exception),
"expected record type 0..3 or 5..9, but got '.'")
# Unpack bad crc.
with self.assertRaises(bincopy.Error) as cm:
bincopy.unpack_srec('S1020011')
self.assertEqual(str(cm.exception),
"expected crc 'FD' in record S1020011, but got '11'")
def test_ti_txt(self):
binfile = bincopy.BinFile()
with open('tests/files/in.s19.txt', 'r') as fin:
binfile.add_ti_txt(fin.read())
with open('tests/files/in.s19.txt', 'r') as fin:
self.assertEqual(binfile.as_ti_txt(), fin.read())
binfile = bincopy.BinFile()
with open('tests/files/empty_main.s19.txt', 'r') as fin:
binfile.add_ti_txt(fin.read())
with open('tests/files/empty_main.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(padding=b'\x00'), fin.read())
# Add and overwrite the data.
binfile = bincopy.BinFile()
binfile.add_ti_txt_file('tests/files/empty_main_rearranged.s19.txt')
binfile.add_ti_txt_file('tests/files/empty_main_rearranged.s19.txt',
overwrite=True)
with open('tests/files/empty_main.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(padding=b'\x00'), fin.read())
empty = bincopy.BinFile()
binfile = bincopy.BinFile('tests/files/empty.txt')
self.assertEqual(binfile.as_ti_txt(), empty.as_ti_txt())
def test_bad_ti_txt(self):
datas = [
('bad_ti_txt_address_value.txt', 'bad section address'),
('bad_ti_txt_bad_q.txt', 'bad file terminator'),
('bad_ti_txt_data_value.txt', 'bad data'),
('bad_ti_txt_record_short.txt', 'missing section address'),
('bad_ti_txt_record_long.txt', 'bad line length'),
('bad_ti_txt_no_offset.txt', 'missing section address'),
('bad_ti_txt_no_q.txt', 'missing file terminator'),
('bad_ti_txt_blank_line.txt', 'bad line length')
]
for filename, message in datas:
binfile = bincopy.BinFile()
with self.assertRaises(bincopy.Error) as cm:
binfile.add_ti_txt_file('tests/files/' + filename)
self.assertEqual(str(cm.exception), message)
def test_compare_ti_txt(self):
filenames = [
'in.s19',
'empty_main.s19',
'convert.s19',
'out.s19',
'non_sorted_segments.s19',
'non_sorted_segments_merged_and_sorted.s19',
'in.hex',
'empty_main.hex',
'convert.hex',
'out.hex'
]
for file_1 in filenames:
file_2 = file_1 + '.txt'
try:
bin1 = bincopy.BinFile('tests/files/' + file_1)
bin2 = bincopy.BinFile('tests/files/' + file_2)
self.assertEqual(bin1.as_ti_txt(), bin2.as_ti_txt())
except bincopy.Error as exc:
print("Error comparing {} to {}: {}".format(file_1, file_2, str(exc)))
raise exc
def test_bad_ihex(self):
# Unpack.
with self.assertRaises(bincopy.Error) as cm:
bincopy.unpack_ihex('')
self.assertEqual(str(cm.exception), "record '' too short")
with self.assertRaises(bincopy.Error) as cm:
bincopy.unpack_ihex('.0011110022')
self.assertEqual(str(cm.exception),
"record '.0011110022' not starting with a ':'")
with self.assertRaises(bincopy.Error) as cm:
bincopy.unpack_ihex(':0011110022')
self.assertEqual(str(cm.exception),
"expected crc 'DE' in record :0011110022, but got '22'")
def test_ihex(self):
binfile = bincopy.BinFile()
with open('tests/files/in.hex', 'r') as fin:
binfile.add_ihex(fin.read())
with open('tests/files/in.hex', 'r') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
# Add and overwrite the data.
binfile = bincopy.BinFile()
binfile.add_ihex_file('tests/files/in.hex')
binfile.add_ihex_file('tests/files/in.hex', overwrite=True)
with open('tests/files/in.hex') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
def test_i8hex(self):
"""I8HEX files use only record types 00 and 01 (16 bit addresses).
"""
binfile = bincopy.BinFile()
binfile.add_ihex(':0100000001FE\n'
':0101000002FC\n'
':01FFFF0003FE\n'
':0400000300000000F9\n' # Will not be part of
# I8HEX output.
':00000001FF\n')
self.assertEqual(list(binfile.segments),
[
(0, b'\x01'),
(0x100, b'\x02'),
(0xffff, b'\x03')
])
self.assertEqual(binfile.as_ihex(address_length_bits=16),
':0100000001FE\n'
':0101000002FC\n'
':01FFFF0003FE\n'
':00000001FF\n')
def test_i8hex_address_above_64k(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x00', 65536)
with self.assertRaises(bincopy.Error) as cm:
binfile.as_ihex(address_length_bits=16)
self.assertEqual(
str(cm.exception),
'cannot address more than 64 kB in I8HEX files (16 bits '
'addresses)')
def test_i16hex(self):
"""I16HEX files use only record types 00 through 03 (20 bit
addresses).
"""
binfile = bincopy.BinFile()
binfile.add_ihex(':0100000001FE\n'
':01F00000020D\n'
':01FFFF0003FE\n'
':02000002C0003C\n'
':0110000005EA\n'
':02000002FFFFFE\n'
':0100000006F9\n'
':01FFFF0007FA\n'
':020000021000EC\n'
':0100000004FB\n'
':0400000500000000F7\n' # Converted to 03 in
# I16HEX output.
':00000001FF\n')
self.assertEqual(
list(binfile.segments),
[
(0, b'\x01'),
(0xf000, b'\x02'),
(0xffff, b'\x03\x04'), # 3 at 0xffff and 4 at 16 *
# 0x1000 = 0x10000.
(16 * 0xc000 + 0x1000, b'\x05'),
(16 * 0xffff, b'\x06'),
(17 * 0xffff, b'\x07')
])
self.assertEqual(binfile.as_ihex(address_length_bits=24),
':0100000001FE\n'
':01F00000020D\n'
':02FFFF000304F9\n'
':02000002C0003C\n'
':0110000005EA\n'
':02000002F0000C\n'
':01FFF000060A\n'
':02000002FFFFFE\n'
':01FFFF0007FA\n'
':0400000300000000F9\n'
':00000001FF\n')
def test_i16hex_address_above_1meg(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x00', 17 * 65535 + 1)
with self.assertRaises(bincopy.Error) as cm:
binfile.as_ihex(address_length_bits=24)
self.assertEqual(
str(cm.exception),
'cannot address more than 1 MB in I16HEX files (20 bits '
'addresses)')
def test_i32hex(self):
"""I32HEX files use only record types 00, 01, 04, and 05 (32 bit
addresses).
"""
binfile = bincopy.BinFile()
binfile.add_ihex(':0100000001FE\n'
':01FFFF0002FF\n'
':02000004FFFFFC\n'
':0100000004FB\n'
':01FFFF0005FC\n'
':020000040001F9\n'
':0100000003FC\n'
':0400000500000000F7\n'
':00000001FF\n')
self.assertEqual(binfile.as_ihex(),
':0100000001FE\n'
':02FFFF000203FB\n'
':02000004FFFFFC\n'
':0100000004FB\n'
':01FFFF0005FC\n'
':0400000500000000F7\n'
':00000001FF\n')
self.assertEqual(binfile.minimum_address, 0)
self.assertEqual(binfile.maximum_address, 0x100000000)
self.assertEqual(binfile.execution_start_address, 0)
self.assertEqual(binfile[0], 1)
self.assertEqual(binfile[0xffff], 2)
self.assertEqual(binfile[0x10000], 3)
self.assertEqual(binfile[0xffff0000], 4)
self.assertEqual(binfile[0xffff0002:0xffff0004], b'\xff\xff')
self.assertEqual(binfile[0xffffffff:0x100000000], b'\x05')
def test_i32hex_address_above_4gig(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x00', 0x100000000)
with self.assertRaises(bincopy.Error) as cm:
binfile.as_ihex(address_length_bits=32)
self.assertEqual(
str(cm.exception),
'cannot address more than 4 GB in I32HEX files (32 bits '
'addresses)')
def test_binary(self):
# Add data to 0..2.
binfile = bincopy.BinFile()
with open('tests/files/binary1.bin', 'rb') as fin:
binfile.add_binary(fin.read())
with open('tests/files/binary1.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(), fin.read())
# Add and overwrite data to 15..179.
binfile = bincopy.BinFile()
binfile.add_binary_file('tests/files/binary2.bin', 15)
binfile.add_binary_file('tests/files/binary2.bin', 15, overwrite=True)
with self.assertRaises(bincopy.Error):
# Cannot add overlapping segments.
with open('tests/files/binary2.bin', 'rb') as fin:
binfile.add_binary(fin.read(), 20)
# Exclude the overlapping part and add.
binfile.exclude(20, 1024)
with open('tests/files/binary2.bin', 'rb') as fin:
binfile.add_binary(fin.read(), 20)
with open('tests/files/binary3.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(minimum_address=0,
padding=b'\x00'), fin.read())
        # Exclude first byte and read it to test adjacent add before.
binfile.exclude(0, 1)
binfile.add_binary(b'1')
with open('tests/files/binary3.bin', 'rb') as fin:
reference = b'1' + fin.read()[1:]
self.assertEqual(binfile.as_binary(minimum_address=0,
padding=b'\x00'), reference)
# Basic checks.
self.assertEqual(binfile.minimum_address, 0)
self.assertEqual(binfile.maximum_address, 184)
self.assertEqual(len(binfile), 170)
# Dump with start address beyond end of binary.
self.assertEqual(binfile.as_binary(minimum_address=512), b'')
# Dump with start address at maximum address.
self.assertEqual(binfile.as_binary(minimum_address=184), b'')
# Dump with start address one before maximum address.
self.assertEqual(binfile.as_binary(minimum_address=183), b'\n')
# Dump with start address one after minimum address.
self.assertEqual(binfile.as_binary(minimum_address=1,
padding=b'\x00'),
reference[1:])
# Dump with start address 16 and end address 18.
self.assertEqual(binfile.as_binary(minimum_address=16,
maximum_address=18), b'\x32\x30')
# Dump with start and end addresses 16.
self.assertEqual(binfile.as_binary(minimum_address=16,
maximum_address=16), b'')
# Dump with end beyond end of binary.
self.assertEqual(binfile.as_binary(maximum_address=1024,
padding=b'\x00'),
reference)
# Dump with end before start.
self.assertEqual(binfile.as_binary(minimum_address=2,
maximum_address=0), b'')
def test_binary_16(self):
binfile = bincopy.BinFile(word_size_bits=16)
binfile.add_binary(b'\x35\x30\x36\x30\x37\x30', address=5)
binfile.add_binary(b'\x61\x30\x62\x30\x63\x30', address=10)
# Basic checks.
self.assertEqual(binfile.minimum_address, 5)
self.assertEqual(binfile.maximum_address, 13)
self.assertEqual(len(binfile), 6)
# Dump with start address beyond end of binary.
self.assertEqual(binfile.as_binary(minimum_address=14), b'')
# Dump with start address at maximum address.
self.assertEqual(binfile.as_binary(minimum_address=13), b'')
# Dump with start address one before maximum address.
self.assertEqual(binfile.as_binary(minimum_address=12), b'c0')
# Dump parts of both segments.
self.assertEqual(binfile.as_binary(minimum_address=6,
maximum_address=11),
b'\x36\x30\x37\x30\xff\xff\xff\xff\x61\x30')
# Iterate over segments.
self.assertEqual(list(binfile.segments),
[
(5, b'\x35\x30\x36\x30\x37\x30'),
(10, b'\x61\x30\x62\x30\x63\x30')
])
def test_add(self):
binfile = bincopy.BinFile()
with open('tests/files/in.s19', 'r') as fin:
binfile.add(fin.read())
with open('tests/files/in.s19') as fin:
self.assertEqual(binfile.as_srec(28, 16), fin.read())
binfile = bincopy.BinFile()
with open('tests/files/in.hex', 'r') as fin:
binfile.add(fin.read())
with open('tests/files/in.hex') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
binfile = bincopy.BinFile()
with self.assertRaises(bincopy.UnsupportedFileFormatError) as cm:
binfile.add('invalid data')
binfile = bincopy.BinFile()
with self.assertRaises(bincopy.Error) as cm:
binfile.add('S214400420ED044000E8B7FFFFFFF4660F1F440000EE\n'
'invalid data')
self.assertEqual(str(cm.exception),
"record 'invalid data' not starting with an 'S'")
binfile = bincopy.BinFile()
with self.assertRaises(bincopy.Error) as cm:
binfile.add(':020000040040BA\n'
'invalid data')
self.assertEqual(str(cm.exception),
"record 'invalid data' not starting with a ':'")
with self.assertRaises(bincopy.UnsupportedFileFormatError) as cm:
binfile.add('')
def test_add_file(self):
binfile = bincopy.BinFile()
binfile.add_file('tests/files/empty_main_rearranged.s19')
with open('tests/files/empty_main.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(padding=b'\x00'), fin.read())
binfile = bincopy.BinFile()
binfile.add_file('tests/files/in.hex')
with open('tests/files/in.hex') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
binfile = bincopy.BinFile()
with self.assertRaises(bincopy.UnsupportedFileFormatError) as cm:
binfile.add_file('tests/files/hexdump.txt')
def test_init_files(self):
binfile = bincopy.BinFile('tests/files/empty_main_rearranged.s19')
with open('tests/files/empty_main.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(padding=b'\x00'), fin.read())
binfile = bincopy.BinFile(['tests/files/in.hex', 'tests/files/in.hex'],
overwrite=True)
with open('tests/files/in.hex') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
with self.assertRaises(bincopy.UnsupportedFileFormatError) as cm:
binfile = bincopy.BinFile('tests/files/hexdump.txt')
def test_array(self):
binfile = bincopy.BinFile()
with open('tests/files/in.hex', 'r') as fin:
binfile.add_ihex(fin.read())
with open('tests/files/in.i') as fin:
self.assertEqual(binfile.as_array() + '\n', fin.read())
def test_hexdump_1(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'12',address=17)
binfile.add_binary(b'34', address=26)
binfile.add_binary(b'5678', address=30)
binfile.add_binary(b'9', address=47)
with open('tests/files/hexdump.txt') as fin:
self.assertEqual(binfile.as_hexdump(), fin.read())
def test_hexdump_2(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'34', address=0x150)
binfile.add_binary(b'3', address=0x163)
binfile.add_binary(b'\x01', address=0x260)
binfile.add_binary(b'3', address=0x263)
with open('tests/files/hexdump2.txt') as fin:
self.assertEqual(binfile.as_hexdump(), fin.read())
def test_hexdump_gaps(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'1', address=0)
# One line gap as "...".
binfile.add_binary(b'3', address=32)
# Two lines gap as "...".
binfile.add_binary(b'6', address=80)
with open('tests/files/hexdump3.txt') as fin:
self.assertEqual(binfile.as_hexdump(), fin.read())
def test_hexdump_empty(self):
binfile = bincopy.BinFile()
self.assertEqual(binfile.as_hexdump(), '\n')
def test_srec_ihex_binary(self):
binfile = bincopy.BinFile()
with open('tests/files/in.hex', 'r') as fin:
binfile.add_ihex(fin.read())
with open('tests/files/in.s19', 'r') as fin:
binfile.add_srec(fin.read())
with open('tests/files/binary1.bin', 'rb') as fin:
binfile.add_binary(fin.read(), 1024)
with open('tests/files/out.hex', 'r') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
with open('tests/files/out.s19') as fin:
self.assertEqual(binfile.as_srec(address_length_bits=16), fin.read())
binfile.fill(b'\x00')
with open('tests/files/out.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(), fin.read())
def test_exclude_crop(self):
# Exclude part of the data.
binfile = bincopy.BinFile()
with open('tests/files/in.s19', 'r') as fin:
binfile.add_srec(fin.read())
binfile.exclude(2, 4)
with open('tests/files/in_exclude_2_4.s19') as fin:
self.assertEqual(binfile.as_srec(32, 16), fin.read())
binfile = bincopy.BinFile()
with open('tests/files/in.s19', 'r') as fin:
binfile.add_srec(fin.read())
binfile.exclude(3, 1024)
with open('tests/files/in_exclude_3_1024.s19') as fin:
self.assertEqual(binfile.as_srec(32, 16), fin.read())
binfile = bincopy.BinFile()
with open('tests/files/in.s19', 'r') as fin:
binfile.add_srec(fin.read())
binfile.exclude(0, 9)
with open('tests/files/in_exclude_0_9.s19') as fin:
self.assertEqual(binfile.as_srec(32, 16), fin.read())
binfile = bincopy.BinFile()
with open('tests/files/empty_main.s19', 'r') as fin:
binfile.add_srec(fin.read())
binfile.exclude(0x400240, 0x400600)
with open('tests/files/empty_main_mod.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(padding=b'\x00'), fin.read())
# Crop part of the data.
binfile = bincopy.BinFile()
binfile.add_srec_file('tests/files/in.s19')
binfile.crop(2, 4)
with open('tests/files/in_crop_2_4.s19') as fin:
self.assertEqual(binfile.as_srec(32, 16), fin.read())
binfile.exclude(2, 4)
self.assertEqual(binfile.as_binary(), b'')
# Exclude various parts of segments.
binfile = bincopy.BinFile()
binfile.add_binary(b'111111', address=8)
binfile.add_binary(b'222222', address=16)
binfile.add_binary(b'333333', address=24)
binfile.exclude(7, 8)
binfile.exclude(15, 16)
binfile.exclude(23, 24)
self.assertEqual(binfile.as_binary(),
b'111111' +
2 * b'\xff' +
b'222222' +
2 * b'\xff' +
b'333333')
binfile.exclude(20, 24)
self.assertEqual(binfile.as_binary(),
b'111111' +
2 * b'\xff' +
b'2222' +
4 * b'\xff' +
b'333333')
binfile.exclude(12, 24)
self.assertEqual(binfile.as_binary(),
b'1111' +
12 * b'\xff' +
b'333333')
binfile.exclude(11, 25)
self.assertEqual(binfile.as_binary(),
b'111' +
14 * b'\xff' +
b'33333')
binfile.exclude(11, 26)
self.assertEqual(binfile.as_binary(),
b'111' +
15 * b'\xff' +
b'3333')
        # Exclude negative address range and empty address range.
binfile = bincopy.BinFile()
binfile.add_binary(b'111111')
with self.assertRaises(bincopy.Error) as cm:
binfile.exclude(4, 2)
self.assertEqual(str(cm.exception), 'bad address range')
binfile.exclude(2, 2)
self.assertEqual(binfile.as_binary(), b'111111')
def test_minimum_maximum_length(self):
binfile = bincopy.BinFile()
# Get the minimum address from an empty file.
self.assertEqual(binfile.minimum_address, None)
# Get the maximum address from an empty file.
self.assertEqual(binfile.maximum_address, None)
# Get the length of an empty file.
self.assertEqual(len(binfile), 0)
# Get from a small file.
with open('tests/files/in.s19', 'r') as fin:
binfile.add_srec(fin.read())
self.assertEqual(binfile.minimum_address, 0)
self.assertEqual(binfile.maximum_address, 70)
self.assertEqual(len(binfile), 70)
# Add a second segment to the file.
binfile.add_binary(9 * b'\x01', 80)
self.assertEqual(binfile.minimum_address, 0)
self.assertEqual(binfile.maximum_address, 89)
self.assertEqual(len(binfile), 79)
def test_iterate_segments(self):
binfile = bincopy.BinFile()
with open('tests/files/in.s19', 'r') as fin:
binfile.add_srec(fin.read())
i = 0
for address, data in binfile.segments:
del address, data
i += 1
self.assertEqual(i, 1)
self.assertEqual(len(binfile.segments), 1)
def test_segments_list(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x00', address=0)
binfile.add_binary(b'\x01\x02', address=10)
binfile.add_binary(b'\x03', address=12)
binfile.add_binary(b'\x04', address=1000)
self.assertEqual(list(binfile.segments),
[
(0, b'\x00'),
(10, b'\x01\x02\x03'),
(1000, b'\x04')
])
def test_chunks_list(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x00\x00\x01\x01\x02', address=0)
binfile.add_binary(b'\x04\x05\x05\x06\x06\x07', address=9)
binfile.add_binary(b'\x09', address=19)
binfile.add_binary(b'\x0a', address=21)
self.assertEqual(binfile.as_binary(),
b'\x00\x00\x01\x01\x02\xff\xff\xff'
b'\xff\x04\x05\x05\x06\x06\x07\xff'
b'\xff\xff\xff\x09\xff\x0a')
# Size 8, alignment 1.
self.assertEqual(list(binfile.segments.chunks(size=8)),
[
(0, b'\x00\x00\x01\x01\x02'),
(9, b'\x04\x05\x05\x06\x06\x07'),
(19, b'\x09'),
(21, b'\x0a')
])
# Size 8, alignment 2.
self.assertEqual(list(binfile.segments.chunks(size=8, alignment=2)),
[
(0, b'\x00\x00\x01\x01\x02'),
(9, b'\x04'),
(10, b'\x05\x05\x06\x06\x07'),
(19, b'\x09'),
(21, b'\x0a')
])
# Size 8, alignment 4.
self.assertEqual(list(binfile.segments.chunks(size=8, alignment=4)),
[
(0, b'\x00\x00\x01\x01\x02'),
(9, b'\x04\x05\x05'),
(12, b'\x06\x06\x07'),
(19, b'\x09'),
(21, b'\x0a')
])
# Size 8, alignment 8.
self.assertEqual(list(binfile.segments.chunks(size=8, alignment=8)),
[
(0, b'\x00\x00\x01\x01\x02'),
(9, b'\x04\x05\x05\x06\x06\x07'),
(19, b'\x09'),
(21, b'\x0a')
])
# Size 4, alignment 1.
self.assertEqual(list(binfile.segments.chunks(size=4)),
[
(0, b'\x00\x00\x01\x01'),
(4, b'\x02'),
(9, b'\x04\x05\x05\x06'),
(13, b'\x06\x07'),
(19, b'\x09'),
(21, b'\x0a')
])
# Size 4, alignment 2.
self.assertEqual(list(binfile.segments.chunks(size=4, alignment=2)),
[
(0, b'\x00\x00\x01\x01'),
(4, b'\x02'),
(9, b'\x04'),
(10, b'\x05\x05\x06\x06'),
(14, b'\x07'),
(19, b'\x09'),
(21, b'\x0a')
])
# Size 4, alignment 4.
self.assertEqual(list(binfile.segments.chunks(size=4, alignment=4)),
[
(0, b'\x00\x00\x01\x01'),
(4, b'\x02'),
(9, b'\x04\x05\x05'),
(12, b'\x06\x06\x07'),
(19, b'\x09'),
(21, b'\x0a')
])
def test_chunks_bad_arguments(self):
binfile = bincopy.BinFile()
with self.assertRaises(bincopy.Error) as cm:
list(binfile.segments.chunks(size=4, alignment=3))
self.assertEqual(str(cm.exception),
'size 4 is not a multiple of alignment 3')
with self.assertRaises(bincopy.Error) as cm:
list(binfile.segments.chunks(size=4, alignment=8))
self.assertEqual(str(cm.exception),
'size 4 is not a multiple of alignment 8')
def test_segment(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x00\x01\x02\x03\x04', 2)
# Size 4, alignment 4.
self.assertEqual(list(binfile.segments[0].chunks(size=4, alignment=4)),
[
(2, b'\x00\x01'),
(4, b'\x02\x03\x04')
])
# Bad arguments.
with self.assertRaises(bincopy.Error) as cm:
list(binfile.segments[0].chunks(size=4, alignment=8))
self.assertEqual(str(cm.exception),
'size 4 is not a multiple of alignment 8')
# Missing segment.
with self.assertRaises(bincopy.Error) as cm:
list(binfile.segments[1].chunks(size=4, alignment=8))
self.assertEqual(str(cm.exception), 'segment does not exist')
def test_add_files(self):
binfile = bincopy.BinFile()
binfile_1_2 = bincopy.BinFile()
binfile.add_binary(b'\x00')
binfile_1_2.add_binary(b'\x01', address=1)
binfile += binfile_1_2
self.assertEqual(binfile.as_binary(), b'\x00\x01')
def test_info(self):
binfile = bincopy.BinFile()
with open('tests/files/empty_main.s19', 'r') as fin:
binfile.add_srec(fin.read())
self.assertEqual(binfile.info(),
"""Header: "bincopy/empty_main.s19"
Execution start address: 0x00400400
Data ranges:
0x00400238 - 0x004002b4 (124 bytes)
0x004002b8 - 0x0040033e (134 bytes)
0x00400340 - 0x004003c2 (130 bytes)
0x004003d0 - 0x00400572 (418 bytes)
0x00400574 - 0x0040057d (9 bytes)
0x00400580 - 0x004006ac (300 bytes)
0x00600e10 - 0x00601038 (552 bytes)
""")
def test_execution_start_address(self):
binfile = bincopy.BinFile()
with open('tests/files/empty_main.s19', 'r') as fin:
binfile.add_srec(fin.read())
self.assertEqual(binfile.execution_start_address, 0x00400400)
binfile.execution_start_address = 0x00400401
self.assertEqual(binfile.execution_start_address, 0x00400401)
def test_ihex_crc(self):
self.assertEqual(bincopy.crc_ihex('0300300002337a'), 0x1e)
self.assertEqual(bincopy.crc_ihex('00000000'), 0)
def test_add_ihex_record_type_3(self):
binfile = bincopy.BinFile()
binfile.add_ihex(':0400000302030405EB')
self.assertEqual(binfile.execution_start_address, 0x02030405)
def test_add_ihex_record_type_5(self):
binfile = bincopy.BinFile()
binfile.add_ihex(':0400000501020304ED')
self.assertEqual(binfile.execution_start_address, 0x01020304)
def test_add_ihex_bad_record_type_6(self):
binfile = bincopy.BinFile()
with self.assertRaises(bincopy.Error) as cm:
binfile.add_ihex(':00000006FA')
self.assertEqual(str(cm.exception),
'expected type 1..5 in record :00000006FA, but got 6')
def test_as_ihex_bad_address_length_bits(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x00')
with self.assertRaises(bincopy.Error) as cm:
binfile.as_ihex(address_length_bits=8)
self.assertEqual(str(cm.exception),
'expected address length 16, 24 or 32, but got 8')
def test_as_srec_bad_address_length(self):
binfile = bincopy.BinFile()
with self.assertRaises(bincopy.Error) as cm:
binfile.as_srec(address_length_bits=40)
self.assertEqual(str(cm.exception),
'expected data record type 1..3, but got 4')
def test_as_srec_record_5(self):
binfile = bincopy.BinFile()
binfile.add_binary(65535 * b'\x00')
records = binfile.as_srec(number_of_data_bytes=1)
self.assertEqual(len(records.splitlines()), 65536)
self.assertIn('S503FFFFFE', records)
def test_as_srec_record_6(self):
binfile = bincopy.BinFile()
binfile.add_binary(65536 * b'\x00')
records = binfile.as_srec(number_of_data_bytes=1)
self.assertEqual(len(records.splitlines()), 65537)
self.assertIn('S604010000FA', records)
def test_as_srec_record_8(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x00')
binfile.execution_start_address = 0x123456
records = binfile.as_srec(address_length_bits=24)
self.assertEqual(records,
'S20500000000FA\n'
'S5030001FB\n'
'S8041234565F\n')
def test_word_size(self):
binfile = bincopy.BinFile(word_size_bits=16)
with open('tests/files/in_16bits_word.s19', 'r') as fin:
binfile.add_srec(fin.read())
with open('tests/files/out_16bits_word.s19') as fin:
self.assertEqual(binfile.as_srec(30, 24), fin.read())
def test_word_size_default_padding(self):
binfile = bincopy.BinFile(word_size_bits=16)
with open('tests/files/in_16bits_word_padding.hex', 'r') as fin:
binfile.add_ihex(fin.read())
with open('tests/files/out_16bits_word_padding.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(), fin.read())
def test_word_size_custom_padding(self):
binfile = bincopy.BinFile(word_size_bits=16)
with open('tests/files/in_16bits_word_padding.hex', 'r') as fin:
binfile.add_ihex(fin.read())
with open('tests/files/out_16bits_word_padding_0xff00.bin', 'rb') as fin:
self.assertEqual(binfile.as_binary(padding=b'\xff\x00'), fin.read())
def test_print(self):
binfile = bincopy.BinFile()
with open('tests/files/in.s19', 'r') as fin:
binfile.add_srec(fin.read())
print(binfile)
def test_issue_4_1(self):
binfile = bincopy.BinFile()
with open('tests/files/issue_4_in.hex', 'r') as fin:
binfile.add_ihex(fin.read())
with open('tests/files/issue_4_out.hex', 'r') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
def test_issue_4_2(self):
binfile = bincopy.BinFile()
with open('tests/files/empty_main.s19', 'r') as fin:
binfile.add_srec(fin.read())
with open('tests/files/empty_main.hex', 'r') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
def test_overwrite(self):
binfile = bincopy.BinFile()
# Overwrite in empty file.
binfile.add_binary(b'1234', address=512, overwrite=True)
self.assertEqual(binfile.as_binary(minimum_address=512), b'1234')
# Test setting data with multiple existing segments.
binfile.add_binary(b'123456', address=1024)
binfile.add_binary(b'99', address=1026, overwrite=True)
self.assertEqual(binfile.as_binary(minimum_address=512),
b'1234' + 508 * b'\xff' + b'129956')
# Test setting data crossing the original segment limits.
binfile.add_binary(b'abc', address=1022, overwrite=True)
binfile.add_binary(b'def', address=1029, overwrite=True)
self.assertEqual(binfile.as_binary(minimum_address=512),
b'1234'
+ 506 * b'\xff'
+ b'abc2995def')
# Overwrite a segment and write outside it.
binfile.add_binary(b'111111111111', address=1021, overwrite=True)
self.assertEqual(binfile.as_binary(minimum_address=512),
b'1234'
+ 505 * b'\xff'
+ b'111111111111')
# Overwrite multiple segments (all segments in this test).
binfile.add_binary(1024 * b'1', address=256, overwrite=True)
self.assertEqual(binfile.as_binary(minimum_address=256), 1024 * b'1')
def test_non_sorted_segments(self):
binfile = bincopy.BinFile()
with open('tests/files/non_sorted_segments.s19', 'r') as fin:
binfile.add_srec(fin.read())
with open('tests/files/non_sorted_segments_merged_and_sorted.s19', 'r') as fin:
self.assertEqual(binfile.as_srec(), fin.read())
def test_fill(self):
binfile = bincopy.BinFile()
# Fill empty file.
binfile.fill()
self.assertEqual(binfile.as_binary(), b'')
# Add some data and fill again.
binfile.add_binary(b'\x01\x02\x03\x04', address=0)
binfile.add_binary(b'\x01\x02\x03\x04', address=8)
binfile.fill()
self.assertEqual(binfile.as_binary(),
b'\x01\x02\x03\x04\xff\xff\xff\xff\x01\x02\x03\x04')
def test_fill_max_words(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x01', address=0)
binfile.add_binary(b'\x02', address=2)
binfile.add_binary(b'\x03', address=5)
binfile.add_binary(b'\x04', address=9)
binfile.fill(b'\xaa', max_words=2)
self.assertEqual(len(binfile.segments), 2)
self.assertEqual(binfile.segments[0].address, 0)
self.assertEqual(binfile.segments[0].data, b'\x01\xaa\x02\xaa\xaa\x03')
self.assertEqual(binfile.segments[1].address, 9)
self.assertEqual(binfile.segments[1].data, b'\x04')
def test_fill_word_size_16(self):
binfile = bincopy.BinFile(word_size_bits=16)
binfile.add_binary(b'\x01\x02', address=0)
binfile.add_binary(b'\x03\x04', address=2)
binfile.add_binary(b'\x05\x06', address=5)
binfile.add_binary(b'\x07\x08', address=9)
binfile.fill(b'\xaa\xaa', max_words=2)
self.assertEqual(len(binfile.segments), 2)
self.assertEqual(binfile.segments[0].address, 0)
self.assertEqual(binfile.segments[0].data,
b'\x01\x02\xaa\xaa\x03\x04\xaa\xaa\xaa\xaa\x05\x06')
self.assertEqual(binfile.segments[1].address, 9)
self.assertEqual(binfile.segments[1].data,
b'\x07\x08')
# Fill the rest with the default value.
binfile.fill()
self.assertEqual(len(binfile.segments), 1)
self.assertEqual(
binfile.as_binary(),
(b'\x01\x02\xaa\xaa\x03\x04\xaa\xaa\xaa\xaa\x05\x06\xff\xff\xff\xff'
b'\xff\xff\x07\x08'))
def test_set_get_item(self):
binfile = bincopy.BinFile()
binfile.add_binary(b'\x01\x02\x03\x04', address=1)
self.assertEqual(binfile[:], b'\x01\x02\x03\x04')
with self.assertRaises(IndexError):
binfile[0]
self.assertEqual(binfile[1], 1)
self.assertEqual(binfile[2], 2)
self.assertEqual(binfile[3], 3)
self.assertEqual(binfile[4], 4)
with self.assertRaises(IndexError):
binfile[5]
self.assertEqual(binfile[3:5], b'\x03\x04')
self.assertEqual(binfile[3:6], b'\x03\x04')
binfile[1:3] = b'\x05\x06'
self.assertEqual(binfile[:], b'\x05\x06\x03\x04')
binfile[3:] = b'\x07\x08\x09'
self.assertEqual(binfile[:], b'\x05\x06\x07\x08\x09')
binfile[3:5] = b'\x0a\x0b'
self.assertEqual(binfile[:], b'\x05\x06\x0a\x0b\x09')
binfile[2:] = b'\x0c'
self.assertEqual(binfile[:], b'\x05\x0c\x0a\x0b\x09')
binfile[:] = b'\x01\x02\x03\x04\x05'
self.assertEqual(binfile[:], b'\x01\x02\x03\x04\x05')
binfile[0] = 0
self.assertEqual(binfile[:], b'\x00\x01\x02\x03\x04\x05')
binfile[7] = 7
self.assertEqual(binfile[:], b'\x00\x01\x02\x03\x04\x05\xff\x07')
self.assertEqual(binfile[6], 255)
self.assertEqual(binfile[6:7], b'\xff')
self.assertEqual(binfile[6:8], b'\xff\x07')
self.assertEqual(binfile[5:8], b'\x05\xff\x07')
# Add data at high address to test get performance.
binfile[0x10000000] = 0x12
self.assertEqual(binfile[0x10000000 - 1:], b'\xff\x12')
def test_set_get_item_16(self):
binfile = bincopy.BinFile(word_size_bits=16)
binfile.add_binary(b'\x01\x02\x03\x04', address=1)
self.assertEqual(binfile[:], b'\x01\x02\x03\x04')
with self.assertRaises(IndexError):
binfile[0]
self.assertEqual(binfile[1], 0x0102)
self.assertEqual(binfile[2], 0x0304)
with self.assertRaises(IndexError):
binfile[3]
self.assertEqual(binfile[1:3], b'\x01\x02\x03\x04')
self.assertEqual(binfile[1:4], b'\x01\x02\x03\x04')
binfile[1:2] = b'\x05\x06'
self.assertEqual(binfile[:], b'\x05\x06\x03\x04')
binfile[2:] = b'\x07\x08\x09\xa0'
self.assertEqual(binfile[:], b'\x05\x06\x07\x08\x09\xa0')
binfile[5] = 0x1718
self.assertEqual(binfile[:], b'\x05\x06\x07\x08\t\xa0\xff\xff\x17\x18')
self.assertEqual(binfile[4], 0xffff)
self.assertEqual(binfile[4:5], b'\xff\xff')
self.assertEqual(binfile[3:8], b'\t\xa0\xff\xff\x17\x18')
def test_header_default_encoding(self):
binfile = bincopy.BinFile()
binfile.add_file('tests/files/empty_main.s19')
self.assertEqual(binfile.header, 'bincopy/empty_main.s19')
binfile.header = 'bincopy/empty_main.s20'
self.assertEqual(binfile.header, 'bincopy/empty_main.s20')
def test_header_no_encoding(self):
binfile = bincopy.BinFile(header_encoding=None)
binfile.add_file('tests/files/empty_main.s19')
self.assertEqual(binfile.header, b'bincopy/empty_main.s19')
binfile.header = b'bincopy/empty_main.s20'
self.assertEqual(binfile.header, b'bincopy/empty_main.s20')
binfile.header = b'\x01\x80\x88\xaa\x90'
self.assertEqual(binfile.header, b'\x01\x80\x88\xaa\x90')
with self.assertRaises(TypeError) as cm:
binfile.header = u'bincopy/empty_main.s21'
self.assertIn("expected a bytes object, but got <",
str(cm.exception))
def test_srec_no_header_encoding(self):
binfile = bincopy.BinFile(header_encoding=None)
binfile.add_srec('S0080000018088AA90B4')
self.assertEqual(binfile.as_srec().splitlines()[0],
'S0080000018088AA90B4')
def test_performance(self):
binfile = bincopy.BinFile()
# Add a 1MB consecutive binary.
chunk = 1024 * b'1'
for i in range(1024):
binfile.add_binary(chunk, 1024 * i)
self.assertEqual(binfile.minimum_address, 0)
self.assertEqual(binfile.maximum_address, 1024 * 1024)
ihex = binfile.as_ihex()
srec = binfile.as_srec()
binfile = bincopy.BinFile()
binfile.add_ihex(ihex)
binfile = bincopy.BinFile()
binfile.add_srec(srec)
def test_command_line_convert_input_formats(self):
with open('tests/files/convert.hexdump') as fin:
expected_output = fin.read()
datas = [
('srec', 'tests/files/convert.s19'),
('ihex', 'tests/files/convert.hex'),
('ti_txt', 'tests/files/convert.s19.txt'),
('binary,0x100', 'tests/files/convert.bin'),
('auto', 'tests/files/convert.s19'),
('auto', 'tests/files/convert.hex'),
('auto', 'tests/files/convert.s19.txt')
]
for input_format, test_file in datas:
try:
command = ['bincopy', 'convert', '-i', input_format, test_file, '-']
self._test_command_line_ok(command, expected_output)
except SystemExit as exc:
print("Failed converting {} as {}".format(test_file, input_format))
raise exc
def test_command_line_convert_output_formats(self):
test_file = 'tests/files/convert.hex'
binfile = bincopy.BinFile(test_file)
datas = [
('srec', binfile.as_srec()),
('srec,8', binfile.as_srec(8)),
('srec,16,24', binfile.as_srec(16, 24)),
('ihex', binfile.as_ihex()),
('ihex,16', binfile.as_ihex(16)),
('ihex,8,32', binfile.as_ihex(8, 32)),
('hexdump', binfile.as_hexdump()),
('ti_txt', binfile.as_ti_txt())
]
for output_format, expected_output in datas:
command = ['bincopy', 'convert', '-o', output_format, test_file, '-']
self._test_command_line_ok(command, expected_output)
def test_command_line_convert_output_formats_bad_parameters(self):
test_file = 'tests/files/convert.hex'
datas = [
('srec,x', "invalid srec number of data bytes 'x'"),
('srec,16,y', "invalid srec address length of 'y' bits"),
('ihex,x', "invalid ihex number of data bytes 'x'"),
('ihex,8,y', "invalid ihex address length of 'y' bits")
]
for output_format, message in datas:
command = ['bincopy', 'convert', '-o', output_format, test_file, '-']
stderr = StringIO()
with patch('sys.stderr', stderr):
with self.assertRaises(SystemExit):
self._test_command_line_raises(command)
self.assertIn(message, stderr.getvalue())
def test_command_line_convert_output_format_binary(self):
test_file = 'tests/files/convert.hex'
binfile = bincopy.BinFile(test_file)
datas = [
('binary', binfile.as_binary()),
('binary,0', binfile.as_binary(0)),
('binary,0,100', binfile.as_binary(0, 100))
]
for output_format, expected_output in datas:
command = ['bincopy', 'convert', '-o', output_format, test_file, '-']
self._test_command_line_ok_bytes(command, expected_output)
def test_command_line_convert_output_format_binary_bad_addresses(self):
test_file = 'tests/files/convert.hex'
datas = [
('binary,x', "invalid binary minimum address 'x'"),
('binary,0,y', "invalid binary maximum address 'y'")
]
for output_format, message in datas:
command = ['bincopy', 'convert', '-o', output_format, test_file, '-']
stderr = StringIO()
with patch('sys.stderr', stderr):
with self.assertRaises(SystemExit):
self._test_command_line_raises(command)
self.assertIn(message, stderr.getvalue())
def test_command_line_convert_overlapping(self):
test_file = 'tests/files/convert.hex'
command = [
'bincopy', 'convert', '-o', 'binary',
test_file, test_file,
'-'
]
with self.assertRaises(SystemExit) as cm:
self._test_command_line_raises(command)
self.assertEqual(
str(cm.exception),
'error: overlapping segments detected, give --overwrite to '
'overwrite overlapping segments')
def test_command_line_convert_overwrite(self):
test_file = 'tests/files/convert.hex'
binfile = bincopy.BinFile(test_file)
# Auto input format.
command = [
'bincopy', 'convert', '-o', 'binary',
'--overwrite',
test_file, test_file,
'-'
]
self._test_command_line_ok_bytes(command, binfile.as_binary())
# Given ihex input format.
command = [
'bincopy', 'convert', '-i', 'ihex', '-o', 'binary',
'--overwrite',
test_file, test_file,
'-'
]
self._test_command_line_ok_bytes(command, binfile.as_binary())
def test_command_line_non_existing_file(self):
subcommands = ['info', 'as_hexdump', 'as_srec', 'as_ihex']
for subcommand in subcommands:
command = ['bincopy', subcommand, 'non-existing-file']
with self.assertRaises(SystemExit) as cm:
self._test_command_line_raises(command)
self.assertEqual(cm.exception.code,
"error: [Errno 2] No such file or directory: 'non-existing-file'")
def test_command_line_non_existing_file_debug(self):
subcommands = ['info', 'as_hexdump', 'as_srec', 'as_ihex']
for subcommand in subcommands:
command = ['bincopy', '--debug', subcommand, 'non-existing-file']
with self.assertRaises(IOError):
self._test_command_line_raises(command)
def test_command_line_dump_commands_one_file(self):
test_file = 'tests/files/empty_main.s19'
binfile = bincopy.BinFile(test_file)
datas = [
('as_hexdump', binfile.as_hexdump()),
('as_srec', binfile.as_srec()),
('as_ihex', binfile.as_ihex()),
('as_ti_txt', binfile.as_ti_txt())
]
for subcommand, expected_output in datas:
command = ['bincopy', subcommand, test_file]
self._test_command_line_ok(command, expected_output)
def test_command_line_info_one_file(self):
with open('tests/files/empty_main.info.txt', 'r') as fin:
expected_output = fin.read()
self._test_command_line_ok(
['bincopy', 'info', 'tests/files/empty_main.s19'],
expected_output)
def test_command_line_info_two_files(self):
with open('tests/files/empty_main_and_in.info.txt', 'r') as fin:
expected_output = fin.read()
self._test_command_line_ok(
['bincopy', 'info', 'tests/files/empty_main.s19', 'tests/files/in.s19'],
expected_output)
def test_command_line_info_two_files_with_header_encoding(self):
with open('tests/files/empty_main_and_in_header.info.txt', 'r') as fin:
expected_output = fin.read()
self._test_command_line_ok(
['bincopy', 'info',
'--header-encoding', 'utf-8',
'tests/files/empty_main.s19',
'tests/files/in.s19'],
expected_output)
def test_command_line_info_one_file_16_bits_words(self):
with open('tests/files/in_16bits_word.info.txt', 'r') as fin:
expected_output = fin.read()
self._test_command_line_ok(
[
'bincopy', 'info',
'--word-size-bits', '16',
'tests/files/in_16bits_word.s19'
],
expected_output)
def test_bad_word_size(self):
with self.assertRaises(bincopy.Error) as cm:
bincopy.BinFile(word_size_bits=7)
self.assertEqual(str(cm.exception),
'word size must be a multiple of 8 bits, but got 7 bits')
def _test_command_line_raises(self, command):
stdout = StringIO()
try:
with patch('sys.stdout', stdout):
with patch('sys.argv', command):
bincopy._main()
finally:
self.assertEqual(stdout.getvalue(), '')
def _test_command_line_ok(self, command, expected_output):
stdout = StringIO()
with patch('sys.stdout', stdout):
with patch('sys.argv', command):
bincopy._main()
self.assertEqual(stdout.getvalue(), expected_output)
def _test_command_line_ok_bytes(self, command, expected_output):
if sys.version_info[0] >= 3:
Stdout = namedtuple('stdout', ['buffer'])
stdout = Stdout(BytesIO())
with patch('sys.stdout', stdout):
with patch('sys.argv', command):
bincopy._main()
self.assertEqual(stdout.buffer.getvalue(), expected_output)
else:
stdout = StringIO()
with patch('sys.stdout', stdout):
with patch('sys.argv', command):
bincopy._main()
self.assertEqual(stdout.getvalue(), expected_output)
def test_ignore_blank_lines_hex(self):
binfile = bincopy.BinFile()
with open('tests/files/in_blank_lines.hex', 'r') as fin:
binfile.add_ihex(fin.read())
with open('tests/files/in.hex', 'r') as fin:
self.assertEqual(binfile.as_ihex(), fin.read())
def test_ignore_blank_lines_srec(self):
binfile = bincopy.BinFile()
with open('tests/files/in_blank_lines.s19', 'r') as fin:
binfile.add_srec(fin.read())
with open('tests/files/in.s19', 'r') as fin:
self.assertEqual(binfile.as_srec(28, 16), fin.read())
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jrathert/CookieReader",
"score": 3
} |
#### File: jrathert/CookieReader/cookiereader.py
```python
import configparser
import os
import sqlite3
import sys
import secretstorage
from Crypto.Cipher import AES
from Crypto.Protocol.KDF import PBKDF2
def get_cookie_crypto_key():
# get a key that will be used to decrypt the encrypted cookie values in Chrom{ium|e}s database
# first read password from linux keychain storage
bus = secretstorage.dbus_init()
collection = secretstorage.get_default_collection(bus)
passwd = ''
for item in collection.get_all_items():
if item.get_label() == 'Chrome Safe Storage':
passwd = item.get_secret()
break
# something went wrong that we cannot handle
if passwd == '':
return None
# Now use the password and some default values used by both Chrome and Chromium in OSX and Linux
# to create and return the key
salt = b'salty<PASSWORD>'
length = 16
iterations = 1 # 1003 on Mac, 1 on Linux
return PBKDF2(passwd, salt, length, iterations)
def query_chrome(cookie_file: str, hosts=None):
    # query Chrome's cookie file for all cookies of the specified hosts (or all)
if hosts is None:
hosts = list()
def clean_padding(x):
# helper function to get rid of padding and then decode bytestrings
return x[:-x[-1]].decode('utf8') if len(x) > 0 else ''
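    # e.g. clean_padding(b'value\x03\x03\x03') == 'value': the last byte of the
    # plaintext gives the number of PKCS#7-style padding bytes to strip.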
# create the query...
if len(hosts) > 0:
where_clause = "where " + " or ".join([f"host_key like '{h}'" for h in hosts])
else:
where_clause = ""
qry = f"select host_key, name, encrypted_value from cookies {where_clause}"
# ...read (encrypted) cookies from sqlite db...
con = sqlite3.connect(cookie_file)
cur = con.cursor()
results = cur.execute(qry).fetchall()
cur.close()
con.close()
# ...get the necessary decryption key...
key = get_cookie_crypto_key()
if key is None:
print("ERROR: Could not retrieve decryption key - exiting")
return
iv = b' ' * 16
# ...and print the cookies (by decrypting them)
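    # Chrome prefixes each encrypted value with a 3-byte version tag (typically
    # b'v10' on Linux); it is stripped below before AES-CBC decryption with the
    # fixed 16-space IV.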
i = 0
print("{")
for host, name, enc_value in results:
i += 1
end = ',\n' if i < len(results) else '\n'
encrypted = enc_value[3:] # [3:] cuts chrome version prefix
cipher = AES.new(key, AES.MODE_CBC, IV=iv)
decrypted = clean_padding(cipher.decrypt(encrypted))
print(f' "{name}": "{decrypted}"', end=end)
print("}")
def query_firefox(cookie_file: str, hosts: list):
    # query Firefox's cookie file for all cookies of the specified hosts (or all)
# create the query...
if len(hosts) > 0:
where_clause = "where " + " or ".join([f"host like '{h}'" for h in hosts])
else:
where_clause = ""
qry = f"select host, name, value from moz_cookies {where_clause}"
# ...read cookies from sqlite db...
con = sqlite3.connect(cookie_file)
cur = con.cursor()
results = cur.execute(qry).fetchall()
cur.close()
con.close()
# ...and print them
i = 0
print("{")
for host, name, value in results:
i = i + 1
end = ',\n' if i < len(results) else '\n'
print(f' "{name}": "{value}"', end=end)
print("}")
def find_ff_dir(home):
# try to identify the firefox profile directory
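    # A matching profiles.ini section typically looks like this (illustrative
    # example only, the actual path differs per installation):
    #   [Profile0]
    #   Name=default
    #   IsRelative=1
    #   Path=abcd1234.default
    #   Default=1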
ff_dir = home + '/.mozilla/firefox/'
if os.path.isfile(ff_dir + 'profiles.ini'):
cfg = configparser.ConfigParser()
cfg.read(ff_dir + 'profiles.ini')
for s in cfg.sections():
opts = cfg.options(s)
if "default" in opts and "path" in opts and cfg.get(s, "default") == '1':
return cfg.get(s, "path")
return None
def print_usage():
# some help
name = os.path.basename(__file__)
print(f"Usage: {name} chrome|firefox [-f cookie_file] [ host(s) ]")
print(f" chrome|firefox : mandatory, specify what browser cookies you want to read")
print(f" -f cookie_file : optional, specify file to read (needed, if program cannot determine file)")
print(f" host(s) : optional, list of hosts/host patterns for which you want to see cookies")
print(f"Examples:")
print(f" # list all cookies from firefox default cookie DB")
print(f" $ python {name} firefox ")
print(f" # list cookies from specified firefox cookie DB")
print(f" $ python {name} firefox -f /home/joe/.mozilla/Profile/cookies.sqlite")
print(f" # list all chrome cookies from www.microsoft.com or www.facebook.com")
print(f" $ python {name} chrome www.microsoft.com www.facebook.com")
print(f" # list cookies stored by *.apple.com domains in specified chrome cookie DB")
print(f" $ python {name} chrome -f /tmp/Cookies %.apple.com")
def main():
# mainly parsing the command line (yes, that can be done better, I know of argparse...)
# someone might need some help
if len(sys.argv) < 2 or '-h' in sys.argv:
print_usage()
exit(1)
# one mandatory argument: are we using chrome or firefox
mode = ''
if sys.argv[1] not in ["chrome", "firefox"]:
print_usage()
exit(1)
else:
mode = sys.argv[1]
# check if some specific file was given and identify where host arguments start
hosts = list()
cookie_file = ''
host_start_idx = 2
if len(sys.argv) > 2:
if sys.argv[2] == "-f":
if len(sys.argv) == 3:
print_usage()
exit(1)
else:
cookie_file = sys.argv[3]
host_start_idx = 4
if len(sys.argv) > host_start_idx:
hosts = sys.argv[host_start_idx:]
# if the cookie_file was not specified, we try to guess
if cookie_file == '':
home = os.environ['HOME']
if mode == 'chrome':
cookie_file = home + '/.config/google-chrome/Default/Cookies'
else:
ff_dir = find_ff_dir(home)
if ff_dir is not None:
cookie_file = home + '/.mozilla/firefox/' + ff_dir + '/cookies.sqlite'
# if the cookiefile is still not available or not readable - complain and exit
if cookie_file == '' or not os.access(cookie_file, os.R_OK):
print(f"ERROR: Cookie file for {mode} cannot be determined or opened")
print("Please specify file using the command line switch -f\n")
print_usage()
exit(1)
# all set - do the job
if mode == 'chrome':
query_chrome(cookie_file, hosts)
else:
query_firefox(cookie_file, hosts)
if __name__ == '__main__':
main()
``` |
{
"source": "jrauen/content",
"score": 2
} |
#### File: Integrations/AnsibleVMware/AnsibleVMware.py
```python
import ssh_agent_setup
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
# Import Generated code
from AnsibleApiModule import * # noqa: E402
host_type = 'local'
# MAIN FUNCTION
def main() -> None:
"""main function, parses params and runs command functions
:return:
:rtype:
"""
# SSH Key integration requires ssh_agent to be running in the background
ssh_agent_setup.setup()
# Common Inputs
command = demisto.command()
args = demisto.args()
int_params = demisto.params()
creds_mapping = {
"identifier": "username",
"password": "password"
}
try:
if command == 'test-module':
# This is the call made when pressing the integration Test button.
result = generic_ansible('VMware', 'vmware_about_info', args, int_params, host_type, creds_mapping)
if result:
return_results('ok')
else:
return_results(result)
elif command == 'vmware-about-info':
return_results(generic_ansible('VMware', 'vmware_about_info', args, int_params, host_type, creds_mapping))
elif command == 'vmware-category':
return_results(generic_ansible('VMware', 'vmware_category', args, int_params, host_type, creds_mapping))
elif command == 'vmware-category-info':
return_results(generic_ansible('VMware', 'vmware_category_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-cfg-backup':
return_results(generic_ansible('VMware', 'vmware_cfg_backup', args, int_params, host_type, creds_mapping))
elif command == 'vmware-cluster':
return_results(generic_ansible('VMware', 'vmware_cluster', args, int_params, host_type, creds_mapping))
elif command == 'vmware-cluster-drs':
return_results(generic_ansible('VMware', 'vmware_cluster_drs', args, int_params, host_type, creds_mapping))
elif command == 'vmware-cluster-ha':
return_results(generic_ansible('VMware', 'vmware_cluster_ha', args, int_params, host_type, creds_mapping))
elif command == 'vmware-cluster-info':
return_results(generic_ansible('VMware', 'vmware_cluster_info', args, int_params, host_type, creds_mapping))
elif command == 'vmware-cluster-vsan':
return_results(generic_ansible('VMware', 'vmware_cluster_vsan', args, int_params, host_type, creds_mapping))
elif command == 'vmware-content-deploy-template':
return_results(generic_ansible('VMware', 'vmware_content_deploy_template', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-content-library-info':
return_results(generic_ansible('VMware', 'vmware_content_library_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-content-library-manager':
return_results(generic_ansible('VMware', 'vmware_content_library_manager', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-datacenter':
return_results(generic_ansible('VMware', 'vmware_datacenter', args, int_params, host_type, creds_mapping))
elif command == 'vmware-datastore-cluster':
return_results(generic_ansible('VMware', 'vmware_datastore_cluster', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-datastore-info':
return_results(generic_ansible('VMware', 'vmware_datastore_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-datastore-maintenancemode':
return_results(generic_ansible('VMware', 'vmware_datastore_maintenancemode', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-dns-config':
return_results(generic_ansible('VMware', 'vmware_dns_config', args, int_params, host_type, creds_mapping))
elif command == 'vmware-drs-group':
return_results(generic_ansible('VMware', 'vmware_drs_group', args, int_params, host_type, creds_mapping))
elif command == 'vmware-drs-group-info':
return_results(generic_ansible('VMware', 'vmware_drs_group_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-drs-rule-info':
return_results(generic_ansible('VMware', 'vmware_drs_rule_info', args, int_params, host_type, creds_mapping))
elif command == 'vmware-dvs-host':
return_results(generic_ansible('VMware', 'vmware_dvs_host', args, int_params, host_type, creds_mapping))
elif command == 'vmware-dvs-portgroup':
return_results(generic_ansible('VMware', 'vmware_dvs_portgroup', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-dvs-portgroup-find':
return_results(generic_ansible('VMware', 'vmware_dvs_portgroup_find', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-dvs-portgroup-info':
return_results(generic_ansible('VMware', 'vmware_dvs_portgroup_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-dvswitch':
return_results(generic_ansible('VMware', 'vmware_dvswitch', args, int_params, host_type, creds_mapping))
elif command == 'vmware-dvswitch-lacp':
return_results(generic_ansible('VMware', 'vmware_dvswitch_lacp', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-dvswitch-nioc':
return_results(generic_ansible('VMware', 'vmware_dvswitch_nioc', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-dvswitch-pvlans':
return_results(generic_ansible('VMware', 'vmware_dvswitch_pvlans', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-dvswitch-uplink-pg':
return_results(generic_ansible('VMware', 'vmware_dvswitch_uplink_pg', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-evc-mode':
return_results(generic_ansible('VMware', 'vmware_evc_mode', args, int_params, host_type, creds_mapping))
elif command == 'vmware-folder-info':
return_results(generic_ansible('VMware', 'vmware_folder_info', args, int_params, host_type, creds_mapping))
elif command == 'vmware-guest':
return_results(generic_ansible('VMware', 'vmware_guest', args, int_params, host_type, creds_mapping))
elif command == 'vmware-guest-boot-info':
return_results(generic_ansible('VMware', 'vmware_guest_boot_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-boot-manager':
return_results(generic_ansible('VMware', 'vmware_guest_boot_manager', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-custom-attribute-defs':
return_results(generic_ansible('VMware', 'vmware_guest_custom_attribute_defs', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-custom-attributes':
return_results(generic_ansible('VMware', 'vmware_guest_custom_attributes', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-customization-info':
return_results(generic_ansible('VMware', 'vmware_guest_customization_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-disk':
return_results(generic_ansible('VMware', 'vmware_guest_disk', args, int_params, host_type, creds_mapping))
elif command == 'vmware-guest-disk-info':
return_results(generic_ansible('VMware', 'vmware_guest_disk_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-find':
return_results(generic_ansible('VMware', 'vmware_guest_find', args, int_params, host_type, creds_mapping))
elif command == 'vmware-guest-info':
return_results(generic_ansible('VMware', 'vmware_guest_info', args, int_params, host_type, creds_mapping))
elif command == 'vmware-guest-move':
return_results(generic_ansible('VMware', 'vmware_guest_move', args, int_params, host_type, creds_mapping))
elif command == 'vmware-guest-network':
return_results(generic_ansible('VMware', 'vmware_guest_network', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-powerstate':
return_results(generic_ansible('VMware', 'vmware_guest_powerstate', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-screenshot':
return_results(generic_ansible('VMware', 'vmware_guest_screenshot', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-sendkey':
return_results(generic_ansible('VMware', 'vmware_guest_sendkey', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-snapshot':
return_results(generic_ansible('VMware', 'vmware_guest_snapshot', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-snapshot-info':
return_results(generic_ansible('VMware', 'vmware_guest_snapshot_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-tools-upgrade':
return_results(generic_ansible('VMware', 'vmware_guest_tools_upgrade', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-tools-wait':
return_results(generic_ansible('VMware', 'vmware_guest_tools_wait', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-guest-video':
return_results(generic_ansible('VMware', 'vmware_guest_video', args, int_params, host_type, creds_mapping))
elif command == 'vmware-guest-vnc':
return_results(generic_ansible('VMware', 'vmware_guest_vnc', args, int_params, host_type, creds_mapping))
elif command == 'vmware-host':
return_results(generic_ansible('VMware', 'vmware_host', args, int_params, host_type, creds_mapping))
elif command == 'vmware-host-acceptance':
return_results(generic_ansible('VMware', 'vmware_host_acceptance', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-active-directory':
return_results(generic_ansible('VMware', 'vmware_host_active_directory', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-capability-info':
return_results(generic_ansible('VMware', 'vmware_host_capability_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-config-info':
return_results(generic_ansible('VMware', 'vmware_host_config_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-config-manager':
return_results(generic_ansible('VMware', 'vmware_host_config_manager', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-datastore':
return_results(generic_ansible('VMware', 'vmware_host_datastore', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-dns-info':
return_results(generic_ansible('VMware', 'vmware_host_dns_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-facts':
return_results(generic_ansible('VMware', 'vmware_host_facts', args, int_params, host_type, creds_mapping))
elif command == 'vmware-host-feature-info':
return_results(generic_ansible('VMware', 'vmware_host_feature_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-firewall-info':
return_results(generic_ansible('VMware', 'vmware_host_firewall_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-firewall-manager':
return_results(generic_ansible('VMware', 'vmware_host_firewall_manager', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-hyperthreading':
return_results(generic_ansible('VMware', 'vmware_host_hyperthreading', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-ipv6':
return_results(generic_ansible('VMware', 'vmware_host_ipv6', args, int_params, host_type, creds_mapping))
elif command == 'vmware-host-kernel-manager':
return_results(generic_ansible('VMware', 'vmware_host_kernel_manager', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-lockdown':
return_results(generic_ansible('VMware', 'vmware_host_lockdown', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-ntp':
return_results(generic_ansible('VMware', 'vmware_host_ntp', args, int_params, host_type, creds_mapping))
elif command == 'vmware-host-ntp-info':
return_results(generic_ansible('VMware', 'vmware_host_ntp_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-package-info':
return_results(generic_ansible('VMware', 'vmware_host_package_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-powermgmt-policy':
return_results(generic_ansible('VMware', 'vmware_host_powermgmt_policy', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-powerstate':
return_results(generic_ansible('VMware', 'vmware_host_powerstate', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-scanhba':
return_results(generic_ansible('VMware', 'vmware_host_scanhba', args, int_params, host_type, creds_mapping))
elif command == 'vmware-host-service-info':
return_results(generic_ansible('VMware', 'vmware_host_service_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-service-manager':
return_results(generic_ansible('VMware', 'vmware_host_service_manager', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-snmp':
return_results(generic_ansible('VMware', 'vmware_host_snmp', args, int_params, host_type, creds_mapping))
elif command == 'vmware-host-ssl-info':
return_results(generic_ansible('VMware', 'vmware_host_ssl_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-vmhba-info':
return_results(generic_ansible('VMware', 'vmware_host_vmhba_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-host-vmnic-info':
return_results(generic_ansible('VMware', 'vmware_host_vmnic_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-local-role-info':
return_results(generic_ansible('VMware', 'vmware_local_role_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-local-role-manager':
return_results(generic_ansible('VMware', 'vmware_local_role_manager', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-local-user-info':
return_results(generic_ansible('VMware', 'vmware_local_user_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-local-user-manager':
return_results(generic_ansible('VMware', 'vmware_local_user_manager', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-maintenancemode':
return_results(generic_ansible('VMware', 'vmware_maintenancemode', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-migrate-vmk':
return_results(generic_ansible('VMware', 'vmware_migrate_vmk', args, int_params, host_type, creds_mapping))
elif command == 'vmware-object-role-permission':
return_results(generic_ansible('VMware', 'vmware_object_role_permission', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-portgroup':
return_results(generic_ansible('VMware', 'vmware_portgroup', args, int_params, host_type, creds_mapping))
elif command == 'vmware-portgroup-info':
return_results(generic_ansible('VMware', 'vmware_portgroup_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-resource-pool':
return_results(generic_ansible('VMware', 'vmware_resource_pool', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-resource-pool-info':
return_results(generic_ansible('VMware', 'vmware_resource_pool_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-tag':
return_results(generic_ansible('VMware', 'vmware_tag', args, int_params, host_type, creds_mapping))
elif command == 'vmware-tag-info':
return_results(generic_ansible('VMware', 'vmware_tag_info', args, int_params, host_type, creds_mapping))
elif command == 'vmware-tag-manager':
return_results(generic_ansible('VMware', 'vmware_tag_manager', args, int_params, host_type, creds_mapping))
elif command == 'vmware-target-canonical-info':
return_results(generic_ansible('VMware', 'vmware_target_canonical_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vcenter-settings':
return_results(generic_ansible('VMware', 'vmware_vcenter_settings', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vcenter-statistics':
return_results(generic_ansible('VMware', 'vmware_vcenter_statistics', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vm-host-drs-rule':
return_results(generic_ansible('VMware', 'vmware_vm_host_drs_rule', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vm-info':
return_results(generic_ansible('VMware', 'vmware_vm_info', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vm-shell':
return_results(generic_ansible('VMware', 'vmware_vm_shell', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vm-storage-policy-info':
return_results(generic_ansible('VMware', 'vmware_vm_storage_policy_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vm-vm-drs-rule':
return_results(generic_ansible('VMware', 'vmware_vm_vm_drs_rule', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vm-vss-dvs-migrate':
return_results(generic_ansible('VMware', 'vmware_vm_vss_dvs_migrate', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vmkernel':
return_results(generic_ansible('VMware', 'vmware_vmkernel', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vmkernel-info':
return_results(generic_ansible('VMware', 'vmware_vmkernel_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vmkernel-ip-config':
return_results(generic_ansible('VMware', 'vmware_vmkernel_ip_config', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vmotion':
return_results(generic_ansible('VMware', 'vmware_vmotion', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vsan-cluster':
return_results(generic_ansible('VMware', 'vmware_vsan_cluster', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vspan-session':
return_results(generic_ansible('VMware', 'vmware_vspan_session', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vswitch':
return_results(generic_ansible('VMware', 'vmware_vswitch', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vswitch-info':
return_results(generic_ansible('VMware', 'vmware_vswitch_info', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vsphere-file':
return_results(generic_ansible('VMware', 'vsphere_file', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vcenter-extension':
return_results(generic_ansible('VMware', 'vcenter_extension', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vcenter-extension-info':
return_results(generic_ansible('VMware', 'vcenter_extension_info', args, int_params, host_type,
creds_mapping))
elif command == 'vmware-vcenter-folder':
return_results(generic_ansible('VMware', 'vcenter_folder', args, int_params, host_type, creds_mapping))
elif command == 'vmware-vcenter-license':
return_results(generic_ansible('VMware', 'vcenter_license', args, int_params, host_type, creds_mapping))
# Log exceptions and return errors
except Exception as e:
return_error(f'Failed to execute {command} command.\nError:\n{str(e)}')
# ENTRY POINT
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Scripts/SiemApiModule/SiemApiModule_test.py
```python
from typing import Any
from SiemApiModule import (
IntegrationEventsClient,
IntegrationHTTPRequest,
IntegrationOptions,
IntegrationGetEvents,
Method,
)
import json
class MyIntegrationEventsClient(IntegrationEventsClient):
def set_request_filter(self, after: Any):
"""Implement the next call run
Example:
>>> from datetime import datetime
>>> set_request_filter(datetime(year=2022, month=4, day=16))
"""
self.request.headers['after'] = after
class MyIntegrationGetEvents(IntegrationGetEvents):
@staticmethod
def get_last_run(events: list) -> dict:
"""Implement how to get the last run.
Example:
>>> get_last_run([{'created': '2022-4-16'}])
"""
return {'after': events[-1]['created']}
def _iter_events(self):
"""Create an iterator on the events.
If extra authorisation is needed, do that at the beginning of the command.
Example:
>>> for event in _iter_events():
...
"""
response = self.call()
while True:
events = response.json()
yield events
self.client.set_request_filter(events[-1]['created'])
self.call()
class TestSiemAPIModule:
def test_flow(self, requests_mock):
created = '2022-04-16'
requests_mock.post('https://example.com', json=[{'created': created}])
request = IntegrationHTTPRequest(
method=Method.POST, url='https://example.com'
)
options = IntegrationOptions(limit=1)
client = MyIntegrationEventsClient(request, options)
get_events = MyIntegrationGetEvents(client, options)
events = get_events.run()
assert events[0]['created'] == '2022-04-16'
def test_created(self, requests_mock):
created = '2022-04-16'
requests_mock.post('https://example.com', json=[{'created': created}])
request = IntegrationHTTPRequest(
method=Method.POST, url='https://example.com'
)
options = IntegrationOptions(limit=2)
client = MyIntegrationEventsClient(request, options)
get_events = MyIntegrationGetEvents(client, options)
get_events.run()
assert client.request.headers['after'] == '2022-04-16'
def test_headers_parsed(self):
request = IntegrationHTTPRequest(
method=Method.GET,
url='https://example.com',
headers=json.dumps({'Authorization': 'Bearer Token'}),
)
assert request.headers['Authorization']
```
#### File: Integrations/AtlassianConfluenceCloud/AtlassianConfluenceCloud.py
```python
import json
import urllib.parse
from typing import Callable, Dict, Tuple, List
import demistomock as demisto
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
import requests
# Disable insecure warnings
requests.packages.urllib3.disable_warnings() # pylint: disable=no-member
''' CONSTANTS '''
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # ISO8601 format with UTC, default in XSOAR
LOGGING_INTEGRATION_NAME = "[Atlassian Confluence Cloud]"
HTTP_ERROR = {
401: "An error occurred while validating the credentials, please check the username or password.",
404: "The resource cannot be found.",
500: "The server encountered an internal error for Atlassian Confluence Cloud "
"and was unable to complete your request."
}
URL_SUFFIX = {
"CONTENT_SEARCH": "/wiki/rest/api/content/search",
"GROUP": "/wiki/rest/api/group",
"CONTENT": "/wiki/rest/api/content",
"USER": "/wiki/rest/api/search/user?cql=type=user",
"SPACE": "/wiki/rest/api/space",
"PRIVATE_SPACE": "/wiki/rest/api/space/_private"
}
MESSAGES = {
"REQUIRED_URL_FIELD": "Site Name can not be empty.",
"NO_RECORDS_FOUND": "No {} were found for the given argument(s).",
"LIMIT": "{} is an invalid value for limit. Limit must be between 0 and int32.",
"START": "{} is an invalid value for start. Start must be between 0 and int32.",
"INVALID_ACCESS_TYPE": "Invalid value for access type. Access type parameter must be one of 'user', 'admin', "
"or 'site-admin' ",
"REQUIRED_ARGUMENT": "Invalid argument value. {} is a required argument.",
"INVALID_CONTENT_TYPE": "Invalid value for content type. Content type parameter can be 'page' or 'blogpost' ",
"HR_DELETE_CONTENT": "Content with Id {} is deleted successfully.",
"INVALID_STATUS": "Invalid value for status. Status must be one of 'current', 'draft' or 'trashed'.",
"BAD_REQUEST": "Bad request: An error occurred while fetching the data.",
"REQUIRED_SORT_KEY": "If 'sort_order' is specified, 'sort_key' is required.",
"INVALID_STATUS_SEARCH": "Invalid value for status. Status must be one of 'current', 'any', 'archived', 'draft' "
"or 'trashed'.",
"INVALID_PERMISSION": "If the 'permission_account_id' or 'permission_group_name' arguments are given, "
"the 'permission_operations' argument must also be given.",
"INVALID_PERMISSIONS_OPERATION": "If the 'permission_operations' argument is given, "
"'permission_account_id' or 'permission_group_name' argument must also be given.",
"PERMISSION_FORMAT": "Please provide the permission in the valid JSON format. "
"Format accepted - 'operation1:targetType1,operation2:targetType2'",
"ADVANCE_PERMISSION_FORMAT": "Please provide the 'advanced_permissions' in the valid JSON format. ",
"INVALID_SPACE_STATUS": "Invalid value for status. Status must be one of 'current' or 'archived'.",
"INVALID_CONTENT_TYPE_UPDATE_CONTENT": "Invalid value for content type. Content type parameter can be 'page', "
"'blogpost', 'comment' or 'attachment'.",
"INVALID_BODY_REPRESENTATION": "Invalid value for body_representation. Body representation must be one of "
"'editor', 'editor2' or 'storage'.",
"INVALID_DELETION_TYPE": "Invalid value for deletion_type. Deletion type must be one of 'move to trash', "
"'permanent delete' or 'permanent delete draft'.",
"INVALID_TITLE_LENGTH": "Title cannot be longer than 255 characters.",
"INVALID_SPACE_NAME_LENGTH": "Space name cannot be longer than 200 characters.",
"INVALID_SPACE_KEY": "Space Key cannot be longer than 255 characters and should contain alphanumeric characters "
"only.",
"PRIVATE_SPACE_PERMISSION": "Permission can not be granted for a private space."
}
OUTPUT_PREFIX = {
"GROUP": "ConfluenceCloud.Group",
"USER": "ConfluenceCloud.User",
"CONTENT": "ConfluenceCloud.Content",
"COMMENT": "ConfluenceCloud.Comment",
"SPACE": "ConfluenceCloud.Space",
"PAGETOKEN": "ConfluenceCloud.PageToken.Content"
}
DEFAULT_LIMIT = "50"
DEFAULT_START = "0"
LEGAL_ACCESS_TYPES = ["user", "site-admin", "admin"]
LEGAL_CONTENT_STATUS = ['current', 'trashed', 'draft', 'archived', 'any']
LEGAL_CONTENT_TYPES = ["page", "blogpost"]
LEGAL_CONTENT_TYPE_UPDATE_COMMAND = ["page", "blogpost", "comment", "attachment"]
DEFAULT_EXPANDED_FIELD_CONTENT = "childTypes.all,space,version,history,ancestors,container,body"
DEFAULT_EXPANDED_FIELD_SPACE = "history"
LEGAL_SPACE_STATUS = ['current', 'archived']
LEGAL_BODY_REPRESENTATION = ['editor', 'editor2', 'storage']
LEGAL_DELETION_TYPES = {
"move to trash": "current",
"permanent delete": "trashed",
"permanent delete draft": "draft"
}
''' CLIENT CLASS '''
class Client(BaseClient):
"""Client class to interact with the service API
This Client implements API calls, and does not contain any XSOAR logic.
Should only do requests and return data.
It inherits from BaseClient defined in CommonServerPython.
Most calls use _http_request() that handles proxy, SSL verification, etc.
For this implementation, no special attributes are defined.
"""
def http_request(self, *args, **kwargs) -> requests.Response:
"""
Function to make http requests using inbuilt _http_request() method.
"""
kwargs['ok_codes'] = (200, 201, 204)
kwargs['error_handler'] = self.exception_handler
kwargs['resp_type'] = 'response'
return super()._http_request(*args, **kwargs)
@staticmethod
def exception_handler(response: requests.models.Response):
"""
Handle error in the response and display error message based on status code.
:type response: ``requests.models.Response``
:param response: response from API.
:raises: raise DemistoException based on status code of response.
"""
err_msg = ""
if response.status_code in HTTP_ERROR:
err_msg = HTTP_ERROR[response.status_code]
elif response.status_code > 500:
err_msg = HTTP_ERROR[500]
elif response.status_code not in HTTP_ERROR:
try:
# Try to parse json error response
error_entry = response.json()
demisto.error(f"{LOGGING_INTEGRATION_NAME} {error_entry}")
errors = error_entry.get('data', {}).get('errors', [])
if errors:
err_msg = get_error_message(errors)
elif response.status_code == 400:
err_msg = MESSAGES['BAD_REQUEST']
else:
err_msg = error_entry.get('message', '')
except ValueError:
err_msg = '{}'.format(response.text)
raise DemistoException(err_msg)
''' HELPER FUNCTIONS '''
def get_error_message(errors):
err_msg = ""
for error in errors:
if error.get('message').get('key'):
err_msg += f"{error.get('message').get('key')} \n"
if error.get('message').get('translation'):
err_msg += f"{error.get('message').get('translation')} \n"
return err_msg
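# illustrative example of the error structure this helper expects (assumed shape):
# get_error_message([{'message': {'key': 'some.error.key', 'translation': 'Something went wrong'}}])
# -> 'some.error.key \nSomething went wrong \n'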
def strip_args(args: dict):
"""
Strips argument dictionary values.
:type args: dict
:param args: argument dictionary
"""
for key, value in args.items():
if isinstance(value, str):
args[key] = value.strip()
def validate_url(url: str):
"""
To Validate url parameter.
:type url: str
:param url: url to validate.
"""
if not url:
raise ValueError(MESSAGES["REQUIRED_URL_FIELD"])
def remove_empty_elements_for_context(src):
"""
Recursively remove empty lists, empty dicts, empty strings or None elements from a dictionary.
:type src: ``dict``
:param src: Input dictionary.
:return: Dictionary with all empty lists, empty strings and empty dictionaries removed.
:rtype: ``dict``
"""
def empty(x):
return x is None or x == '' or x == {} or x == []
if not isinstance(src, (dict, list)):
return src
elif isinstance(src, list):
return [v for v in (remove_empty_elements_for_context(v) for v in src) if not empty(v)]
else:
return {k: v for k, v in ((k, remove_empty_elements_for_context(v))
for k, v in src.items()) if not empty(v)}
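# illustrative example (assumed input):
# remove_empty_elements_for_context({'a': '', 'b': {'c': None}, 'd': 1}) -> {'d': 1}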
def validated_required_args_for_permission(permission_account_id, permission_group_name, permission_operations):
"""
Raise ValueError when the permission arguments are inconsistent: an account ID or group name is
provided without operations, or operations are provided without an account ID or group name.
:type permission_account_id: ``str``
:param permission_account_id: Account ID
:type permission_group_name: ``str``
:param permission_group_name: Name of the group
:type permission_operations: ``str``
:param permission_operations: Permissions to be granted
:return: None
"""
if (permission_account_id or permission_group_name) and not permission_operations:
raise ValueError(MESSAGES["INVALID_PERMISSION"])
if permission_operations and (not permission_group_name and not permission_account_id):
raise ValueError(MESSAGES["INVALID_PERMISSIONS_OPERATION"])
def prepare_permission_object(permission_account_id: str, permission_group_name: str, attr: List) -> Dict:
"""
Prepare permission object from the user provided values
:type permission_account_id: ``str``
:param permission_account_id: Account ID of the user to whom permission should be granted.
:type permission_group_name: ``str``
:param permission_group_name: Group name to whom permission should be granted.
:type attr: ``List``
:param attr: Operation and Target Type specified by user
:rtype: ``Dict``
:return: Returns permission object
"""
permission_object = {
"subjects": {
"user": {
"results": [
{
"accountId": permission_account_id
}
]
},
"group": {
"results": [
{
"name": permission_group_name
}
]
}
},
"operation": {
"operation": attr[0],
"targetType": attr[1]
},
"anonymousAccess": False,
"unlicensedAccess": False
}
return permission_object
def validate_permissions(args: Dict[str, Any]) -> List:
"""
Validates the permission argument provided by user and prepare permission object accordingly
:type args: ``dict``
:param args: Input dictionary.
:return: Permission object.
:rtype: ``List``
"""
space_permission = []
permission_account_id = args.get('permission_account_id', '')
permission_group_name = args.get('permission_group_name', '')
permission_operations = args.get('permission_operations', '')
validated_required_args_for_permission(permission_account_id, permission_group_name, permission_operations)
if permission_operations:
# create a list of all the permission provided by user
permissions = [permission.strip() for permission in permission_operations.split(",") if permission.strip()]
# separate target_type and operation for the single permission
for permission in permissions:
if permission:
attr = [operation.strip() for operation in permission.split(":") if operation.strip()]
# if target_type or operation is missing then raise ValueError
if len(attr) != 2:
raise ValueError(MESSAGES["PERMISSION_FORMAT"])
permission_object = prepare_permission_object(permission_account_id, permission_group_name, attr)
space_permission.append(permission_object)
return space_permission
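# illustrative example (assumed argument values):
# validate_permissions({'permission_account_id': '5b10ac8d82e05b22cc7d4ef5',
#                       'permission_operations': 'read:space,create:page'})
# -> a list with two permission objects, one per 'operation:targetType' pair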
def validate_list_command_args(args: Dict[str, str]) -> Tuple[Optional[int], Optional[int]]:
"""
Validate arguments for all list commands, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Parameters to send in request
:rtype: ``Tuple``
"""
limit = arg_to_number(args.get('limit', DEFAULT_LIMIT))
if limit < 0 or limit > 2147483647: # type:ignore
raise ValueError(MESSAGES["LIMIT"].format(limit))
offset = arg_to_number(args.get('offset', DEFAULT_START))
if offset < 0 or offset > 2147483647: # type:ignore
raise ValueError(MESSAGES["START"].format(offset))
return limit, offset
def validate_list_group_args(args: Dict[str, str]):
"""
Validate arguments for confluence-cloud-group-list command, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
"""
access_type = args.get("access_type", "").lower()
if access_type and access_type not in LEGAL_ACCESS_TYPES:
raise ValueError(MESSAGES["INVALID_ACCESS_TYPE"])
return access_type
def prepare_group_args(args: Dict[str, str]) -> Dict[str, str]:
"""
Prepare params for list group command
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
"""
limit, offset = validate_list_command_args(args)
access_type = validate_list_group_args(args)
return assign_params(limit=limit, start=offset, accessType=access_type)
def prepare_hr_for_groups(groups: List[Dict[str, Any]]) -> str:
"""
Prepare human readable for list groups command.
:type groups: ``List[Dict[str, Any]]``
:param groups:The group data.
:rtype: ``str``
:return: Human readable.
"""
hr_list = []
for group in groups:
hr_record = {
'ID': group.get('id', ''),
'Name': group.get('name', '')
}
hr_list.append(hr_record)
return tableToMarkdown('Group(s)', hr_list, ['ID', 'Name'],
removeNull=True)
def prepare_content_create_params(args) -> Dict[str, Any]:
"""
Prepare json object for content create command
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Body parameters to send in request
:rtype: ``Dict[str, Any]``
"""
body_representation = args.get('body_representation', '')
params = {
"title": args['title'],
"type": args['type'].lower(),
"space": {
"key": args.get('space_key', '')
},
"status": args.get('status', 'current'),
"body": {
body_representation: {
"value": args.get('body_value', ''),
"representation": body_representation
}
},
"ancestors": [
{
"id": args.get('ancestor_id', '')
}
]
}
return remove_empty_elements_for_context(params)
def validate_create_content_args(args: Dict[str, str], is_update: bool = False):
"""
Validate arguments for confluence-cloud-content-create command, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:type is_update: ``bool``
:param is_update: Whether command is update content or not.
:return: None
:rtype: ``None``
"""
title = args['title']
if not title:
raise ValueError(MESSAGES['REQUIRED_ARGUMENT'].format("title"))
if len(title) > 255:
raise ValueError(MESSAGES["INVALID_TITLE_LENGTH"])
content_type = args['type'].lower()
if not content_type:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("type"))
if not is_update and content_type not in LEGAL_CONTENT_TYPES:
raise ValueError(MESSAGES["INVALID_CONTENT_TYPE"])
if is_update and content_type not in LEGAL_CONTENT_TYPE_UPDATE_COMMAND:
raise ValueError(MESSAGES["INVALID_CONTENT_TYPE_UPDATE_CONTENT"])
space_key = args.get('space_key', '')
if not is_update and not space_key:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("space_key"))
body_value = args.get('body_value', '')
body_representation = args.get('body_representation', '')
if content_type == "comment":
if body_value and body_representation:
if body_representation not in LEGAL_BODY_REPRESENTATION:
raise ValueError(MESSAGES["INVALID_BODY_REPRESENTATION"])
else:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("'body_value' and 'body_representation'"))
def prepare_hr_for_content_create(content: Dict[str, Any], content_type: str) -> str:
"""
Prepare human readable for content create, comment create and content update command.
:type content: ``Dict[str, Any]``
:param content:The content data.
:type content_type: ``str``
:param content_type: Type of the content.
:rtype: ``str``
:return: Human readable.
"""
hr_record = {
'ID': content.get('id', ''),
'Title': f"[{content.get('title', '')}]"
f"({content.get('_links', {}).get('base', '')}{content.get('_links', {}).get('webui', '')})",
'Type': content.get('type', ''),
'Status': content.get('status', ''),
'Space Name': content.get('space', {}).get('name', ''),
'Created By': content.get('history', {}).get('createdBy', {}).get('displayName', ''),
'Created At': content.get('history', {}).get('createdDate', '')
}
return tableToMarkdown(f'{content_type}', hr_record,
['ID', 'Title', 'Type', 'Status', 'Space Name', 'Created By', 'Created At'],
removeNull=True)
def prepare_hr_for_content_search(contents: list, url_prefix: str) -> str:
"""
Prepare human readable for content search and content list command.
:type contents: ``list``
:param contents: List of content.
:type url_prefix: ``str``
:param url_prefix: Url prefix of the content.
:rtype: ``str``
:return: Human readable.
"""
hr_list = []
for content in contents:
hr_record = {
'ID': content.get('id', ''),
'Title': f"[{content.get('title', '')}]"
f"({url_prefix}{content.get('_links', {}).get('webui', '')})",
'Type': content.get('type', ''),
'Status': content.get('status', ''),
'Space Name': content.get('space', {}).get('name', ''),
'Created By': content.get('history', {}).get('createdBy', {}).get('displayName', ''),
'Created At': content.get('history', {}).get('createdDate', ''),
'Version': content.get('version', {}).get('number', '')
}
hr_list.append(hr_record)
hr = tableToMarkdown('Content(s)', hr_list,
['ID', 'Title', 'Type', 'Status', 'Space Name', 'Created By', 'Created At', 'Version'],
removeNull=True)
return hr
def validate_delete_content_args(args: Dict[str, str]):
"""
Validate arguments for confluence-cloud-content-delete command, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: None
"""
content_id = args["content_id"]
if not content_id:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("content_id"))
status = args.get("deletion_type", "").lower()
if status:
if status not in LEGAL_DELETION_TYPES.keys():
raise ValueError(MESSAGES["INVALID_DELETION_TYPE"])
def prepare_comment_create_params(args) -> Dict[str, Any]:
"""
Prepare json object for comment create command
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Body parameters to send in request
:rtype: ``Dict[str, Any]``
"""
body_representation = args['body_representation']
container_type = args.get('container_type', '')
params = {
"type": "comment",
"status": args.get('status', 'current'),
"container": {
"id": args['container_id'],
"type": container_type
},
"body": {
body_representation: {
"value": args['body_value'],
"representation": body_representation
}
},
"ancestors": [
{
"id": args.get('ancestor_id', '')
}
]
}
params = remove_empty_elements_for_context(params)
params["container"]["type"] = container_type
return params
def validate_comment_args(args: Dict[str, str]):
"""
Validate arguments for confluence-cloud-comment-create command, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: None
"""
body_value = args['body_value']
if not body_value:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("Comment body_value"))
body_representation = args['body_representation']
if not body_representation:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("body_representation"))
if body_representation not in LEGAL_BODY_REPRESENTATION:
raise ValueError(MESSAGES["INVALID_BODY_REPRESENTATION"])
container_id = args['container_id']
if not container_id:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("container_id"))
def prepare_hr_for_users(users: List[Dict[str, Any]]) -> str:
"""
Prepare human readable for list users command.
:type users: ``List[Dict[str, Any]]``
:param users: The user data.
:rtype: ``str``
:return: Human readable.
"""
hr_list = []
for user in users:
hr_record = {
'Account ID': user['user'].get('accountId', ''),
'Name': user['user'].get('displayName', ''),
'User Type': user['user'].get('type', '')
}
hr_list.append(hr_record)
return tableToMarkdown('User(s)', hr_list, ['Account ID', 'Name', 'User Type'], removeNull=True)
def prepare_expand_argument(expand: str, default_fields: str) -> str:
"""
The 'expand' command argument specifies which properties should be expanded.
In this integration, several of the most significant properties are expanded by default.
Additional properties that users want to expand can still be provided.
This method combines the default expand fields with the expand fields specified by the user.
:type expand: ``str``
:param expand: The expand argument passed by the user.
:type default_fields: ``str``
:param default_fields: The default fields.
:return: expand argument value to send in request
:rtype: ``str``
"""
default_expand_fields = default_fields.split(",")
custom_expand_fields = set(expand.split(","))
expand_fields = ""
for expand_field in custom_expand_fields:
if expand_field.strip() not in default_expand_fields:
expand_fields += f',{expand_field.strip()}'
return default_fields + expand_fields
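# e.g. prepare_expand_argument('body,space', 'space,version') -> 'space,version,body'
# (fields already present in the defaults are not duplicated)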
def validate_query_argument(args: Dict[str, str]):
"""
Validate query argument of content search command
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: None
"""
query = args['query']
if not query:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("query"))
def prepare_search_content_argument(args: Dict[str, str]) -> Dict[str, Any]:
"""
Prepare params for confluence-cloud-content-search command.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Parameters to send in request
:rtype: ``Dict[str, Any]``
"""
limit, offset = validate_list_command_args(args)
validate_query_argument(args)
params = {'cql': args['query'],
'cursor': args.get('next_page_token'),
'expand': DEFAULT_EXPANDED_FIELD_CONTENT,
'limit': limit
}
expand = args.get('expand', '')
if expand:
params['expand'] = prepare_expand_argument(expand, DEFAULT_EXPANDED_FIELD_CONTENT)
content_status = argToList(args.get('content_status', ''))
params["cqlcontext"] = json.dumps({"contentStatuses": content_status})
return assign_params(**params)
def prepare_cursor_for_content(response_json: Dict[str, str]) -> str:
"""
Split the query string parameters from a link and extract the value of the 'cursor' parameter.
:type response_json: ``Dict[str, str]``
:param response_json: API response.
:return: Next Page Token(Cursor).
:rtype: ``str``
"""
next_cursor = ""
next_record = response_json.get('_links', {}).get('next', '') # type:ignore
if next_record:
next_cursor_split = next_record.split('?')
parsed_next_cursor = urllib.parse.parse_qs(next_cursor_split[1])
next_cursor = parsed_next_cursor.get('cursor', [])[0]
return next_cursor
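# illustrative example (assumed link): a 'next' link such as
# '/wiki/rest/api/content/search?cursor=abc123&limit=25' yields the token 'abc123'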
def validate_list_content_args(args):
"""
Validate arguments for confluence_cloud_content_list command, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: None
"""
sort_order = args.get('sort_order', '').lower()
sort_key = args.get('sort_key', '')
if sort_order and not sort_key:
raise ValueError(MESSAGES['REQUIRED_SORT_KEY'])
content_type = args.get('type', 'page').lower()
if content_type not in LEGAL_CONTENT_TYPES:
raise ValueError(MESSAGES['INVALID_CONTENT_TYPE'])
status = args.get('status', '').lower()
if status and status not in LEGAL_CONTENT_STATUS:
raise ValueError(MESSAGES['INVALID_STATUS_SEARCH'])
def prepare_list_content_argument(args: Dict[str, str]) -> Dict[str, Any]:
"""
Prepare params for confluence_cloud_content_list command.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Parameters to send in request
:rtype: ``Dict[str, Any]``
"""
validate_list_content_args(args)
limit, offset = validate_list_command_args(args)
params = {'limit': limit,
'start': offset,
'spaceKey': args.get('space_key', ''),
'type': args.get('type', 'page').lower()
}
sort_order = args.get('sort_order', '').lower()
sort_key = args.get('sort_key', '')
if sort_order and sort_key:
params['orderby'] = f'{sort_key} {sort_order}'
elif sort_key:
params['orderby'] = f'{sort_key}'
content_creation_date = arg_to_datetime(args.get('creation_date'))
if content_creation_date:
params['postingDay'] = content_creation_date.date() # type: ignore
params['status'] = args.get('status', '').lower()
params['expand'] = DEFAULT_EXPANDED_FIELD_CONTENT
expand = args.get('expand', '')
if expand:
params['expand'] = prepare_expand_argument(expand, DEFAULT_EXPANDED_FIELD_CONTENT)
return assign_params(**params)
def validate_create_space_args(args: Dict[str, str]):
"""
Validate arguments for confluence-cloud-space-create command, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: None
"""
unique_key = args.get('unique_key')
if not unique_key:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("unique_key"))
if len(unique_key) > 255 or not unique_key.isalnum():
raise ValueError(MESSAGES["INVALID_SPACE_KEY"])
name = args.get('name')
if not name:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("name"))
if len(name) > 200:
raise ValueError(MESSAGES["INVALID_SPACE_NAME_LENGTH"])
is_private_space = argToBoolean(args.get('is_private_space', False))
if is_private_space:
if args.get('advanced_permissions') or args.get('permission_operations'):
raise ValueError(MESSAGES["PRIVATE_SPACE_PERMISSION"])
if args.get('advanced_permissions'):
try:
json.loads(args['advanced_permissions'])
except (json.JSONDecodeError, json.decoder.JSONDecodeError, AttributeError):
raise ValueError(MESSAGES["ADVANCE_PERMISSION_FORMAT"])
def prepare_create_space_args(args: Dict[str, str]) -> Tuple[dict, Union[bool, str]]:
"""
Prepare json object for confluence-cloud-space-create command.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Parameters to send in the request and whether the space is private.
:rtype: ``Tuple[dict, Union[bool, str]]``
"""
is_private_space = argToBoolean(args.get('is_private_space', False))
if args.get('advanced_permissions'):
permissions = json.loads(args['advanced_permissions'])
else:
permissions = validate_permissions(args)
params = {
"key": args['unique_key'],
"name": args['name'],
"description": {
"plain": {
"value": args.get('description', ''),
"representation": "plain"
}
},
"permissions": permissions
}
params = remove_empty_elements_for_context(params)
return params, is_private_space
def prepare_hr_for_space_create(space: Dict[str, Any]) -> str:
"""
Prepare human readable for create space command.
:type space: ``Dict[str, Any]``
:param space: The space data.
:rtype: ``str``
:return: Human readable.
"""
hr_record = {
'ID': space.get('id', ''),
'Name': f"[{space.get('name', '')}]"
f"({space.get('_links', {}).get('base', '')}{space.get('_links', {}).get('webui', '')})",
'Type': space.get('type', ''),
'Status': space.get('status', ''),
}
return tableToMarkdown('Space', hr_record,
['ID', 'Name', 'Type', 'Status'],
removeNull=True)
def validate_status_argument(args: Dict[str, str]):
"""
Validates the status argument of confluence-cloud-space-list command, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: None
"""
status = args.get('status')
if status and status.lower() not in LEGAL_SPACE_STATUS:
raise ValueError(MESSAGES["INVALID_SPACE_STATUS"])
def prepare_list_space_args(args: Dict[str, str]) -> Dict[str, Any]:
"""
Prepare params for confluence-cloud-space-list command.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Parameters to send in request
:rtype: ``Dict[str, Any]``
"""
validate_status_argument(args)
limit, offset = validate_list_command_args(args)
params = {'limit': limit, 'start': offset,
'spaceKey': argToList(args.get('space_key')),
'spaceId': argToList(args.get('space_id')),
'type': args.get('type'),
'status': args.get('status')
}
favourite = args.get('favourite', '')
if favourite:
favourite = "true" if argToBoolean(favourite) else "false"
params['favourite'] = favourite
params['expand'] = DEFAULT_EXPANDED_FIELD_SPACE
expand = args.get('expand', '')
if expand:
params['expand'] = prepare_expand_argument(expand, DEFAULT_EXPANDED_FIELD_SPACE)
return assign_params(**params)
def prepare_hr_for_space_list(spaces: List[Dict[str, Any]], url_prefix: str) -> str:
"""
Prepare human readable for list space command.
:type spaces: ``List[Dict[str, Any]]``
:param spaces: The space data.
:type url_prefix: ``str``
:param url_prefix: Url prefix of the space.
:rtype: ``str``
:return: Human readable.
"""
hr_list = []
for space in spaces:
hr_record = {
'ID': space.get('id', ''),
'Space Key': space.get('key', ''),
'Name': f"[{space.get('name', '')}]"
f"({url_prefix}{space.get('_links', {}).get('webui', '')})",
'Type': space.get('type', ''),
'Status': space.get('status', ''),
'Created By': space.get('history', {}).get('createdBy', {}).get('displayName', ''),
'Created At': space.get('history', {}).get('createdDate', '')
}
hr_list.append(hr_record)
hr = tableToMarkdown('Space(s)', hr_list,
['ID', 'Space Key', 'Name', 'Type', 'Status', 'Created By', 'Created At'], removeNull=True)
return hr
def validate_update_content_args(args: Dict[str, str]):
"""
Validate arguments for confluence-cloud-content-update command, raise ValueError on invalid arguments.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: None
"""
validate_create_content_args(args, is_update=True)
content_id = args["content_id"]
if not content_id:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("content_id"))
version = args["version"]
if not version:
raise ValueError(MESSAGES["REQUIRED_ARGUMENT"].format("version"))
''' COMMAND FUNCTIONS '''
def test_module(client: Client) -> str:
"""Tests API connectivity and authentication'
Returning 'ok' indicates that the integration works like it is supposed to.
Connection to the service is successful.
Raises exceptions if something goes wrong.
:type client: ``Client``
:param client: client to use
:return: 'ok' if test passed, anything else will fail the test.
:rtype: ``str``
"""
params: Dict = {
"cql": "type=page",
"limit": 1
}
client.http_request(method='GET', url_suffix=URL_SUFFIX["CONTENT_SEARCH"], params=params)
return 'ok'
def confluence_cloud_user_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Returns a list of users.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
limit, offset = validate_list_command_args(args)
params = assign_params(limit=limit, start=offset)
response = client.http_request(method="GET", url_suffix=URL_SUFFIX["USER"], params=params)
response_json = response.json()
total_records = response_json.get('results', [])
if not total_records:
return CommandResults(readable_output=MESSAGES['NO_RECORDS_FOUND'].format('user(s)'))
context = []
for user in total_records:
context.append(remove_empty_elements_for_context(user.get('user', {})))
readable_hr = prepare_hr_for_users(total_records)
return CommandResults(
outputs_prefix=OUTPUT_PREFIX['USER'],
outputs_key_field='accountId',
outputs=context,
readable_output=readable_hr,
raw_response=response_json)
def confluence_cloud_content_search_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Returns the list of content that matches a Confluence Query Language (CQL) query.
The type of content can be a page, blogpost, or comment.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
params = prepare_search_content_argument(args)
response = client.http_request(method="GET", url_suffix=URL_SUFFIX["CONTENT_SEARCH"], params=params)
response_json = response.json()
total_records = response_json.get('results', [])
if not total_records:
return CommandResults(readable_output=MESSAGES['NO_RECORDS_FOUND'].format('content(s)'))
# Creating Context data
context = remove_empty_elements_for_context(total_records)
next_cursor = prepare_cursor_for_content(response_json)
next_page_context = {
"next_page_token": next_cursor,
"name": "confluence-cloud-content-search"
}
next_page_context = remove_empty_elements_for_context(next_page_context)
outputs = {
f"{OUTPUT_PREFIX['CONTENT']}(val.id == obj.id)": context,
f"{OUTPUT_PREFIX['PAGETOKEN']}(val.name == obj.name)": next_page_context
}
# Creating Human Readable
url_prefix = response_json.get('_links', {}).get('base', '')
readable_hr = prepare_hr_for_content_search(total_records, url_prefix)
if next_cursor:
readable_hr += f'Run the command with argument next_page_token={next_cursor} to see the next set of contents.\n'
return CommandResults(
outputs=outputs,
readable_output=readable_hr,
raw_response=response_json)
def confluence_cloud_content_update_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Update the existing content with new content.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
validate_update_content_args(args)
content_id = args["content_id"]
params = prepare_content_create_params(args)
params["version"] = {
"number": args["version"]
}
request_url = URL_SUFFIX["CONTENT"] + "/{}".format(content_id)
response = client.http_request(method="PUT", url_suffix=request_url, json_data=params)
response_json = response.json()
context = remove_empty_elements_for_context(response_json)
readable_hr = prepare_hr_for_content_create(response_json, "Content")
return CommandResults(
outputs_prefix=OUTPUT_PREFIX['CONTENT'],
outputs_key_field='id',
outputs=context,
readable_output=readable_hr,
raw_response=response_json)
def confluence_cloud_content_delete_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
This command moves a piece of content to the space's trash or purges it from the trash,
depending on the content's type and status.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
validate_delete_content_args(args)
content_id = args["content_id"]
status = args.get("deletion_type", "").lower()
params = assign_params(status=LEGAL_DELETION_TYPES.get(status))
request_url = URL_SUFFIX["CONTENT"] + "/{}".format(content_id)
client.http_request(method="DELETE", url_suffix=request_url, params=params)
return CommandResults(readable_output=MESSAGES["HR_DELETE_CONTENT"].format(content_id))
def confluence_cloud_content_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Returns the list of contents of confluence.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
params = prepare_list_content_argument(args)
response = client.http_request(method="GET", url_suffix=URL_SUFFIX["CONTENT"], params=params)
response_json = response.json()
total_records = response_json.get('results', [])
if not total_records:
return CommandResults(readable_output=MESSAGES['NO_RECORDS_FOUND'].format('content(s)'))
context = remove_empty_elements_for_context(total_records)
url_prefix = response_json.get('_links', {}).get('base', '')
readable_hr = prepare_hr_for_content_search(total_records, url_prefix)
return CommandResults(
outputs_prefix=OUTPUT_PREFIX['CONTENT'],
outputs_key_field="id",
outputs=context,
readable_output=readable_hr,
raw_response=response_json)
def confluence_cloud_space_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Returns a list of all Confluence spaces.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
params = prepare_list_space_args(args)
response = client.http_request(method="GET", url_suffix=URL_SUFFIX["SPACE"], params=params)
response_json = response.json()
total_records = response_json.get('results', [])
if not total_records:
return CommandResults(readable_output=MESSAGES['NO_RECORDS_FOUND'].format('space(s)'))
context = remove_empty_elements_for_context(total_records)
url_prefix = response_json.get('_links', {}).get('base', '')
readable_hr = prepare_hr_for_space_list(total_records, url_prefix)
return CommandResults(
outputs_prefix=OUTPUT_PREFIX['SPACE'],
outputs_key_field='id',
outputs=context,
readable_output=readable_hr,
raw_response=response_json)
def confluence_cloud_comment_create_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Creates a comment for a given content.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
validate_comment_args(args)
params = prepare_comment_create_params(args)
response = client.http_request(method="POST", url_suffix=URL_SUFFIX["CONTENT"], json_data=params)
response_json = response.json()
context = remove_empty_elements_for_context(response_json)
readable_hr = prepare_hr_for_content_create(response_json, "Comment")
return CommandResults(
outputs_prefix=OUTPUT_PREFIX['COMMENT'],
outputs_key_field='id',
outputs=context,
readable_output=readable_hr,
raw_response=response_json)
def confluence_cloud_content_create_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Creates a page or blog post for a specified space.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
validate_create_content_args(args)
params = prepare_content_create_params(args)
response = client.http_request(method="POST", url_suffix=URL_SUFFIX["CONTENT"], json_data=params)
response_json = response.json()
context = remove_empty_elements_for_context(response_json)
readable_hr = prepare_hr_for_content_create(response_json, "Content")
return CommandResults(
outputs_prefix=OUTPUT_PREFIX['CONTENT'],
outputs_key_field='id',
outputs=context,
readable_output=readable_hr,
raw_response=response_json)
def confluence_cloud_space_create_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Creates a new space in Confluence Cloud.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
validate_create_space_args(args)
params, is_private_space = prepare_create_space_args(args)
url_suffix = URL_SUFFIX["SPACE"]
if is_private_space:
url_suffix = URL_SUFFIX["PRIVATE_SPACE"]
if 'permissions' in params.keys():
del params['permissions']
response = client.http_request(method="POST", url_suffix=url_suffix, json_data=params)
response_json = response.json()
# Creating the Context data
context = remove_empty_elements_for_context(response_json)
# Creating the Human Readable
readable_hr = prepare_hr_for_space_create(response_json)
return CommandResults(
outputs_prefix=OUTPUT_PREFIX['SPACE'],
outputs_key_field='id',
outputs=context,
readable_output=readable_hr,
raw_response=response_json
)
def confluence_cloud_group_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Retrieves the list of groups.
:type client: ``Client``
:param client: Client object to be used.
:type args: ``Dict[str, str]``
:param args: The command arguments provided by the user.
:return: Standard command result or no records found message.
:rtype: ``CommandResults``
"""
params = prepare_group_args(args)
response = client.http_request(method="GET", url_suffix=URL_SUFFIX["GROUP"], params=params)
response_json = response.json()
total_records = response_json.get('results', [])
if not total_records:
return CommandResults(readable_output=MESSAGES['NO_RECORDS_FOUND'].format('group(s)'))
context = remove_empty_elements(total_records)
readable_hr = prepare_hr_for_groups(total_records)
return CommandResults(
outputs_prefix=OUTPUT_PREFIX['GROUP'],
outputs_key_field='id',
outputs=context,
readable_output=readable_hr,
raw_response=response_json)
''' MAIN FUNCTION '''
def main() -> None:
"""
main function, parses params and runs command functions
"""
params = demisto.params()
# get the service API url
url = params['url'].strip()
base_url = "https://{}.atlassian.net".format(url)
verify_certificate = not params.get('insecure', False)
proxy = params.get('proxy', False)
credentials = params.get("username", {})
username = credentials.get('identifier', '').strip()
password = credentials.get('password')
demisto.debug(f'{LOGGING_INTEGRATION_NAME} Command being called is {demisto.command()}')
try:
validate_url(url)
headers: Dict = {
"Accept": "application/json"
}
client = Client(
base_url=base_url,
verify=verify_certificate,
proxy=proxy,
headers=headers,
auth=(username, password)
)
# Commands dictionary
commands: Dict[str, Callable] = {
'confluence-cloud-group-list': confluence_cloud_group_list_command,
'confluence-cloud-user-list': confluence_cloud_user_list_command,
'confluence-cloud-content-search': confluence_cloud_content_search_command,
'confluence-cloud-content-update': confluence_cloud_content_update_command,
'confluence-cloud-content-delete': confluence_cloud_content_delete_command,
'confluence-cloud-content-list': confluence_cloud_content_list_command,
'confluence-cloud-space-list': confluence_cloud_space_list_command,
'confluence-cloud-comment-create': confluence_cloud_comment_create_command,
'confluence-cloud-content-create': confluence_cloud_content_create_command,
'confluence-cloud-space-create': confluence_cloud_space_create_command
}
command = demisto.command()
args = demisto.args()
strip_args(args)
remove_nulls_from_dictionary(args)
if command == 'test-module':
# This is the call made when pressing the integration Test button.
return_results(test_module(client))
elif command in commands:
return_results(commands[command](client, args))
# Log exceptions and return errors
except Exception as e:
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Integrations/AWSSNS/AWSSNS.py
```python
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
def create_entry(title, data, ec):
return {
'ContentsFormat': formats['json'],
'Type': entryTypes['note'],
'Contents': data,
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': tableToMarkdown(title, data) if data else 'No results were found',
'EntryContext': ec
}
def raise_error(error):
return {
'Type': entryTypes['error'],
'ContentsFormat': formats['text'],
'Contents': str(error)
}
def create_subscription(args, client):
try:
attributes = {}
kwargs = {
'TopicArn': args.get('topicArn'),
'Protocol': args.get('protocol')
}
if args.get('endpoint') is not None:
kwargs.update({'Endpoint': args.get('endpoint')})
if args.get('returnSubscriptionArn') is not None:
kwargs.update({'ReturnSubscriptionArn': argToBoolean(args.get('returnSubscriptionArn'))})  # avoid bool('false') evaluating to True
if args.get('deliveryPolicy') is not None:
attributes.update({'DeliveryPolicy': args.get('deliveryPolicy')})
if args.get('filterPolicy') is not None:
attributes.update({'FilterPolicy': args.get('filterPolicy')})
if args.get('rawMessageDelivery') is not None:
attributes.update({'RawMessageDelivery': args.get('rawMessageDelivery')})
if args.get('redrivePolicy') is not None:
attributes.update({'RedrivePolicy': args.get('redrivePolicy')})
if args.get('subscriptionRoleArn') is not None:
attributes.update({'SubscriptionRoleArn': args.get('subscriptionRoleArn')})
if attributes:
kwargs.update({'Attributes': attributes})
response = client.subscribe(**kwargs)
data = {'SubscriptionArn': response['SubscriptionArn']}
ec = {'AWS.SNS.Subscriptions': data}
return create_entry('AWS SNS Subscriptions', data, ec)
except Exception as e:
return raise_error(e)
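# Illustrative sketch (assumption, not taken from the integration) of the boto3 call this
# builds for an SQS subscription with a filter policy; only the arguments the user actually
# supplied end up in kwargs. The ARNs below are placeholders.
#   client.subscribe(TopicArn='arn:aws:sns:us-east-1:123456789012:my-topic',
#                    Protocol='sqs',
#                    Endpoint='arn:aws:sqs:us-east-1:123456789012:my-queue',
#                    Attributes={'FilterPolicy': '{"event": ["create"]}'})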
def list_topics(args, client):
try:
data = []
kwargs = {}
if args.get('nextToken') is not None:
kwargs.update({'NextToken': args.get('nextToken')})
response = client.list_topics(**kwargs)
for topic in response['Topics']:
data.append({'TopicArn': topic})
ec = {'AWS.SNS.Topics': data}
return create_entry('AWS SNS Topics', data, ec)
except Exception as e:
return raise_error(e)
def list_subscriptions_by_topic(args, client):
try:
data = []
kwargs = {}
if args.get('topicArn') is not None:
kwargs.update({'TopicArn': args.get('topicArn')})
if args.get('nextToken') is not None:
kwargs.update({'NextToken': args.get('nextToken')})
response = client.list_subscriptions_by_topic(**kwargs)
for subscription in response['Subscriptions']:
data.append({'SubscriptionArn': subscription['SubscriptionArn']})
ec = {'AWS.SNS.Subscriptions': data}
return create_entry('AWS SNS Subscriptions', data, ec)
except Exception as e:
return raise_error(e)
def send_message(args, client):
try:
data = []
kwargs = {
'Message': args.get('message')
}
if args.get('topicArn') is not None:
kwargs.update({'TopicArn': args.get('topicArn')})
if args.get('targetArn') is not None:
kwargs.update({'TargetArn': args.get('targetArn')})
if args.get('phoneNumber') is not None:
kwargs.update({'PhoneNumber': args.get('phoneNumber')})
if args.get('subject') is not None:
kwargs.update({'Subject': args.get('subject')})
if args.get('messageStructure') is not None:
kwargs.update({'MessageStructure': args.get('messageStructure')})
if args.get('messageDeduplicationId') is not None:
kwargs.update({'MessageDeduplicationId': args.get('messageDeduplicationId')})
if args.get('messageGroupId') is not None:
kwargs.update({'MessageGroupId': args.get('messageGroupId')})
response = client.publish(**kwargs)
data.append({'MessageId': response['MessageId']})
ec = {'AWS.SNS.SentMessages': data}
return create_entry('AWS SNS sent messages', data, ec)
except Exception as e:
return raise_error(e)
def create_topic(args, client):
try:
attributes = {}
kwargs = {'Name': args.get('topicName')}
if args.get('deliveryPolicy') is not None:
attributes.update({'DeliveryPolicy': args.get('deliveryPolicy')})
if args.get('displayName') is not None:
attributes.update({'DisplayName': args.get('displayName')})
if args.get('fifoTopic') is not None:
attributes.update({'FifoTopic': bool(args.get('fifoTopic'))})
if args.get('policy') is not None:
attributes.update({'Policy': args.get('policy')})
if args.get('kmsMasterKeyId') is not None:
attributes.update({'KmsMasterKeyId': args.get('kmsMasterKeyId')})
if args.get('contentBasedDeduplication') is not None:
attributes.update({'ContentBasedDeduplication': args.get('contentBasedDeduplication')})
if attributes:
kwargs.update({'Attributes': attributes})
response = client.create_topic(**kwargs)
data = {'ARN': response['TopicArn']}
ec = {'AWS.SNS.Topic': data}
return create_entry('AWS SNS Topic', data, ec)
except Exception as e:
return raise_error(e)
def delete_topic(args, client):
try:
response = client.delete_topic(TopicArn=args.get('topicArn'))
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
return 'The Topic has been deleted'
except Exception as e:
return raise_error(e)
def test_function(aws_client):
try:
client = aws_client.aws_session(service='sns')
response = client.list_topics()
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
return "ok"
except Exception as e:
return raise_error(e)
def main():
params = demisto.params()
aws_default_region = params.get('defaultRegion')
aws_role_arn = params.get('roleArn')
aws_role_session_name = params.get('roleSessionName')
aws_role_session_duration = params.get('sessionDuration')
aws_role_policy = None
aws_access_key_id = params.get('access_key')
aws_secret_access_key = params.get('secret_key')
verify_certificate = not params.get('insecure', False)
timeout = params.get('timeout')
retries = params.get('retries') or 5
commands = {
'aws-sns-create-subscription': create_subscription,
'aws-sns-list-topics': list_topics,
'aws-sns-list-subscriptions-by-topic': list_subscriptions_by_topic,
'aws-sns-send-message': send_message,
'aws-sns-create-topic': create_topic,
'aws-sns-delete-topic': delete_topic
}
try:
validate_params(aws_default_region, aws_role_arn, aws_role_session_name, aws_access_key_id,
aws_secret_access_key)
aws_client = AWSClient(aws_default_region, aws_role_arn, aws_role_session_name, aws_role_session_duration,
aws_role_policy, aws_access_key_id, aws_secret_access_key, verify_certificate, timeout,
retries)
command = demisto.command()
args = demisto.args()
demisto.debug('Command being called is {}'.format(command))
if command == 'test-module':
return_results(test_function(aws_client))
elif command in commands:
client = aws_client.aws_session(
service='sns',
region=args.get('region'),
role_arn=args.get('roleArn'),
role_session_name=args.get('roleSessionName'),
role_session_duration=args.get('roleSessionDuration'))
return_results(commands[command](args, client))
else:
raise NotImplementedError('{} is not an existing AWS-SNS command'.format(command))
except Exception as e:
return_error("Failed to execute {} command.\nError:\n{}".format(demisto.command(), str(e)))
from AWSApiModule import * # noqa: E402
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Integrations/AzureFirewall/AzureFirewall.py
```python
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import copy
from requests import Response
class AzureFirewallClient:
def __init__(self, subscription_id: str,
resource_group: str,
client_id: str,
api_version: str,
verify: bool,
proxy: bool,
client_secret: str = None,
tenant_id: str = None,
certificate_thumbprint: str = None,
private_key: str = None):
self.resource_group = resource_group
self.subscription_id = subscription_id
self.api_version = api_version
self.default_params = {"api-version": api_version}
is_credentials = (client_secret and tenant_id) or (certificate_thumbprint and private_key)
scope = Scopes.management_azure if is_credentials else \
'https://management.azure.com/user_impersonation offline_access user.read'
grant_type = CLIENT_CREDENTIALS if is_credentials else DEVICE_CODE
token_retrieval_url = f'https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/token' if tenant_id \
else 'https://login.microsoftonline.com/organizations/oauth2/v2.0/token'
if not is_credentials:
client_secret = None
tenant_id = None
certificate_thumbprint = None
private_key = None
self.ms_client = MicrosoftClient(
self_deployed=True,
tenant_id=tenant_id,
token_retrieval_url=token_retrieval_url,
auth_id=client_id,
enc_key=client_secret,
grant_type=grant_type,
base_url=f'https://management.azure.com/subscriptions/{subscription_id}'
f'/resourceGroups/{resource_group}/providers/Microsoft.Network',
scope=scope,
verify=verify,
proxy=proxy,
certificate_thumbprint=certificate_thumbprint,
private_key=private_key
)
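# Illustrative instantiation sketch (an assumption, not taken from the integration): the
# client-credentials flow is selected when client_secret and tenant_id (or a certificate
# thumbprint and private key) are supplied; otherwise the device-code flow against the
# 'organizations' endpoint is used. The api_version value below is only an example.
#   client = AzureFirewallClient(subscription_id='<sub-id>', resource_group='<rg>',
#                                client_id='<app-id>', api_version='2021-03-01',
#                                verify=True, proxy=False,
#                                client_secret='<secret>', tenant_id='<tenant-id>')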
def azure_firewall_list_request(self, resource: str, next_link: str = None) -> dict:
"""
List azure firewalls in resource group or subscription.
Args:
resource (str): The resource which contains the firewalls to list.
next_link (str): URL to retrieve the next set of results.
Returns:
dict: API response from Azure.
"""
if next_link:
full_url = next_link
response = self.ms_client.http_request('GET', full_url=full_url, resp_type="json", timeout=100)
return response
if resource == "resource_group":
full_url = f'https://management.azure.com/subscriptions/{self.subscription_id}' \
f'/resourceGroups/{self.resource_group}/providers/Microsoft.Network/azureFirewalls'
else:
full_url = f'https://management.azure.com/subscriptions/{self.subscription_id}' \
f'/providers/Microsoft.Network/azureFirewalls'
response = self.ms_client.http_request('GET', full_url=full_url, params=self.default_params, resp_type="json",
timeout=100)
return response
def azure_firewall_get_request(self, firewall_name: str) -> dict:
"""
Retrieve azure firewall information.
Args:
firewall_name (str): The name of the azure firewall to retrieve.
Returns:
dict: API response from Azure.
"""
url_suffix = f'azureFirewalls/{firewall_name}'
response = self.ms_client.http_request('GET', url_suffix=url_suffix, params=self.default_params,
resp_type="json", timeout=100)
return response
def azure_firewall_update_request(self, firewall_name: str, firewall_data: dict) -> dict:
"""
Update firewall resource.
Args:
firewall_name (str): The name of the firewall to update.
firewall_data (dict): Firewall resource JSON information.
Returns:
dict: API response from Azure.
"""
url_suffix = f'azureFirewalls/{firewall_name}'
response = self.ms_client.http_request('PUT', url_suffix=url_suffix, params=self.default_params,
json_data=firewall_data,
resp_type="json", timeout=100)
return response
def azure_firewall_policy_create_request(self, policy_name: str, threat_intelligence_mode: str, ip_address: list,
domain_address: list, location: str, tier: str, base_policy_id: str,
enable_proxy: bool, dns_servers: list) -> dict:
"""
Create firewall policy.
Args:
policy_name (str): The name of the azure policy to create.
threat_intelligence_mode (str): The operation mode for Threat Intelligence.
ip_address (list): IP addresses for the threat intelligence whitelist.
domain_address (list): Fully qualified domain name for the threat intelligence whitelist.
location (str): Policy resource region location.
tier (str): Tier of an Azure Policy.
base_policy_id (str): The ID of the parent firewall policy from which rules are inherited.
enable_proxy (bool): Enable DNS Proxy on Firewalls attached to the Firewall Policy.
dns_servers (list): Custom DNS Servers.
Returns:
dict: API response from Azure.
"""
data = remove_empty_elements({
"location": location,
"properties": {
"threatIntelMode": threat_intelligence_mode,
"threatIntelWhitelist": {
"ipAddresses": ip_address,
"fqdns": domain_address
},
"snat": {
"privateRanges": None
},
"dnsSettings": {
"servers": dns_servers,
"enableProxy": enable_proxy
},
"basePolicy": {"id": base_policy_id},
"sku": {
"tier": tier
}
}
})
url_suffix = f'firewallPolicies/{policy_name}'
response = self.ms_client.http_request('PUT', url_suffix=url_suffix, params=self.default_params, json_data=data,
resp_type="json", timeout=100)
return response
def azure_firewall_policy_update_request(self, policy_name: str, policy_data: dict) -> dict:
"""
Update policy resource.
Args:
policy_name (str): The name of the policy resource to update.
policy_data (dict): Policy resource JSON information.
Returns:
dict: API response from Azure.
"""
url_suffix = f'firewallPolicies/{policy_name}'
response = self.ms_client.http_request('PUT', url_suffix=url_suffix, params=self.default_params,
json_data=policy_data,
resp_type="json", timeout=100)
return response
def azure_firewall_policy_get_request(self, policy_name: str) -> dict:
"""
Retrieve policy information.
Args:
policy_name (str): The name of the policy to retrieve.
Returns:
dict: API response from Azure.
"""
url_suffix = f'firewallPolicies/{policy_name}'
response = self.ms_client.http_request('GET', url_suffix=url_suffix, params=self.default_params,
resp_type="json", timeout=100)
return response
def azure_firewall_policy_delete_request(self, policy_name: str) -> Response:
"""
Delete policy resource.
Args:
policy_name (str): The name of the policy to delete.
Returns:
Response: API response from Azure.
"""
url_suffix = f'firewallPolicies/{policy_name}'
response = self.ms_client.http_request('DELETE', url_suffix=url_suffix, params=self.default_params,
resp_type="response", timeout=100)
return response
def azure_firewall_policy_list_request(self, resource: str, next_link: str = None) -> dict:
"""
List policies in resource group or subscription.
Args:
resource (str): The resource which contains the policy to list.
next_link (str): URL to retrieve the next set of results.
Returns:
dict: API response from Azure.
"""
if next_link:
full_url = next_link
response = self.ms_client.http_request('GET', full_url=full_url, resp_type="json", timeout=100)
return response
if resource == "resource_group":
full_url = f'https://management.azure.com/subscriptions/{self.subscription_id}' \
f'/resourceGroups/{self.resource_group}/providers/Microsoft.Network/firewallPolicies'
else:
full_url = f'https://management.azure.com/subscriptions/{self.subscription_id}' \
f'/providers/Microsoft.Network/firewallPolicies'
response = self.ms_client.http_request('GET', full_url=full_url, params=self.default_params, resp_type="json",
timeout=100)
return response
def azure_firewall_policy_rule_collection_create_or_update_request(self, policy_name: str, collection_name: str,
collection_data: dict) -> dict:
"""
Create or update policy rule collection.
Args:
policy_name (str): The name of the policy which contains the collection.
collection_name (str): The name of the rule collection to create or update.
collection_data (dict): Rule collection information.
Returns:
dict: API response from Azure.
"""
url_suffix = f'firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
response = self.ms_client.http_request('PUT', url_suffix=url_suffix, params=self.default_params,
resp_type="json", json_data=collection_data, timeout=100)
return response
def azure_firewall_policy_rule_collection_list_request(self, policy_name: str, next_link: str = None) -> dict:
"""
List collection rules in policy.
Args:
policy_name (str): The name of the policy which contains the rule collections to list.
next_link (str): URL to retrieve the next set of results.
Returns:
dict: API response from Azure.
"""
if next_link:
full_url = next_link
response = self.ms_client.http_request('GET', full_url=full_url, resp_type="json", timeout=100)
return response
url_suffix = f'firewallPolicies/{policy_name}/ruleCollectionGroups'
response = self.ms_client.http_request('GET', url_suffix=url_suffix, params=self.default_params,
resp_type="json", timeout=100)
return response
def azure_firewall_policy_rule_collection_get_request(self, policy_name: str,
collection_name: str) -> dict:
"""
Retrieve policy collection group information.
Args:
policy_name (str): The name of the policy which contains the collection.
collection_name (str): The name of the policy rule collection to retrieve.
Returns:
dict: API response from Azure.
"""
url_suffix = f'firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
response = self.ms_client.http_request('GET', url_suffix=url_suffix, params=self.default_params,
resp_type="json", timeout=100)
return response
def azure_firewall_policy_rule_collection_delete_request(self, policy_name: str, collection_name: str) -> Response:
"""
Delete policy collection group information.
Args:
policy_name (str): The name of the policy which contains the collection.
collection_name (str): The name of the policy rule collection to delete.
Returns:
Response: API response from Azure.
"""
url_suffix = f'firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
response = self.ms_client.http_request('DELETE', url_suffix=url_suffix, params=self.default_params,
resp_type="response", timeout=100)
return response
def azure_firewall_policy_network_rule_collection_create_request(self, policy_name: str, collection_priority: int,
collection_name: str, action: str,
rule_information: dict) -> dict:
"""
Create network rule collection in firewall or policy.
Args:
policy_name (str): The name of the policy which contains the collection.
collection_priority (int): The priority of the nat rule collection resource.
collection_name (str): The name of the nat rule collection which contains the rule.
action (str): The action type of a rule collection.
rule_information (dict): Rule information.
Returns:
dict: API response from Azure.
"""
payload = remove_empty_elements({
"properties": {
"priority": collection_priority,
"ruleCollections": [
{
"ruleCollectionType": "FirewallPolicyFilterRuleCollection",
"name": collection_name,
"priority": collection_priority,
"action": {
"type": action
},
"rules": [
rule_information
]
}
]
}
})
url_suffix = f'firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
response = self.ms_client.http_request('PUT', url_suffix=url_suffix, params=self.default_params,
json_data=payload, resp_type="json", timeout=100)
return response
def azure_firewall_service_tag_list_request(self, location: str, next_link: str = None) -> dict:
"""
Retrieve service tag information resources.
Args:
location (str): The location that will be used as a reference for the version.
next_link (str): URL to retrieve the next set of results.
Returns:
dict: API response from Azure.
"""
if next_link:
full_url = next_link
response = self.ms_client.http_request('GET', full_url=full_url, resp_type="json", timeout=100)
return response
full_url = f'https://management.azure.com/subscriptions/{self.subscription_id}' \
f'/providers/Microsoft.Network/locations/{location}/serviceTagDetails'
response = self.ms_client.http_request('GET', full_url=full_url, resp_type="json", params=self.default_params)
return response
def azure_firewall_ip_group_create_request(self, ip_group_name: str, location: str,
ip_address: list = None) -> dict:
"""
Create IP group resource.
Args:
ip_group_name (str): The name of the IP group resource to create.
location (str): The location of the IP group resource.
ip_address (list): IP addresses or IP address prefixes in the IP group resource.
Returns:
dict: API response from Azure.
"""
payload = remove_empty_elements({
"location": location,
"properties": {
"ipAddresses": ip_address
}
})
url_suffix = f'ipGroups/{ip_group_name}'
response = self.ms_client.http_request('PUT', url_suffix=url_suffix, params=self.default_params,
json_data=payload, resp_type="json", timeout=100)
return response
def azure_firewall_ip_group_list_request(self, resource: str, next_link: str = None) -> dict:
"""
List IP Groups in resource group or subscription.
Args:
resource (str): The resource which contains the IP Groups to list.
next_link (str): URL to retrieve the next set of results.
Returns:
dict: API response from Azure.
"""
if next_link:
full_url = next_link
response = self.ms_client.http_request('GET', full_url=full_url, resp_type="json", timeout=100)
return response
if resource == "resource_group":
full_url = f'https://management.azure.com/subscriptions/{self.subscription_id}' \
f'/resourceGroups/{self.resource_group}/providers/Microsoft.Network/ipGroups'
else:
full_url = f'https://management.azure.com/subscriptions/{self.subscription_id}/providers/Microsoft.Network/ipGroups'
response = self.ms_client.http_request('GET', full_url=full_url, params=self.default_params, resp_type="json",
timeout=100)
return response
def azure_firewall_ip_group_get_request(self, ip_group_name: str) -> dict:
"""
Retrieve IP group information.
Args:
ip_group_name (str): The name of the IP group resource to retrieve.
Returns:
dict: API response from Azure.
"""
url_suffix = f'ipGroups/{ip_group_name}'
response = self.ms_client.http_request('GET', url_suffix=url_suffix, params=self.default_params,
resp_type="json", timeout=100)
return response
def azure_firewall_ip_group_delete_request(self, ip_group_name: str) -> Response:
"""
Delete IP group resource.
Args:
ip_group_name (str): The name of the IP group resource to delete.
Returns:
Response: API response from Azure.
"""
url_suffix = f'ipGroups/{ip_group_name}'
response = self.ms_client.http_request('DELETE', url_suffix=url_suffix, params=self.default_params,
resp_type="response", timeout=100)
return response
def azure_firewall_ip_group_update_request(self, ip_group_name: str, ip_group_data: dict) -> dict:
"""
Update IP Group resource.
Args:
ip_group_name (str): The name of the IP Group resource to update.
ip_group_data (dict): IP Group resource JSON information.
Returns:
dict: API response from Azure.
"""
url_suffix = f'ipGroups/{ip_group_name}'
response = self.ms_client.http_request('PUT', url_suffix=url_suffix, params=self.default_params,
json_data=ip_group_data, resp_type="json", timeout=100)
return response
def generate_polling_readable_message(resource_type_name: str, resource_name: str) -> str:
"""
Generate appropriate markdown message for polling commands.
Args:
resource_type_name (str): The type name of the updated resource. For example: Policy, Firewall, IP-Group, etc.
resource_name (str): The name of the updated resource.
Returns:
str: Polling header message.
"""
return f'## Polling in progress for {resource_type_name} {resource_name}.'
def create_scheduled_command(command_name: str, interval: int, timeout: int, **kwargs):
"""
Create scheduled command object.
Args:
command_name (str): The command that'll run after next_run_in_seconds has passed.
interval (int): How long to wait before executing the command.
timeout (int): Number of seconds until the polling sequence will timeout.
Returns:
ScheduledCommand : ScheduledCommand object
"""
polling_args = {
'interval': interval,
'polling': True,
'timeout': timeout,
**kwargs
}
return ScheduledCommand(
command=command_name,
next_run_in_seconds=interval,
timeout_in_seconds=timeout,
args=polling_args,
)
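# Minimal usage sketch (assumption, for illustration only): a polling command re-schedules
# itself by returning a CommandResults that carries the ScheduledCommand, so XSOAR re-invokes
# the same command with polling=True until provisioning reaches a terminal state.
#   scheduled = create_scheduled_command(command_name='azure-firewall-get', interval=30,
#                                        timeout=600, firewall_names='my-firewall')
#   return CommandResults(scheduled_command=scheduled, readable_output='Polling...')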
def validate_pagination_arguments(limit: int, page: int) -> None:
"""
Validate pagination arguments values.
Args:
limit (int): Number of elements to retrieve.
page (int): Page number.
"""
if page < 1 or limit < 1:
raise Exception('Page and limit arguments must be greater than 0.')
def get_pagination_readable_message(header: str, limit: int, page: int) -> str:
"""
Generate pagination commands readable message.
Args:
header (str): Message header
limit (int): Number of elements to retrieve.
page (int): Page number.
Returns:
str: Readable message.
"""
readable_message = f'{header}\n Current page size: {limit}\n Showing page {page} out of others that may exist.'
return readable_message
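# Worked example of the pagination arithmetic used by the list commands below (illustration
# only): with limit=50 and page=2, start_offset = (page - 1) * limit = 50 and
# end_offset = start_offset + limit = 100, so the commands slice results[50:100].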
def generate_firewall_command_output(response: dict, readable_header: str, output_key: str = None) -> CommandResults:
"""
Generate command output for firewall commands.
Args:
response (dict): API response from Azure.
output_key (str): Used to access the required data in the response.
readable_header (str): Readable message header for XSOAR war room.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
if output_key:
outputs = copy.deepcopy(response.get(output_key, []))
else:
outputs = copy.deepcopy(response)
if not isinstance(outputs, list):
outputs = [outputs]
readable_data = []
for firewall in outputs:
properties = firewall.get("properties", {})
ip_configuration = properties.get("ipConfigurations", [])
ip_configuration = ip_configuration[0] if ip_configuration else {}
data = {
"name": firewall.get("name"),
"id": firewall.get("id"),
"location": firewall.get("location"),
"threat_intel_mode": properties.get("threatIntelMode"),
"private_ip_address": dict_safe_get(ip_configuration, ["properties", "privateIPAddress"]),
"subnet": dict_safe_get(ip_configuration, ["properties", "subnet", "id"]),
"provisioning_state": properties.get("provisioningState")
}
readable_data.append(data)
readable_output = tableToMarkdown(
readable_header,
readable_data,
headers=['name', 'id', 'location', 'subnet', 'threat_intel_mode', 'private_ip_address', 'provisioning_state'],
headerTransform=string_to_table_header
)
command_results = CommandResults(
readable_output=readable_output,
outputs_prefix='AzureFirewall.Firewall',
outputs_key_field='id',
outputs=outputs,
raw_response=response
)
return command_results
def azure_firewall_list_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
List azure firewalls in resource group or subscription.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
resource = args.get('resource', 'resource_group')
limit = arg_to_number(args.get('limit') or '50')
page = arg_to_number(args.get('page') or '1')
validate_pagination_arguments(limit, page)
readable_message = get_pagination_readable_message(header='Firewall List:',
limit=limit, page=page)
start_offset = (page - 1) * limit
end_offset = start_offset + limit
complete_requests = False
total_response = {'value': []}
response = client.azure_firewall_list_request(resource=resource)
while not complete_requests:
total_response['value'].extend(response.get('value'))
if len(total_response['value']) >= end_offset or not response.get('nextLink'):
complete_requests = True
else:
response = client.azure_firewall_list_request(resource=resource, next_link=response.get('nextLink'))
return generate_firewall_command_output(total_response.get('value')[start_offset: end_offset],
                                        readable_header=readable_message)
def azure_firewall_get_command(client: AzureFirewallClient, args: Dict[str, Any]) -> list:
"""
Retrieve azure firewall information.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
firewall_names = argToList(args.get('firewall_names'))
scheduled = argToBoolean(args.get('polling', False))
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
command_results_list: List[CommandResults] = []
for firewall in firewall_names:
try:
response = client.azure_firewall_get_request(firewall)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if scheduled and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall)
# result with scheduled_command only - no update to the war room
command_results_list.append(CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(
resource_type_name="Firewall",
resource_name=firewall)))
else:
command_results = generate_firewall_command_output(response,
readable_header=f'Firewall {firewall} information:')
command_results_list.append(command_results)
except Exception as exception:
error = CommandResults(
readable_output=f'An error occurred while retrieving {firewall}.\n {exception}'
)
command_results_list.append(error)
return command_results_list
def get_firewall_rule_collection_name(rule_type: str) -> str:
"""
Get firewall rule collection API name convention.
Args:
rule_type (str): Command rule type name convention.
Returns:
str: Azure collection API name convention.
"""
rule_types = {
"network_rule": "networkRuleCollections",
"application_rule": "applicationRuleCollections",
"nat_rule": "natRuleCollections"
}
return rule_types.get(rule_type, "")
def get_policy_rule_collection_name(rule_type: str) -> str:
"""
Get policy rule collection API name convention.
Args:
rule_type (str): Command rule type name convention.
Returns:
str: Azure collection API name convention.
"""
rule_types = {
"network_rule": "FirewallPolicyFilterRuleCollection",
"application_rule": "FirewallPolicyFilterRuleCollection",
"nat_rule": "FirewallPolicyNatRuleCollection"
}
return rule_types.get(rule_type, "")
def get_policy_rule_name(rule_type: str) -> str:
"""
Get policy rule API name convention.
Args:
rule_type (str): Command rule type name convention.
Returns:
str: Azure collection API name convention.
"""
rule_types = {
"network_rule": "NetworkRule",
"application_rule": "ApplicationRule",
"nat_rule": "NatRule"
}
return rule_types.get(rule_type, "")
def generate_rule_collection_output(rule_collection_response: dict, readable_header: str,
outputs: list, is_firewall_collection: bool) -> CommandResults:
"""
Generate command output for rule collection commands.
Args:
rule_collection_response (dict): API response from Azure.
readable_header (str): Readable message header for XSOAR war room.
outputs (list): Output for XSOAR platform.
is_firewall_collection (bool): Indicates if the rule collection belongs to firewall or policy.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
readable_data = []
if is_firewall_collection:
for collection in outputs:
collection_name = collection.get("name")
collection_priority = dict_safe_get(collection, ["properties", "priority"])
collection_action = dict_safe_get(collection, ["properties", "action", "type"])
data = {"priority": collection_priority, "action": collection_action, "name": collection_name}
readable_data.append(data)
else: # Policy collection
for collection in outputs:
collection_action, collection_priority, collection_name = None, None, None
collection_data = dict_safe_get(collection, ["properties", "ruleCollections"])
if collection_data and isinstance(collection_data, list):
collection_action = dict_safe_get(collection_data[0], ["action", "type"])
collection_name = collection_data[0].get("name")
collection_priority = collection_data[0].get("priority")
data = {"priority": collection_priority, "action": collection_action, "name": collection_name}
readable_data.append(data)
readable_output = tableToMarkdown(
readable_header,
readable_data,
headers=['name', 'action', 'priority'],
headerTransform=pascalToSpace
)
command_results = CommandResults(
readable_output=readable_output,
outputs_prefix='AzureFirewall.RuleCollection',
outputs_key_field='id',
outputs=outputs,
raw_response=rule_collection_response
)
return command_results
def generate_rule_output(response: dict, readable_header: str,
outputs: list) -> CommandResults:
"""
Generate command output for rule commands.
Args:
response (dict): API response from Azure.
readable_header (str): Readable message header for XSOAR war room.
outputs (list): Output for XSOAR platform.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
readable_output = tableToMarkdown(
readable_header,
outputs,
headers=['name'],
headerTransform=pascalToSpace
)
command_results = CommandResults(
readable_output=readable_output,
outputs_prefix='AzureFirewall.Rule',
outputs_key_field='name',
outputs=outputs,
raw_response=response
)
return command_results
def filter_policy_rules_collection(rules_collections: list, rule_type: str) -> list:
"""
Filter policy rules collection by the rule type.
Args:
rules_collections (list): Rules collection from API response.
rule_type (str): Rule type to filter.
Returns:
list: Filtered rules collection.
"""
if not rules_collections:
return []
collection_key = get_policy_rule_collection_name(rule_type=rule_type)
rule_key = get_policy_rule_name(rule_type=rule_type)
collections = []
for collection in rules_collections:
current_collections = dict_safe_get(collection, ["properties", "ruleCollections"], [])
if isinstance(current_collections, list) and len(current_collections) > 0:
rule_collection = current_collections[0]
if rule_collection.get("ruleCollectionType") == collection_key:
if isinstance(rule_collection.get("rules"), list) and len(rule_collection.get("rules")) > 0:
if rule_collection.get("rules")[0].get("ruleType") == rule_key:
collections.append(collection)
return collections
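# Shape sketch (assumption, derived from the checks above) of a rule-collection group that
# passes this filter for rule_type='network_rule':
#   {'properties': {'ruleCollections': [{
#       'ruleCollectionType': 'FirewallPolicyFilterRuleCollection',
#       'rules': [{'ruleType': 'NetworkRule', ...}]}]}}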
def get_firewall_rule_collection(client: AzureFirewallClient, firewall_name: str, rule_type: str) -> tuple:
"""
Retrieve firewall rule collections.
Args:
client (AzureFirewallClient): Azure Firewall API client.
firewall_name (str): The name of the firewall which contains the collection.
rule_type (str): The name of the rule collection type to retrieve.
Returns:
tuple: response, rule_collections
"""""
response = client.azure_firewall_get_request(firewall_name=firewall_name)
rule_type_key = get_firewall_rule_collection_name(rule_type)
filtered_rules = dict_safe_get(response, ["properties", rule_type_key])
return response, filtered_rules
def azure_firewall_rules_collection_list_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
List collection rules in firewall or in policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
firewall_name = args.get('firewall_name')
policy = args.get('policy')
rule_type = args.get('rule_type')
limit = arg_to_number(args.get('limit') or '50')
page = arg_to_number(args.get('page') or '1')
validate_pagination_arguments(limit, page)
start_offset = (page - 1) * limit
end_offset = start_offset + limit
resource = firewall_name if firewall_name else policy
readable_message = get_pagination_readable_message(header=f'{resource} Rule Collections List:',
limit=limit, page=page)
if firewall_name:
response, filtered_rules = get_firewall_rule_collection(client, firewall_name, rule_type)
filtered_rules = filtered_rules[start_offset: end_offset]
else:
if not policy:
raise Exception("One of the arguments: 'firewall_name' or 'policy' must be provided.")
complete_requests = False
total_response = {'value': []}
response = client.azure_firewall_policy_rule_collection_list_request(policy_name=policy)
while not complete_requests:
total_response['value'].extend(response.get('value'))
if not response.get('nextLink'):
complete_requests = True
else:
response = client.azure_firewall_policy_rule_collection_list_request(policy_name=policy,
next_link=response.get('nextLink'))
filtered_rules = filter_policy_rules_collection(total_response.get('value'),
rule_type)[start_offset: end_offset]
return generate_rule_collection_output(rule_collection_response=response,
readable_header=readable_message, outputs=filtered_rules,
is_firewall_collection=firewall_name is not None)
def get_policy_collection_rules(client: AzureFirewallClient, policy: str, collection_name: str) -> tuple:
"""
Retrieve rules of policy rules collection.
Args:
client (AzureFirewallClient): Azure Firewall API client.
policy (str): The name of the policy which contains the rule collection.
collection_name (str): The name of the collection which contains the rules.
Returns:
tuple: API response , rules list
"""
rules = []
response = client.azure_firewall_policy_rule_collection_get_request(policy_name=policy,
collection_name=collection_name)
rules_path = ["properties", "ruleCollections"]
rule_collections = dict_safe_get(response, rules_path)
if isinstance(rule_collections, list) and len(rule_collections) > 0:
rules = rule_collections[0].get("rules")
return response, rules
def azure_firewall_rules_list_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
List rules in firewall or in policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
firewall_name = args.get('firewall_name')
policy = args.get('policy')
rule_type = args.get('rule_type')
collection_name = args.get('collection_name')
limit = arg_to_number(args.get('limit') or '50')
page = arg_to_number(args.get('page') or '1')
validate_pagination_arguments(limit, page)
start_offset = (page - 1) * limit
end_offset = start_offset + limit
rules = []
if firewall_name:
if not rule_type:
raise Exception("The 'rule_type' argument must be provided for firewall rules.")
response, filtered_rules = get_firewall_rule_collection(client, firewall_name, rule_type)
readable_message = get_pagination_readable_message(header=f'Firewall {firewall_name} {rule_type} Rules List:',
limit=limit, page=page)
rules_path = ["properties", "rules"]
for rule_collection in filtered_rules:
if rule_collection.get("name") == collection_name:
rules = dict_safe_get(rule_collection, rules_path)
break
if not rules:
raise Exception(f'Collection {collection_name} does not exist in {firewall_name} firewall.')
else:
if not policy:
raise Exception("One of the arguments: 'firewall_name' or 'policy' must be provided.")
readable_message = get_pagination_readable_message(header=f'Policy {policy} {rule_type} Rules List:',
limit=limit, page=page)
response, rules = get_policy_collection_rules(client=client, policy=policy, collection_name=collection_name)
return generate_rule_output(response=response, readable_header=readable_message,
outputs=rules[start_offset: end_offset])
def azure_firewall_rule_get_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
Retrieve rule information.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
firewall_name = args.get('firewall_name')
policy = args.get('policy')
rule_type = args.get('rule_type')
collection_name = args.get('collection_name')
rule_name = args.get('rule_name')
rule_data = None
if firewall_name:
if not rule_type:
raise Exception("The 'rule_type' argument must be provided for firewall rules.")
response, filtered_rules = get_firewall_rule_collection(client, firewall_name, rule_type)
rules_path = ["properties", "rules"]
for rule_collection in filtered_rules:
if rule_collection.get("name") == collection_name:
rules = dict_safe_get(rule_collection, rules_path)
for rule in rules:
if rule.get("name") == rule_name:
rule_data = rule
break
if rule_data:
break
else:
if not policy:
raise Exception("One of the arguments: 'firewall_name' or 'policy' must be provided.")
response, rules = get_policy_collection_rules(client=client, policy=policy, collection_name=collection_name)
for rule in rules:
if rule.get("name") == rule_name:
rule_data = rule
break
if not rule_data:
raise Exception(f'Rule {rule_name} does not exist.')
return generate_rule_output(response=response, readable_header=f'Rule {rule_name} Information:',
outputs=rule_data)
def azure_firewall_policy_create_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
Create firewall policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
policy_name = args.get('policy_name')
threat_intelligence_mode = args.get('threat_intelligence_mode', 'Turned-off')
threat_intelligence_mode = 'Off' if threat_intelligence_mode == 'Turned-off' else threat_intelligence_mode
ip_address = argToList(args.get('ips'))
domain_address = argToList(args.get('domains'))
location = args.get('location')
tier = args.get('tier', 'Standard')
base_policy_id = args.get('base_policy_id')
enable_proxy = argToBoolean(args.get('enable_proxy', 'False'))
dns_servers = argToList(args.get('dns_servers'))
response = client.azure_firewall_policy_create_request(
policy_name, threat_intelligence_mode, ip_address, domain_address, location, tier, base_policy_id, enable_proxy,
dns_servers)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get', interval=interval,
timeout=timeout, policy_names=policy_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Policy",
resource_name=policy_name))
return generate_policy_command_output(response, readable_header=f'Successfully Created Policy "{policy_name}"')
def dict_nested_set(dictionary: dict, keys: list, value: Any) -> None:
"""
Set nested dictionary value.
Args:
dictionary (dict): Dictionary to set.
keys (list): Keys for recursive get.
value (Any): Required value.
"""
keys = argToList(keys)
for key in keys[:-1]:
dictionary = dictionary.setdefault(key, {})
dictionary[keys[-1]] = value
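# Minimal self-contained sketch of dict_nested_set behaviour (illustration only):
# intermediate keys are created on demand, starting from an empty mapping.
#   settings = {}
#   dict_nested_set(settings, ['dnsSettings', 'enableProxy'], True)
#   # settings is now {'dnsSettings': {'enableProxy': True}}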
def azure_firewall_policy_update_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
Update policy resource.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
policy_name = args.get('policy_name')
threat_intelligence_mode = args.get('threat_intelligence_mode', '')
threat_intelligence_mode = 'Off' if threat_intelligence_mode == 'Turned-off' else threat_intelligence_mode
ip_address = argToList(args.get('ips'))
domain_address = argToList(args.get('domains'))
base_policy_id = args.get('base_policy_id')
enable_proxy = argToBoolean(args.get('enable_proxy')) if args.get('enable_proxy') else None
dns_servers = argToList(args.get('dns_servers'))
policy_data = client.azure_firewall_policy_get_request(policy_name=policy_name)
properties = policy_data.get("properties")
update_fields = assign_params(threat_intelligence_mode=threat_intelligence_mode, ips=ip_address,
domains=domain_address, base_policy_id=base_policy_id,
enable_proxy=enable_proxy, dns_servers=dns_servers)
policy_fields_mapper = {
'threat_intelligence_mode': ["threatIntelMode"],
'ips': ["threatIntelWhitelist", "ipAddresses"],
'domains': ["threatIntelWhitelist", "fqdns"],
'base_policy_id': ["basePolicy", "id"],
'enable_proxy': ["dnsSettings", "enableProxy"],
'dns_servers': ["dnsSettings", "servers"]
}
for field_key, value in update_fields.items():
key_path = policy_fields_mapper.get(field_key, [])
dict_nested_set(properties, key_path, value)
response = client.azure_firewall_policy_update_request(policy_name=policy_name, policy_data=policy_data)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get', interval=interval,
timeout=timeout, policy_names=policy_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Policy",
resource_name=policy_name))
return generate_policy_command_output(response, readable_header=f'Successfully Updated Policy "{policy_name}"')
def generate_policy_command_output(response: dict, readable_header: str, output_key: str = None) -> CommandResults:
"""
Generate command output for policy commands.
Args:
response (dict): API response from Azure.
output_key (str): Used to access the required data in the response.
readable_header (str): Readable message header for XSOAR war room.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
if output_key:
outputs = copy.deepcopy(response.get(output_key, []))
else:
outputs = copy.deepcopy(response)
if not isinstance(outputs, list):
outputs = [outputs]
readable_data = []
for policy in outputs:
name = policy.get('name')
id = policy.get('id')
location = policy.get("location")
properties = policy.get("properties", {})
threat_intel_mode = properties.get("threatIntelMode")
tier = dict_safe_get(properties, ['sku', 'tier'])
child_policies = properties.get("childPolicies", [])
child_policies = [child_policy.get('id') for child_policy in child_policies]
firewalls = properties.get("firewalls", {})
firewalls = [firewall.get('id') for firewall in firewalls]
base_policy = dict_safe_get(properties, ["basePolicy", "id"])
provisioning_state = properties.get("provisioningState")
data = dict(name=name, location=location, threat_intel_mode=threat_intel_mode, child_policies=child_policies,
firewalls=firewalls, base_policy=base_policy, provisioning_state=provisioning_state,
id=id, tier=tier)
readable_data.append(data)
readable_output = tableToMarkdown(
readable_header,
readable_data,
headers=['name', 'id', 'tier', 'location', 'firewalls', 'base_policy', 'child_policies', 'provisioning_state'],
headerTransform=string_to_table_header
)
command_results = CommandResults(
readable_output=readable_output,
outputs_prefix='AzureFirewall.Policy',
outputs_key_field='id',
outputs=outputs,
raw_response=response
)
return command_results
def azure_firewall_policy_get_command(client: AzureFirewallClient, args: Dict[str, Any]) -> list:
"""
Retrieve policy information.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
policy_names = argToList(args.get('policy_names'))
scheduled = argToBoolean(args.get('polling', False))
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
command_results_list: List[CommandResults] = []
for policy in policy_names:
try:
response = client.azure_firewall_policy_get_request(policy)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if scheduled and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get',
interval=interval, timeout=timeout, policy_names=policy)
# result with scheduled_command only - no update to the war room
command_results_list.append(CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(
resource_type_name="Policy",
resource_name=policy)))
else:
command_results_list.append(
generate_policy_command_output(response, readable_header=f'Policy {policy} information:'))
except Exception as exception:
error = CommandResults(
readable_output=f'An error occurred while retrieving {policy}.\n {exception}'
)
command_results_list.append(error)
return command_results_list
def azure_firewall_policy_delete_command(client: AzureFirewallClient, args: Dict[str, Any]) -> list:
"""
Delete policy resource.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
policy_names = argToList(args.get('policy_names'))
command_results_list: List[CommandResults] = []
for policy in policy_names:
try:
response = client.azure_firewall_policy_delete_request(policy)
if response.status_code == 202:
readable_output = f'Policy {policy} delete operation accepted and will complete asynchronously.'
else:
readable_output = f'Policy {policy} deleted successfully.'
command_results_list.append(CommandResults(
readable_output=readable_output
))
except Exception as exception:
error = CommandResults(
readable_output=f'An error occurred while deleting {policy}.\n {exception}'
)
command_results_list.append(error)
return command_results_list
def azure_firewall_policy_list_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
List policies in resource group or subscription.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
resource = args.get('resource', 'resource_group')
limit = arg_to_number(args.get('limit') or '50')
page = arg_to_number(args.get('page') or '1')
validate_pagination_arguments(limit, page)
readable_message = get_pagination_readable_message(header='Policy List:',
limit=limit, page=page)
start_offset = (page - 1) * limit
end_offset = start_offset + limit
complete_requests = False
total_response = {'value': []}
response = client.azure_firewall_policy_list_request(resource=resource)
while not complete_requests:
total_response['value'].extend(response.get('value'))
if len(total_response['value']) >= end_offset or not response.get('nextLink'):
complete_requests = True
else:
response = client.azure_firewall_policy_list_request(resource=resource,
next_link=response.get('nextLink'))
return generate_policy_command_output(total_response.get('value')[start_offset: end_offset],
readable_header=readable_message)
def azure_firewall_policy_attach_command(client: AzureFirewallClient, args: Dict[str, Any]) -> list:
"""
Attach policy to firewall.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
firewall_names = argToList(args.get('firewall_names'))
policy_id = args.get('policy_id')
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
command_results_list: List[CommandResults] = []
for firewall in firewall_names:
try:
firewall_data = client.azure_firewall_get_request(firewall_name=firewall)
firewall_data["properties"]["firewallPolicy"] = {"id": policy_id}
response = client.azure_firewall_update_request(firewall_name=firewall, firewall_data=firewall_data)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall)
# result with scheduled_command only - no update to the war room
command_results_list.append(CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(
resource_type_name="Firewall",
resource_name=firewall)))
else:
command_results_list.append(generate_firewall_command_output(response,
readable_header=f'Successfully Updated Firewall '
f'"{firewall}"'))
except Exception as exception:
error = CommandResults(
readable_output=f'An error occurred while updating {firewall}.\n {exception}'
)
command_results_list.append(error)
return command_results_list
def azure_firewall_policy_remove_command(client: AzureFirewallClient, args: Dict[str, Any]) -> list:
"""
Remove (detach) a policy from a firewall. This command detaches the policy from the firewall; it does not delete the policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
firewall_names = argToList(args.get('firewall_names'))
command_results_list: List[CommandResults] = []
for firewall in firewall_names:
try:
firewall_data = client.azure_firewall_get_request(firewall_name=firewall)
firewall_data["properties"].pop("firewallPolicy", None)
response = client.azure_firewall_update_request(firewall_name=firewall, firewall_data=firewall_data)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall)
# result with scheduled_command only - no update to the war room
command_results_list.append(CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(
resource_type_name="Firewall",
resource_name=firewall)))
else:
command_results_list.append(generate_firewall_command_output(response,
readable_header=f'Successfully Updated Firewall '
f'"{firewall}"'))
except Exception as exception:
error = CommandResults(
readable_output=f'An error occurred while updating {firewall}.\n {exception}'
)
command_results_list.append(error)
return command_results_list
def delete_rule_collection(client: AzureFirewallClient, collection_name: str, rule_type: str, firewall_name: str = None,
policy: str = None, should_poll: bool = False, interval: int = 30,
timeout: int = 60) -> CommandResults:
"""
Delete rule collection from firewall or policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
collection_name (str): The name of the rule collection to delete.
rule_type (str): The name of the rule collection type.
firewall_name (str): The name of the firewall which contains the collection.
policy (str): The name of the policy which contains the collection.
should_poll (bool): Use Cortex XSOAR built-in polling to retrieve the resource
when it has finished updating.
interval (int): Indicates how long to wait between command executions.
timeout (int): Indicates the time in seconds until the polling sequence times out.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
if firewall_name:
firewall_data, filtered_rules = get_firewall_rule_collection(client, firewall_name, rule_type=rule_type)
collection_index = -1
for index, collection in enumerate(filtered_rules):
if collection.get("name") == collection_name:
collection_index = index
break
if collection_index == -1:
raise Exception(f'Collection {collection_name} does not exist in {firewall_name} firewall.')
del filtered_rules[collection_index]
response = client.azure_firewall_update_request(firewall_name=firewall_name, firewall_data=firewall_data)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Firewall",
resource_name=firewall_name))
else:
return generate_firewall_command_output(response,
readable_header=f'Successfully Updated Firewall "{firewall_name}"')
else:
if not policy:
raise Exception('One of the arguments: "firewall_name" or "policy" must be provided.')
response = client.azure_firewall_policy_rule_collection_delete_request(policy_name=policy,
collection_name=collection_name)
is_resource_deleted = response.status_code == 200
if should_poll and not is_resource_deleted:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get', interval=interval,
timeout=timeout, policy_names=policy)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Policy",
resource_name=policy))
response = client.azure_firewall_policy_get_request(policy)
return generate_policy_command_output(response, readable_header=f'Successfully Updated Policy "{policy}"')
def add_rule_to_policy_collection(client: AzureFirewallClient, policy: str, collection_name: str,
rule_object: dict, rule_name: str) -> dict:
"""
Add rule to policy rule collection
Args:
client (AzureFirewallClient): Azure Firewall API client.
policy (str): The name of the policy which contains the collection.
collection_name (str): The name of the rule collection which contains the rule.
rule_object (dict): Policy rule information.
rule_name (str): The name of the rule to create.
Returns:
dict: API response from Azure.
"""
collection_information = client.azure_firewall_policy_rule_collection_get_request(
policy_name=policy, collection_name=collection_name)
for rule in collection_information["properties"]["ruleCollections"][0]["rules"]:
if rule.get("name") == rule_name:
raise Exception(f'Rule {rule_name} already exists.')
collection_information["properties"]["ruleCollections"][0]["rules"].append(rule_object)
return client.azure_firewall_policy_rule_collection_create_or_update_request(policy_name=policy,
collection_name=collection_name,
collection_data=collection_information)
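# Illustrative sketch (assumed shape, not taken verbatim from the Azure API reference): the
# minimal structure that add_rule_to_policy_collection expects back from
# azure_firewall_policy_rule_collection_get_request. Only the keys accessed above are shown;
# real responses carry additional fields such as 'id' and collection-level properties.
_EXAMPLE_POLICY_COLLECTION_SHAPE = {
    "properties": {
        "ruleCollections": [
            {
                "rules": [
                    {"name": "existing-rule", "ruleType": "NetworkRule"},
                ],
            },
        ],
    },
}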
def remove_rule_from_collection(client: AzureFirewallClient, collection_name: str, rule_type: str, rule_names: list,
firewall_name: str = None, policy: str = None, should_poll: bool = False,
interval: int = 30, timeout: int = 60) -> list:
"""
Remove rule from collection in firewall or policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
collection_name (str): The name of the rule collection which contains the rule.
rule_type (str): The name of the rule collection type.
rule_names (list): The names of the rules to remove.
firewall_name (str): The name of the firewall which contains the collection.
policy (str): The name of the policy which contains the collection.
should_poll (bool): Use Cortex XSOAR built-in polling to retrieve the resource
when it has finished updating.
interval (int): Indicates how long to wait between command executions.
timeout (int): Indicates the time in seconds until the polling sequence times out.
Returns:
list[CommandResults]: outputs, readable outputs and raw response for XSOAR.
"""
command_results_list: List[CommandResults] = []
if firewall_name:
firewall_data, filtered_rules = get_firewall_rule_collection(client, firewall_name, rule_type=rule_type)
collection_index = -1
for index, collection in enumerate(filtered_rules):
if collection.get("name") == collection_name:
collection_index = index
break
if collection_index == -1:
raise Exception(f'Collection {collection_name} does not exist.')
for rule_name in rule_names:
rule_index = -1
for index, rule in enumerate(dict_safe_get(filtered_rules[collection_index], ["properties", "rules"], [])):
if rule.get("name") == rule_name:
rule_index = index
break
if rule_index == -1:
error = CommandResults(
readable_output=f'Rule {rule_name} does not exist.'
)
command_results_list.append(error)
continue
del filtered_rules[collection_index]["properties"]["rules"][rule_index]
response = client.azure_firewall_update_request(firewall_name=firewall_name, firewall_data=firewall_data)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall_name)
command_results_list.append(CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(
resource_type_name="Firewall",
resource_name=firewall_name)))
else:
command_results_list.append(generate_firewall_command_output(response,
readable_header=f'Successfully Updated Firewall '
f'"{firewall_name}"'))
else:
if not policy:
raise Exception('One of the arguments: "firewall_name" or "policy" must be provided.')
collection_information = client.azure_firewall_policy_rule_collection_get_request(
policy_name=policy, collection_name=collection_name)
rules = collection_information["properties"]["ruleCollections"][0]["rules"]
for rule_name in rule_names:
rule_index = -1
for index, rule in enumerate(rules):
if rule.get("name") == rule_name:
rule_index = index
if rule_index == -1:
error = CommandResults(
readable_output=f'Rule {rule_name} does not exist.'
)
command_results_list.append(error)
continue
del rules[rule_index]
response = client.azure_firewall_policy_rule_collection_create_or_update_request(policy_name=policy,
collection_name=collection_name,
collection_data=collection_information)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get', interval=interval,
timeout=timeout, policy_names=policy)
command_results_list.append(CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(
resource_type_name="Policy",
resource_name=policy)))
else:
response = client.azure_firewall_policy_get_request(policy)
command_results_list.append(
generate_policy_command_output(response, readable_header=f'Successfully Updated Policy "{policy}"'))
return command_results_list
def create_firewall_collection(client: AzureFirewallClient, firewall_name: str, rule_type: str,
collection_object: dict) -> dict:
"""
Create firewall rules collection.
Args:
client (AzureFirewallClient): Azure Firewall API client.
firewall_name (str): The name of the firewall which contains the collection.
rule_type (str): The name of the rule collection type.
collection_object (dict): Collection information.
Returns:
dict: API response from Azure.
"""
firewall_data = client.azure_firewall_get_request(firewall_name=firewall_name)
rule_type_key = get_firewall_rule_collection_name(rule_type=rule_type)
firewall_data["properties"][rule_type_key].append(collection_object)
return client.azure_firewall_update_request(firewall_name=firewall_name, firewall_data=firewall_data)
def validate_predefined_argument(argument_name: str, argument_value: object, argument_options: list) -> bool:
"""
Validate predefined argument is a valid option.
Args:
argument_name (str): The name of the argument to validate.
argument_value (object): The value of the argument to validate.
argument_options (list): Predefined argument options.
Returns:
bool: True if the argument is valid, otherwise raise an exception.
"""
if not isinstance(argument_value, list):
argument_value = [argument_value]
for value in argument_value:
if value not in argument_options:
raise Exception(f'Invalid {argument_name} argument. Please provide one of the following options: '
f'{str(argument_options)}')
return True
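# Illustrative usage sketch (hypothetical values, never invoked): validate_predefined_argument
# accepts either a single value or a list of values and raises for anything outside the
# allowed options.
def _example_validate_predefined_argument() -> None:
    # Passes - both values are in the allowed options.
    validate_predefined_argument(argument_name='protocols', argument_value=['TCP', 'UDP'],
                                 argument_options=['TCP', 'UDP', 'ICMP', 'Any'])
    # Would raise - 'HTTP' is not one of the allowed options.
    # validate_predefined_argument(argument_name='protocols', argument_value='HTTP',
    #                              argument_options=['TCP', 'UDP', 'ICMP', 'Any'])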
def validate_network_rule_properties(source_type: str, destination_type: str, protocols: list,
ip_source_address: list = None, source_ip_group_ids: list = None) -> bool:
"""
Validate the provided network rule properties are valid.
Args:
source_type (str): Rule source type.
destination_type (str): Rule destination type.
protocols (list): Protocols for the created rule.
ip_source_address (list): Source IP addresses for the created rule.
source_ip_group_ids (list): Source IP group IDs for the created rule.
Returns:
bool: True if the properties are valid, otherwise raise an exception.
"""
validate_predefined_argument(argument_name='protocols', argument_value=protocols,
argument_options=['TCP', 'UDP', 'ICMP', 'Any'])
validate_predefined_argument(argument_name='source_type', argument_value=source_type,
argument_options=['ip_address', 'ip_group'])
validate_predefined_argument(argument_name='destination_type', argument_value=destination_type,
argument_options=['ip_address', 'ip_group', 'service_tag', 'fqdn'])
if source_type == 'ip_address' and not ip_source_address:
raise Exception('"ip_source_address" argument must be provided when "source_type" is "ip_address".')
if source_type == 'ip_group' and not source_ip_group_ids:
raise Exception('"source_ip_group_ids" argument must be provided when "source_type" is "ip_group".')
return True
def create_firewall_network_rule_object(rule_name: str, description: str, protocol: list, source_type: str,
destination_type: str, destinations: list, destination_port: list,
ip_source_address: list = None, source_ip_group_ids: list = None,
is_firewall_rule: bool = False) -> dict:
"""
Generate network rule object for firewall resource.
Args:
rule_name (str): The name of the rule.
description (str): The description of the rule.
protocol (list): Protocols of the rule.
source_type (str): Rule source type.
destination_type (str): Rule destination type.
destinations (list): Destinations of the rule.
destination_port (list): Destination ports of the rule.
ip_source_address (list): Source IP addresses of the rule.
source_ip_group_ids (list): Source IP group IDs of the rule.
is_firewall_rule (bool): Indicates if the rule belongs to firewall or policy.
Returns:
dict: Rule object information.
"""
rule_object = {
"name": rule_name,
"description": description,
"destinationPorts": destination_port,
}
if source_type == 'ip_address':
rule_object["sourceAddresses"] = ip_source_address
else: # source_type == 'ip_group'
rule_object["sourceIpGroups"] = source_ip_group_ids
destination_path = {"ip_address": "destinationAddresses", "ip_group": "destinationIpGroups",
"service_tag": "destinationAddresses", "fqdn": "destinationFqdns"}
rule_object[destination_path[destination_type]] = destinations
if is_firewall_rule:
rule_object["protocols"] = protocol
else:
rule_object["ipProtocols"] = protocol
rule_object["ruleType"] = "NetworkRule"
return rule_object
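# Illustrative sketch (hypothetical rule names and addresses, never invoked): the same inputs
# yield a slightly different payload depending on the target resource - classic firewall rules
# use the "protocols" key, while policy rules use "ipProtocols" and carry a "ruleType" marker.
def _example_network_rule_objects() -> tuple:
    firewall_rule = create_firewall_network_rule_object(
        rule_name='allow-dns', description='example rule', protocol=['UDP'],
        source_type='ip_address', destination_type='ip_address',
        destinations=['8.8.8.8'], destination_port=['53'],
        ip_source_address=['10.0.0.0/24'], is_firewall_rule=True)
    policy_rule = create_firewall_network_rule_object(
        rule_name='allow-dns', description='example rule', protocol=['UDP'],
        source_type='ip_address', destination_type='ip_address',
        destinations=['8.8.8.8'], destination_port=['53'],
        ip_source_address=['10.0.0.0/24'], is_firewall_rule=False)
    return firewall_rule, policy_rule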
def azure_firewall_network_rule_collection_create_command(client: AzureFirewallClient,
args: Dict[str, Any]) -> CommandResults:
"""
Create network rule collection in firewall or policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
firewall_name = args.get('firewall_name')
policy = args.get('policy')
collection_name = args.get('collection_name')
collection_priority = arg_to_number(args.get('collection_priority'))
action = args.get('action')
rule_name = args.get('rule_name')
description = args.get('description')
protocol = argToList(args.get('protocols'))
source_type = args.get('source_type') # ip_address or ip_group
ip_source_address = argToList(
args.get('source_ips')) # Must be provided when 'source_type' argument is assigned to 'ip_address'.
source_ip_group_ids = argToList(
args.get('source_ip_group_ids')) # Must be provided when 'source_type' argument is assigned to 'ip_group'.
destination_type = args.get('destination_type') # ip_address or ip_group or service_tag or fqdn.
destinations = argToList(args.get('destinations'))
destination_port = argToList(args.get('destination_ports'))
validate_network_rule_properties(source_type=source_type, destination_type=destination_type, protocols=protocol,
ip_source_address=ip_source_address, source_ip_group_ids=source_ip_group_ids)
rule_information = create_firewall_network_rule_object(rule_name=rule_name, description=description,
protocol=protocol,
source_type=source_type,
destination_type=destination_type,
destinations=destinations,
destination_port=destination_port,
ip_source_address=ip_source_address,
source_ip_group_ids=source_ip_group_ids,
is_firewall_rule=policy is None)
if firewall_name:
collection_object = remove_empty_elements({
"name": collection_name,
"properties": {
"priority": collection_priority,
"action": {
"type": action
},
"rules": [
rule_information
]
}
})
response = create_firewall_collection(client=client, firewall_name=firewall_name, rule_type="network_rule",
collection_object=collection_object)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Firewall",
resource_name=firewall_name))
else:
return generate_firewall_command_output(response,
readable_header=f'Successfully Updated Firewall "{firewall_name}"')
else:
if not policy:
raise Exception('One of the arguments: "firewall_name" or "policy" must be provided.')
collection_information = None
try:
collection_information = client.azure_firewall_policy_rule_collection_get_request(
policy_name=policy,
collection_name=collection_name)
except NotFoundError:
pass
if collection_information:
raise Exception(f'The collection {collection_name} already exists in policy.')
response = client.azure_firewall_policy_network_rule_collection_create_request(policy_name=policy,
collection_priority=collection_priority,
collection_name=collection_name,
action=action,
rule_information=rule_information)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get', interval=interval,
timeout=timeout, policy_names=policy)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Policy",
resource_name=policy))
response = client.azure_firewall_policy_get_request(policy)
return generate_policy_command_output(response, readable_header=f'Successfully Updated Policy "{policy}"')
def update_policy_rule_collection(client: AzureFirewallClient, policy: str, collection_name: str,
priority: int = None, action: str = None) -> dict:
"""
Update rule collection in policy
Args:
client (AzureFirewallClient): Azure Firewall API client.
policy (str): The name of the policy which contains the collection.
collection_name (str): The name of the rule collection to update.
priority (int): The priority of the rule collection resource.
action (str): The action type of a rule collection.
Returns:
dict: API response from Azure.
"""
collection_information = client.azure_firewall_policy_rule_collection_get_request(
policy_name=policy, collection_name=collection_name)
rule_collections = dict_safe_get(collection_information, ["properties", "ruleCollections"], [])
if action:
rule_collections[0]["action"]["type"] = action
if priority:
rule_collections[0]["priority"] = priority
collection_information["properties"]["priority"] = priority
return client.azure_firewall_policy_rule_collection_create_or_update_request(policy_name=policy,
collection_name=collection_name,
collection_data=collection_information)
def azure_firewall_network_rule_collection_update_command(client: AzureFirewallClient,
args: Dict[str, Any]) -> CommandResults:
"""
Update network rule collection in firewall or policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
firewall_name = args.get('firewall_name')
policy = args.get('policy')
collection_name = args.get('collection_name')
priority = args.get('priority')
if priority:
priority = arg_to_number(priority)
action = args.get('action')
if firewall_name:
firewall_data, filtered_rules = get_firewall_rule_collection(client, firewall_name,
rule_type="network_rule")
collection_index = -1
for index, collection in enumerate(filtered_rules):
if collection.get("name") == collection_name:
collection_index = index
break
if collection_index == -1:
raise Exception(f'Collection {collection_name} does not exist in {firewall_name} firewall.')
if action:
filtered_rules[collection_index]["properties"]["action"]["type"] = action
if priority:
filtered_rules[collection_index]["properties"]["priority"] = priority
response = client.azure_firewall_update_request(firewall_name=firewall_name, firewall_data=firewall_data)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Firewall",
resource_name=firewall_name))
else:
return generate_firewall_command_output(response,
readable_header=f'Successfully Updated Firewall "{firewall_name}"')
else:
if not policy:
raise Exception('One of the arguments: "firewall_name" or "policy" must be provided.')
response = update_policy_rule_collection(client=client, policy=policy, collection_name=collection_name,
priority=priority,
action=action)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get', interval=interval,
timeout=timeout, policy_names=policy)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Policy",
resource_name=policy))
response = client.azure_firewall_policy_get_request(policy)
return generate_policy_command_output(response, readable_header=f'Successfully Updated Policy "{policy}"')
def azure_firewall_network_rule_collection_delete_command(client: AzureFirewallClient,
args: Dict[str, Any]) -> CommandResults:
"""
Delete network rule collection from firewall or policy.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
firewall_name = args.get('firewall_name')
policy = args.get('policy')
collection_name = args.get('collection_name')
return delete_rule_collection(client=client, collection_name=collection_name, rule_type="network_rule",
firewall_name=firewall_name, policy=policy, should_poll=should_poll,
interval=interval, timeout=timeout)
def add_rule_to_firewall_collection(client: AzureFirewallClient, firewall_name: str, collection_name: str,
rule_type: str, rule_object: dict) -> dict:
"""
Add rule to firewall rule collection.
Args:
client (AzureFirewallClient): Azure Firewall API client.
firewall_name (str): The name of the firewall which contains the collection.
collection_name (str): The name of the rule collection which contains the rule.
rule_type (str): The name of the rule collection type.
rule_object (dict): Firewall rule information.
Returns:
dict: API response from Azure.
"""
firewall_data, filtered_rules = get_firewall_rule_collection(client, firewall_name,
rule_type=rule_type)
collection_index = -1
for index, collection in enumerate(filtered_rules):
if collection.get("name") == collection_name:
collection_index = index
break
if collection_index == -1:
raise Exception(f'Collection {collection_name} does not exist.')
filtered_rules[collection_index]["properties"]["rules"].append(rule_object)
return client.azure_firewall_update_request(firewall_name=firewall_name, firewall_data=firewall_data)
def azure_firewall_network_rule_create_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
Create network rule in firewall or policy rule collection.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
firewall_name = args.get('firewall_name')
policy = args.get('policy')
collection_name = args.get('collection_name')
rule_name = args.get('rule_name')
description = args.get('description')
protocol = argToList(args.get('protocols'))
source_type = args.get('source_type') # ip_address or ip_group
ip_source_address = argToList(
args.get('source_ips')) # Must be provided when 'source_type' argument is assigned to 'ip_address'.
source_ip_group_ids = argToList(
args.get('source_ip_group_ids')) # Must be provided when 'source_type' argument is assigned to 'ip_group'.
destination_type = args.get('destination_type') # ip_address or ip_group or service_tag or fqdn.
destinations = argToList(args.get('destinations'))
destination_port = argToList(args.get('destination_ports'))
validate_network_rule_properties(source_type=source_type, destination_type=destination_type, protocols=protocol,
ip_source_address=ip_source_address, source_ip_group_ids=source_ip_group_ids)
rule_information = create_firewall_network_rule_object(rule_name=rule_name, description=description,
protocol=protocol,
source_type=source_type,
destination_type=destination_type,
destinations=destinations,
destination_port=destination_port,
ip_source_address=ip_source_address,
source_ip_group_ids=source_ip_group_ids,
is_firewall_rule=policy is None)
if firewall_name:
response = add_rule_to_firewall_collection(client=client, firewall_name=firewall_name,
collection_name=collection_name,
rule_type="network_rule", rule_object=rule_information)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Firewall",
resource_name=firewall_name))
else:
return generate_firewall_command_output(response,
readable_header=f'Successfully Updated Firewall "{firewall_name}"')
else:
if not policy:
raise Exception('One of the arguments: "firewall_name" or "policy" must be provided.')
response = add_rule_to_policy_collection(client=client, policy=policy, collection_name=collection_name,
rule_object=rule_information, rule_name=rule_name)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get', interval=interval,
timeout=timeout, policy_names=policy)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Policy",
resource_name=policy))
response = client.azure_firewall_policy_get_request(policy)
return generate_policy_command_output(response, readable_header=f'Successfully Updated Policy "{policy}"')
def update_firewall_collection_rule(client: AzureFirewallClient, firewall_name: str, collection_name: str,
rule_name: str, rule_type: str, rule_fields_mapper: dict,
update_fields: dict) -> dict:
"""
Update rule in firewall rules collection.
Args:
client (AzureFirewallClient): Azure Firewall API client.
firewall_name (str): The name of the firewall which contains the collection.
collection_name (str): The name of the rule collection which contains the rule.
rule_name (str): The name of the rule to update
rule_type (str): The name of the rule collection type.
rule_fields_mapper (dict): Mapper between field name and Azure field name convention.
update_fields (dict): New rule information to update.
Returns:
dict: API response from Azure.
"""
firewall_data, filtered_rules = get_firewall_rule_collection(client, firewall_name, rule_type=rule_type)
collection_index = -1
rule_found = False
for index, collection in enumerate(filtered_rules):
if collection.get("name") == collection_name:
collection_index = index
break
if collection_index == -1:
raise Exception(f'Collection {collection_name} does not exist.')
for rule in dict_safe_get(filtered_rules[collection_index], ["properties", "rules"], []):
if rule.get("name") == rule_name:
rule_found = True
for field_key, value in update_fields.items():
key_path = rule_fields_mapper.get(field_key, [])
rule[key_path] = value
break
if not rule_found:
raise Exception(f'Rule {rule_name} does not exist.')
return client.azure_firewall_update_request(firewall_name=firewall_name, firewall_data=firewall_data)
def update_policy_collection_rule(client: AzureFirewallClient, policy: str, collection_name: str,
rule_name: str, rule_fields_mapper: dict,
update_fields: dict) -> dict:
"""
Update rule in policy rules collection.
Args:
client (AzureFirewallClient): Azure Firewall API client.
policy (str): The name of the policy which contains the collection.
collection_name (str): The name of the rule collection which contains the rule.
rule_name (str): The name of the rule to update
rule_fields_mapper (dict): Mapper between field name and Azure field name convention.
update_fields (dict): New rule information to update.
Returns:
dict: API response from Azure.
"""
collection_information = client.azure_firewall_policy_rule_collection_get_request(
policy_name=policy, collection_name=collection_name)
rules = collection_information["properties"]["ruleCollections"][0]["rules"]
rule_found = False
for rule in rules:
if rule.get("name") == rule_name:
rule_found = True
for field_key, value in update_fields.items():
key_path = rule_fields_mapper.get(field_key, [])
rule[key_path] = value
break
if not rule_found:
raise Exception(f'Rule {rule_name} does not exist.')
return client.azure_firewall_policy_rule_collection_create_or_update_request(policy_name=policy,
collection_name=collection_name,
collection_data=collection_information)
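# Illustrative sketch (hypothetical values): how the *_collection_rule update helpers consume
# 'rule_fields_mapper' and 'update_fields'. The mapper translates a command-level field name to
# the Azure field name, and only keys present in 'update_fields' are written onto the rule.
_EXAMPLE_RULE_FIELDS_MAPPER = {
    "description": "description",
    "destination_port": "destinationPorts",
    "protocol": "ipProtocols",  # a classic firewall network rule would use "protocols" instead
}
_EXAMPLE_UPDATE_FIELDS = {
    "description": "updated via example",
    "destination_port": ["8080"],
}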
def azure_firewall_network_rule_update_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
Update network rule in firewall or policy collection.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
firewall_name = args.get('firewall_name')
policy = args.get('policy')
collection_name = args.get('collection_name')
rule_name = args.get('rule_name')
description = args.get('description')
protocol = argToList(args.get('protocols'))
ip_source_address = argToList(args.get('source_ips'))
source_ip_group_ids = argToList(args.get('source_ip_group_ids'))
destination_port = argToList(args.get('destination_ports'))
destination_type = args.get('destination_type')
source_type = args.get('source_type')
destinations = argToList(args.get('destinations'))
update_fields = assign_params(description=description, destination_port=destination_port,
protocol=protocol)
rule_fields_mapper = {"description": "description", "destination_port": "destinationPorts"}
if source_type:
if source_type == 'ip_address':
rule_fields_mapper["ip_source_address"] = "sourceAddresses"
update_fields["ip_source_address"] = ip_source_address
else: # source_type == 'ip_group'
rule_fields_mapper["ip_source_address"] = "sourceIpGroups"
update_fields["ip_source_address"] = source_ip_group_ids
if destinations:
destination_path = {"ip_address": "destinationAddresses", "ip_group": "destinationIpGroups",
"service_tag": "destinationAddresses", "fqdn": "destinationFqdns"}
rule_fields_mapper["ip_destination_address"] = destination_path[destination_type]
update_fields["ip_destination_address"] = destinations
if firewall_name:
if protocol:
rule_fields_mapper["protocol"] = "protocols"
response = update_firewall_collection_rule(client=client, firewall_name=firewall_name,
collection_name=collection_name,
rule_name=rule_name, rule_type="network_rule",
rule_fields_mapper=rule_fields_mapper,
update_fields=update_fields)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-get', interval=interval,
timeout=timeout, firewall_names=firewall_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Firewall",
resource_name=firewall_name))
else:
return generate_firewall_command_output(response,
readable_header=f'Successfully Updated Firewall "{firewall_name}"')
else:
if not policy:
raise Exception('One of the arguments: "firewall_name" or "policy" must be provided.')
if protocol:
rule_fields_mapper["protocol"] = "ipProtocols"
response = update_policy_collection_rule(client=client, policy=policy, collection_name=collection_name,
rule_name=rule_name, rule_fields_mapper=rule_fields_mapper,
update_fields=update_fields)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-policy-get', interval=interval,
timeout=timeout, policy_names=policy)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="Policy",
resource_name=policy))
response = client.azure_firewall_policy_get_request(policy)
return generate_policy_command_output(response, readable_header=f'Successfully Updated Policy "{policy}"')
def azure_firewall_network_rule_remove_command(client: AzureFirewallClient, args: Dict[str, Any]) -> list:
"""
Remove network rule from rules collection.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
list[CommandResults]: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
firewall_name = args.get('firewall_name')
policy = args.get('policy')
collection_name = args.get('collection_name')
rule_names = argToList(args.get('rule_names'))
return remove_rule_from_collection(client=client, collection_name=collection_name, rule_type="network_rule",
rule_names=rule_names, firewall_name=firewall_name, policy=policy,
should_poll=should_poll, interval=interval, timeout=timeout)
def azure_firewall_service_tag_list_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
Retrieve service tags information.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
location = args.get('location')
limit = arg_to_number(args.get('limit') or '50')
page = arg_to_number(args.get('page') or '1')
validate_pagination_arguments(limit, page)
readable_message = get_pagination_readable_message(header='Service Tag List:',
limit=limit, page=page)
start_offset = (page - 1) * limit
end_offset = start_offset + limit
complete_requests = False
total_response = {'value': []}
response = client.azure_firewall_service_tag_list_request(location=location)
while not complete_requests:
total_response['value'].extend(response.get('value'))
if len(total_response['value']) >= end_offset or not response.get('nextLink'):
complete_requests = True
else:
response = client.azure_firewall_service_tag_list_request(location=location,
next_link=response.get('nextLink'))
readable_output = tableToMarkdown(
readable_message,
total_response.get('value')[start_offset: end_offset],
headers=['name', 'id'],
headerTransform=string_to_table_header
)
command_results = CommandResults(
readable_output=readable_output,
outputs_prefix='AzureFirewall.ServiceTag',
outputs_key_field='id',
outputs=total_response.get('value')[start_offset: end_offset],
raw_response=total_response
)
return command_results
def generate_ip_group_command_output(response: dict, readable_header: str, output_key: str = None) -> CommandResults:
"""
Generate command output for IP groups commands.
Args:
response (dict): API response from Azure.
output_key (str): Used to access to required data in the response.
readable_header (str): Readable message header for XSOAR war room.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
if output_key:
outputs = copy.deepcopy(response.get(output_key, []))
else:
outputs = copy.deepcopy(response)
if not isinstance(outputs, list):
outputs = [outputs]
readable_data = []
for ip_group in outputs:
properties = ip_group.get("properties")
data = {
"name": ip_group.get("name"),
"id": ip_group.get("id"),
**properties,
}
readable_data.append(data)
readable_output = tableToMarkdown(
readable_header,
readable_data,
headers=['name', 'id', 'ipAddresses', 'firewalls', 'firewallPolicies', 'provisioningState'],
headerTransform=pascalToSpace
)
command_results = CommandResults(
readable_output=readable_output,
outputs_prefix='AzureFirewall.IPGroup',
outputs_key_field='id',
outputs=outputs,
raw_response=response
)
return command_results
def azure_firewall_ip_group_create_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
Create IP group resource.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
ip_group_name = args.get('ip_group_name')
location = args.get('location')
ip_address = argToList(args.get('ips'))
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
response = client.azure_firewall_ip_group_create_request(ip_group_name=ip_group_name, location=location,
ip_address=ip_address)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-ip-group-get', interval=interval,
timeout=timeout, ip_group_names=ip_group_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="IP-Group",
resource_name=ip_group_name))
return generate_ip_group_command_output(response,
readable_header=f'Successfully Created IP Group "{ip_group_name}"')
def azure_firewall_ip_group_update_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
Update IP group. Add or remove IPs from the group.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
ip_group_name = args.get('ip_group_name')
ip_address_to_add = argToList(args.get('ips_to_add'))
ip_address_to_remove = argToList(args.get('ips_to_remove'))
should_poll = True
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
if not ip_address_to_add and not ip_address_to_remove:
raise Exception('At least one of the arguments "ips_to_add" or "ips_to_remove" must be provided.')
ip_group_data = client.azure_firewall_ip_group_get_request(ip_group_name=ip_group_name)
ip_addresses = dict_safe_get(ip_group_data, ["properties", "ipAddresses"])
ip_addresses.extend(ip_address_to_add)
for ip_item in ip_address_to_remove:
try:
ip_addresses.remove(ip_item)
except ValueError:
continue
response = client.azure_firewall_ip_group_update_request(ip_group_name=ip_group_name, ip_group_data=ip_group_data)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if should_poll and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-ip-group-get', interval=interval,
timeout=timeout, ip_group_names=ip_group_name)
return CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(resource_type_name="IP-Group",
resource_name=ip_group_name))
return generate_ip_group_command_output(response, readable_header=f'{ip_group_name} IP Group Information:')
def azure_firewall_ip_group_list_command(client: AzureFirewallClient, args: Dict[str, Any]) -> CommandResults:
"""
List IP groups in resource group or subscription.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
resource = args.get('resource')
limit = arg_to_number(args.get('limit') or '50')
page = arg_to_number(args.get('page') or '1')
validate_pagination_arguments(limit, page)
readable_message = get_pagination_readable_message(header='IP Group List:',
limit=limit, page=page)
start_offset = (page - 1) * limit
end_offset = start_offset + limit
complete_requests = False
total_response = {'value': []}
response = client.azure_firewall_ip_group_list_request(resource=resource)
while not complete_requests:
total_response['value'].extend(response.get('value'))
if len(total_response['value']) >= end_offset or not response.get('nextLink'):
complete_requests = True
else:
response = client.azure_firewall_ip_group_list_request(resource=resource,
next_link=response.get('nextLink'))
return generate_ip_group_command_output(total_response.get('value')[start_offset: end_offset],
readable_header=readable_message)
def azure_firewall_ip_group_get_command(client: AzureFirewallClient, args: Dict[str, Any]) -> list:
"""
Retrieve IP group information.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ScheduledCommand.raise_error_if_not_supported()
ip_group_names = argToList(args.get('ip_group_names'))
scheduled = argToBoolean(args.get('polling', False))
interval = arg_to_number(args.get('interval', 30))
timeout = arg_to_number(args.get('timeout', 60))
command_results_list: List[CommandResults] = []
for ip_group in ip_group_names:
try:
response = client.azure_firewall_ip_group_get_request(ip_group_name=ip_group)
state = dict_safe_get(response, ["properties", "provisioningState"], '')
if scheduled and state not in ["Succeeded", "Failed"]:
# schedule next poll
scheduled_command = create_scheduled_command(command_name='azure-firewall-ip-group-get',
interval=interval, timeout=timeout,
ip_group_names=ip_group)
# result with scheduled_command only - no update to the war room
command_results_list.append(CommandResults(scheduled_command=scheduled_command,
readable_output=generate_polling_readable_message(
resource_type_name="IP-Group",
resource_name=ip_group)))
else:
command_results_list.append(
generate_ip_group_command_output(response, readable_header=f'{ip_group} IP Group Information:'))
except Exception as exception:
error = CommandResults(
readable_output=f'An error occurred while retrieving {ip_group}.\n {exception}'
)
command_results_list.append(error)
return command_results_list
def azure_firewall_ip_group_delete_command(client: AzureFirewallClient, args: Dict[str, Any]) -> list:
"""
Delete IP group resource.
Args:
client (AzureFirewallClient): Azure Firewall API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw response for XSOAR.
"""
ip_group_names = argToList(args.get('ip_group_names'))
command_results_list: List[CommandResults] = []
for ip_group in ip_group_names:
try:
response = client.azure_firewall_ip_group_delete_request(ip_group_name=ip_group)
if response.status_code == 202:
readable_output = f'IP Group {ip_group} delete operation accepted and will complete asynchronously.'
else:
readable_output = f'IP Group {ip_group} deleted successfully.'
command_results_list.append(CommandResults(
readable_output=readable_output
))
except Exception as exception:
error = CommandResults(
readable_output=f'An error occurred while deleting {ip_group}.\n {exception}'
)
command_results_list.append(error)
return command_results_list
# --Authorization Commands--
def start_auth(client: AzureFirewallClient) -> CommandResults:
"""
Start the authorization process.
Args:
client (AzureFirewallClient): Azure Firewall API client.
Returns:
CommandResults: Authentication guidelines.
"""
result = client.ms_client.start_auth('!azure-firewall-auth-complete')
return CommandResults(readable_output=result)
def complete_auth(client: AzureFirewallClient) -> str:
"""
Complete authorization process.
Args:
client (AzureFirewallClient): Azure Firewall API client.
Returns:
str: Informative message.
"""
client.ms_client.get_access_token()
return 'Authorization completed successfully.'
def test_connection(client: AzureFirewallClient) -> str:
"""
Test connectivity to Azure.
Args:
client (AzureFirewallClient): Azure Firewall API client.
Returns:
str: Informative message.
"""
try:
client.ms_client.get_access_token()
except Exception as err:
return f'Authorization Error: \n{err}'
return 'Success!'
def reset_auth() -> str:
"""
Reset Azure auth properties.
Returns:
str: Informative message.
"""
set_integration_context({})
return 'Authorization was reset successfully. Run **!azure-firewall-auth-start** to start' \
' the authentication process.'
def main() -> None:
params: Dict[str, Any] = demisto.params()
args: Dict[str, Any] = demisto.args()
verify_certificate: bool = not params.get('insecure', False)
proxy = params.get('proxy', False)
api_version = params.get('api_version')
subscription_id = params['subscription_id']['password']
resource_group = params['resource_group']
client_id = params['client_id']
client_secret = dict_safe_get(params, ['client_secret', 'password'])
tenant_id = dict_safe_get(params, ['tenant_id', 'password'])
certificate_thumbprint = params.get('certificate_thumbprint')
private_key = params.get('private_key')
if tenant_id:
if not client_secret and (
(private_key and not certificate_thumbprint) or (certificate_thumbprint and not private_key)):
raise DemistoException(
'When Tenant ID is provided, either Client Secret or Certificate Thumbprint and Private Key must be provided.')
command = demisto.command()
demisto.debug(f'Command being called is {command}')
try:
requests.packages.urllib3.disable_warnings()
client: AzureFirewallClient = AzureFirewallClient(
subscription_id=subscription_id,
resource_group=resource_group,
client_id=client_id,
api_version=api_version,
verify=verify_certificate,
proxy=proxy,
client_secret=client_secret,
tenant_id=tenant_id,
certificate_thumbprint=certificate_thumbprint,
private_key=private_key)
commands = {
'azure-firewall-list': azure_firewall_list_command,
'azure-firewall-get': azure_firewall_get_command,
'azure-firewall-rule-collection-list': azure_firewall_rules_collection_list_command,
'azure-firewall-rule-list': azure_firewall_rules_list_command,
'azure-firewall-rule-get': azure_firewall_rule_get_command,
'azure-firewall-policy-create': azure_firewall_policy_create_command,
'azure-firewall-policy-update': azure_firewall_policy_update_command,
'azure-firewall-policy-get': azure_firewall_policy_get_command,
'azure-firewall-policy-delete': azure_firewall_policy_delete_command,
'azure-firewall-policy-list': azure_firewall_policy_list_command,
'azure-firewall-policy-attach': azure_firewall_policy_attach_command,
'azure-firewall-policy-detach': azure_firewall_policy_remove_command,
'azure-firewall-network-rule-collection-create': azure_firewall_network_rule_collection_create_command,
'azure-firewall-network-rule-collection-update': azure_firewall_network_rule_collection_update_command,
'azure-firewall-network-rule-collection-delete': azure_firewall_network_rule_collection_delete_command,
'azure-firewall-network-rule-create': azure_firewall_network_rule_create_command,
'azure-firewall-network-rule-update': azure_firewall_network_rule_update_command,
'azure-firewall-network-rule-delete': azure_firewall_network_rule_remove_command,
'azure-firewall-service-tag-list': azure_firewall_service_tag_list_command,
'azure-firewall-ip-group-create': azure_firewall_ip_group_create_command,
'azure-firewall-ip-group-update': azure_firewall_ip_group_update_command,
'azure-firewall-ip-group-list': azure_firewall_ip_group_list_command,
'azure-firewall-ip-group-get': azure_firewall_ip_group_get_command,
'azure-firewall-ip-group-delete': azure_firewall_ip_group_delete_command,
}
if command == 'test-module':
return_results(
'The test module is not functional; '
'run the azure-firewall-auth-start command instead.')
if command == 'azure-firewall-auth-start':
return_results(start_auth(client))
elif command == 'azure-firewall-auth-complete':
return_results(complete_auth(client))
elif command == 'azure-firewall-auth-test':
return_results(test_connection(client))
elif command == 'azure-firewall-auth-reset':
return_results(reset_auth())
elif command in commands:
return_results(commands[command](client, args))
else:
raise NotImplementedError(f'{command} command is not implemented.')
except Exception as e:
return_error(str(e))
from MicrosoftApiModule import * # noqa: E402
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Integrations/AzureFirewall/AzureFirewall_test.py
```python
import copy
import pytest
from unittest.mock import Mock
from CommonServerPython import *
SUBSCRIPTION_ID = "sub_id"
RESOURCE_GROUP_NAME = "group_name"
BASE_URL = f'https://management.azure.com/subscriptions/{SUBSCRIPTION_ID}' \
f'/resourceGroups/{RESOURCE_GROUP_NAME}/providers/Microsoft.Network'
CLIENT_ID = "XXXX"
ScheduledCommand.raise_error_if_not_supported = Mock()
def load_mock_response(file_path: str) -> str:
"""
Load mock file that simulates an API response.
Args:
file_path (str): Path of the mock response JSON file to return.
Returns:
str: Mock file content.
"""
with open(file_path, mode='r', encoding='utf-8') as mock_file:
return mock_file.read()
def get_azure_access_token_mock() -> dict:
"""
Mock Azure access token object.
Returns:
dict: Azure access token mock.
"""
return {
'access_token': '<PASSWORD>',
'expires_in': 3595,
'refresh_token': '<PASSWORD>',
}
def get_client_mock():
"""
Get API Client mock.
Returns:
AzureFirewallClient: API Client
"""
from AzureFirewall import AzureFirewallClient
return AzureFirewallClient(
subscription_id=SUBSCRIPTION_ID,
resource_group=RESOURCE_GROUP_NAME,
client_id=CLIENT_ID,
api_version='2021-03-01',
verify=False,
proxy=False)
def authorization_mock(requests_mock):
"""
Azure authorization API request mock.
"""
authorization_url = 'https://login.microsoftonline.com/organizations/oauth2/v2.0/token'
requests_mock.post(authorization_url, json=get_azure_access_token_mock())
def test_azure_firewall_list_command(requests_mock):
"""
Scenario: List azure firewalls in resource group or subscription.
Given:
- User has provided valid credentials.
When:
- azure-firewall-list called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the firewall name expected is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_list_command
authorization_mock(requests_mock)
client = get_client_mock()
url = f'{BASE_URL}/azureFirewalls'
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_list.json'))
requests_mock.get(url, json=mock_response)
result = azure_firewall_list_command(client, {'resource': 'resource_group'})
assert len(result.outputs) == 1
assert result.outputs_prefix == 'AzureFirewall.Firewall'
assert result.outputs[0].get('name') == 'xsoar-firewall'
def test_azure_firewall_get_command(requests_mock):
"""
Scenario: Retrieve azure firewall information.
Given:
- User has provided valid credentials.
When:
- azure-firewall-get called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the firewall name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_get_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = dict(firewall_names=firewall_name)
result = azure_firewall_get_command(client, command_arguments)
assert len(result[0].outputs) == 1
assert result[0].outputs_prefix == 'AzureFirewall.Firewall'
assert result[0].outputs[0].get('name') == firewall_name
def test_azure_firewall_rules_collection_list_command_for_firewall(requests_mock):
"""
Scenario: List collection rules in firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-rule-collection-list called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the rule collection name expected is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_rules_collection_list_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = dict(firewall_name=firewall_name, rule_type="application_rule")
result = azure_firewall_rules_collection_list_command(client, command_arguments)
assert len(result.outputs) == 1
assert result.outputs_key_field == 'id'
assert result.outputs_prefix == 'AzureFirewall.RuleCollection'
assert result.outputs[0].get('name') == "my-app-collection"
assert dict_safe_get(result.outputs[0], ["properties", "rules"])[0].get("name") == "my-app-rule-1"
def test_azure_firewall_rules_collection_list_command_for_policy(requests_mock):
"""
Scenario: List collection rules in policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-rule-collection-list called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the rule collection name searched is the same as in the context returned.
- Ensure the rule collection key (type) searched is the same as in the context returned.
- Ensure the rule type (type) searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_rules_collection_list_command, get_policy_rule_collection_name, \
get_policy_rule_name
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-firewall'
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups'
mock_response = json.loads(load_mock_response('test_data/policy/policy_rule_collection_list.json'))
requests_mock.get(url, json=mock_response)
rule_type = "application_rule"
command_arguments = dict(policy=policy_name, rule_type=rule_type)
result = azure_firewall_rules_collection_list_command(client, command_arguments)
collection_key = get_policy_rule_collection_name(rule_type=rule_type)
rule_key = get_policy_rule_name(rule_type=rule_type)
assert len(result.outputs) == 1
assert result.outputs_key_field == 'id'
assert result.outputs_prefix == 'AzureFirewall.RuleCollection'
assert result.outputs[0].get('name') == "DefaultApplicationRuleCollectionGroup"
assert dict_safe_get(result.outputs[0], ["properties", "ruleCollections"])[0].get("rules")[0].get(
'ruleType') == rule_key
assert dict_safe_get(result.outputs[0], ["properties", "ruleCollections"])[0].get(
"ruleCollectionType") == collection_key
def test_azure_firewall_rules_list_command_for_policy(requests_mock):
"""
Scenario: List rules in policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-rule-list called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the rule name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_rules_list_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-firewall'
collection_name = "DefaultApplicationRuleCollectionGroup"
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_rule_list.json'))
requests_mock.get(url, json=mock_response)
command_arguments = dict(policy=policy_name, collection_name=collection_name)
result = azure_firewall_rules_list_command(client, command_arguments)
assert len(result.outputs) == 1
assert result.outputs_key_field == 'name'
assert result.outputs_prefix == 'AzureFirewall.Rule'
assert result.outputs[0].get('name') == "my-app-rule-1"
def test_azure_firewall_rules_list_command_for_firewall(requests_mock):
"""
Scenario: List rules in firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-rule-list called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the rule name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_rules_list_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
collection_name = "my-app-collection"
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = dict(firewall_name=firewall_name, collection_name=collection_name, rule_type="application_rule")
result = azure_firewall_rules_list_command(client, command_arguments)
assert len(result.outputs) == 1
assert result.outputs_key_field == 'name'
assert result.outputs_prefix == 'AzureFirewall.Rule'
assert result.outputs[0].get('name') == "my-app-rule-1"
def test_azure_firewall_rules_get_command_for_firewall(requests_mock):
"""
Scenario: Retrieve rule information in firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-rule-get called.
Then:
- Ensure outputs prefix is correct.
- Ensure the rule name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_rule_get_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
collection_name = "my-app-collection"
rule_name = "my-app-rule-1"
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = dict(firewall_name=firewall_name, collection_name=collection_name, rule_type="application_rule",
rule_name=rule_name)
result = azure_firewall_rule_get_command(client, command_arguments)
assert result.outputs_key_field == 'name'
assert result.outputs_prefix == 'AzureFirewall.Rule'
assert result.outputs.get('name') == rule_name
def test_azure_firewall_rule_get_command_for_policy(requests_mock):
"""
Scenario: Retrieve rule information in policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-rule-get called.
Then:
- Ensure outputs prefix is correct.
- Ensure the rule name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_rule_get_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-firewall'
collection_name = "DefaultApplicationRuleCollectionGroup"
rule_name = "my-app-rule-1"
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_rule_list.json'))
requests_mock.get(url, json=mock_response)
command_arguments = dict(policy=policy_name, collection_name=collection_name, rule_name=rule_name)
result = azure_firewall_rule_get_command(client, command_arguments)
assert result.outputs_key_field == 'name'
assert result.outputs_prefix == 'AzureFirewall.Rule'
assert result.outputs.get('name') == "my-app-rule-1"
def test_azure_firewall_policy_create_command(requests_mock):
"""
Scenario: Create firewall policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-policy-create called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the policy name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_policy_create_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_create.json'))
requests_mock.put(url, json=mock_response)
command_arguments = dict(policy_name=policy_name, threat_intelligence_mode="Turned-off", location="eastus",
tier="Standard", enable_proxy="False")
result = azure_firewall_policy_create_command(client, command_arguments)
assert len(result.outputs) == 1
assert result.outputs_prefix == 'AzureFirewall.Policy'
assert result.outputs[0].get('name') == policy_name
def test_azure_firewall_policy_update_command(requests_mock):
"""
Scenario: Update firewall policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-policy-update called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the policy name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_policy_update_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_get.json'))
requests_mock.get(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/policy/policy_update.json'))
requests_mock.put(url, json=mock_response)
command_arguments = {
'base_policy_id': '/firewallPolicies/my-policy',
'domains': 'microsoft.com', 'enable_proxy': 'True',
'ips': '192.168.127.12', 'policy_name': policy_name, 'threat_intelligence_mode': 'Alert'}
result = azure_firewall_policy_update_command(client, command_arguments)
assert len(result.outputs) == 1
assert result.outputs_prefix == 'AzureFirewall.Policy'
assert result.outputs[0].get('name') == policy_name
def test_azure_firewall_policy_list_command(requests_mock):
"""
Scenario: List policy in resource group or subscription.
Given:
- User has provided valid credentials.
When:
- azure-firewall-policy-list called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the policy name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_policy_list_command
authorization_mock(requests_mock)
client = get_client_mock()
url = f'{BASE_URL}/firewallPolicies'
mock_response = json.loads(load_mock_response('test_data/policy/policy_list.json'))
requests_mock.get(url, json=mock_response)
result = azure_firewall_policy_list_command(client, {})
assert len(result.outputs) == 1
assert result.outputs_prefix == 'AzureFirewall.Policy'
assert result.outputs[0].get('name') == "xsoar-policy"
def test_azure_firewall_policy_get_command(requests_mock):
"""
Scenario: Retrieve policy information.
Given:
- User has provided valid credentials.
When:
- azure-firewall-policy-get called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the policy name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_policy_get_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = dict(policy_names=policy_name)
result = azure_firewall_policy_get_command(client, command_arguments)
assert len(result) == 1
assert len(result[0].outputs) == 1
assert result[0].outputs_prefix == 'AzureFirewall.Policy'
assert result[0].outputs[0].get('name') == policy_name
def test_azure_firewall_policy_delete_command(requests_mock):
"""
Scenario: Delete policy resource.
Given:
- User has provided valid credentials.
When:
- azure-firewall-policy-delete called.
Then:
- Ensure that the output is empty (None).
- Ensure readable output message content.
"""
from AzureFirewall import azure_firewall_policy_delete_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
requests_mock.delete(url, status_code=202)
command_arguments = dict(policy_names=policy_name)
result = azure_firewall_policy_delete_command(client, command_arguments)
assert len(result) == 1
assert result[0].outputs is None
assert result[0].outputs_prefix is None
assert result[0].readable_output == f'Policy {policy_name} ' \
f'delete operation accepted and will complete asynchronously.'
def test_azure_firewall_policy_attach_command(requests_mock):
"""
Scenario: Attach policy to firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-policy-attach called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the firewall name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_policy_attach_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_subnet_public_ip_attach.json'))
requests_mock.put(url, json=mock_response)
command_arguments = {'firewall_names': firewall_name, 'policy_id': '/firewallPolicies/xsoar-platform-policy'}
result = azure_firewall_policy_attach_command(client, command_arguments)
assert len(result) == 1
assert len(result[0].outputs) == 1
assert result[0].outputs_prefix == 'AzureFirewall.Firewall'
assert result[0].outputs[0].get('name') == firewall_name
def test_azure_firewall_policy_remove_command(requests_mock):
"""
Scenario: Remove policy from firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-policy-remove called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the firewall name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_policy_remove_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_subnet_public_ip_remove.json'))
requests_mock.put(url, json=mock_response)
command_arguments = {'firewall_names': firewall_name}
result = azure_firewall_policy_remove_command(client, command_arguments)
assert len(result) == 1
assert len(result[0].outputs) == 1
assert result[0].outputs_prefix == 'AzureFirewall.Firewall'
assert result[0].outputs[0].get('name') == firewall_name
def test_azure_firewall_ip_group_create_command(requests_mock):
"""
Scenario: Create IP group resource.
Given:
- User has provided valid credentials.
When:
- azure-firewall-ip-group-create called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the ip-group name created is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_ip_group_create_command
authorization_mock(requests_mock)
client = get_client_mock()
ip_group_name = 'xsoar-ip-group'
url = f'{BASE_URL}/ipGroups/{ip_group_name}'
mock_response = json.loads(load_mock_response('test_data/ip_group/ip_group_create.json'))
requests_mock.put(url, json=mock_response)
command_arguments = {'ip_group_name': ip_group_name, 'location': 'eastus', 'ips': '192.168.127.12,192.168.127.12'}
result = azure_firewall_ip_group_create_command(client, command_arguments)
assert len(result.outputs) == 1
assert result.outputs_prefix == 'AzureFirewall.IPGroup'
assert result.outputs[0].get('name') == ip_group_name
def test_azure_firewall_ip_group_update_command(requests_mock):
"""
Scenario: Update IP group. Add or remove IPs from the group.
Given:
- User has provided valid credentials.
When:
- azure-firewall-ip-group-update called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the ip-group name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_ip_group_update_command
authorization_mock(requests_mock)
client = get_client_mock()
ip_group_name = 'xsoar-ip-group'
url = f'{BASE_URL}/ipGroups/{ip_group_name}'
mock_response = json.loads(load_mock_response('test_data/ip_group/ip_group_get.json'))
requests_mock.get(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/ip_group/ip_group_update.json'))
requests_mock.put(url, json=mock_response)
ips_to_add = '192.168.127.12,192.168.127.12'
ips_to_remove = '192.168.127.12'
command_arguments = {'ip_group_name': ip_group_name, 'ips_to_add': ips_to_add, 'ips_to_remove': ips_to_remove}
result = azure_firewall_ip_group_update_command(client, command_arguments)
assert len(result.outputs) == 1
assert result.outputs_prefix == 'AzureFirewall.IPGroup'
assert result.outputs[0].get('name') == ip_group_name
def test_azure_firewall_ip_group_list_command(requests_mock):
"""
Scenario: List IP groups in resource group or subscription.
Given:
- User has provided valid credentials.
When:
- azure-firewall-ip-group-list called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the ip-group name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_ip_group_list_command
authorization_mock(requests_mock)
client = get_client_mock()
ip_group_name = 'xsoar-ip-group'
url = f'{BASE_URL}/ipGroups'
mock_response = json.loads(load_mock_response('test_data/ip_group/ip_group_list.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'resource': 'resource_group'}
result = azure_firewall_ip_group_list_command(client, command_arguments)
assert len(result.outputs) == 1
assert result.outputs_prefix == 'AzureFirewall.IPGroup'
assert result.outputs[0].get('name') == ip_group_name
def test_azure_firewall_ip_group_get_command(requests_mock):
"""
Scenario: Retrieve IP group information.
Given:
- User has provided valid credentials.
When:
- azure-firewall-ip-group-get called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the ip-group name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_ip_group_get_command
authorization_mock(requests_mock)
client = get_client_mock()
ip_group_name = 'xsoar-ip-group'
url = f'{BASE_URL}/ipGroups/{ip_group_name}'
mock_response = json.loads(load_mock_response('test_data/ip_group/ip_group_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'ip_group_names': ip_group_name}
result = azure_firewall_ip_group_get_command(client, command_arguments)
assert len(result) == 1
assert len(result[0].outputs) == 1
assert result[0].outputs_prefix == 'AzureFirewall.IPGroup'
assert result[0].outputs[0].get('name') == ip_group_name
def test_azure_firewall_ip_group_delete_command(requests_mock):
"""
Scenario: Delete IP group resource.
Given:
- User has provided valid credentials.
When:
- azure-firewall-ip-group-delete called.
Then:
- Ensure that the output is empty (None).
- Ensure readable output message content.
"""
from AzureFirewall import azure_firewall_ip_group_delete_command
authorization_mock(requests_mock)
client = get_client_mock()
ip_group_name = 'xsoar-ip-group'
url = f'{BASE_URL}/ipGroups/{ip_group_name}'
requests_mock.delete(url, status_code=202)
command_arguments = {'ip_group_names': ip_group_name}
result = azure_firewall_ip_group_delete_command(client, command_arguments)
assert len(result) == 1
assert result[0].outputs is None
assert result[0].outputs_prefix is None
assert result[0].readable_output == f'IP Group {ip_group_name} ' \
f'delete operation accepted and will complete asynchronously.'
def test_azure_firewall_network_rule_collection_create_command_for_firewall(requests_mock):
"""
Scenario: Create network rule collection in firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-collection-create called.
Then:
- Ensure outputs prefix is correct.
- Ensure the firewall name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_collection_create_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(
load_mock_response('test_data/network_rule/firewall_network_rule_collection_create.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'action': 'Allow', 'collection_name': 'my-collection', 'collection_priority': '105',
'description': 'my-poc-collection', 'destination_ports': '8080',
'destination_type': 'ip_address',
'destinations': '192.168.127.12,192.168.127.12', 'firewall_name': firewall_name,
'protocols': 'UDP,TCP',
'rule_name': 'my-ip-rule', 'source_ips': '192.168.127.12,192.168.127.12',
'source_type': 'ip_address'}
result = azure_firewall_network_rule_collection_create_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Firewall'
assert result.outputs[0].get('name') == firewall_name
def test_azure_firewall_network_rule_collection_create_command_for_policy(requests_mock):
"""
Scenario: Create network rule collection in policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-collection-create called.
Then:
- Ensure outputs prefix is correct.
- Ensure the policy name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_collection_create_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
collection_name = "xsoar-collection"
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
mock_response = json.loads(
load_mock_response('test_data/network_rule/policy_network_rule_collection_create.json'))
requests_mock.put(url, json=mock_response)
requests_mock.get(url, status_code=404)
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'action': 'Allow', 'collection_name': collection_name, 'collection_priority': '109',
'description': 'my-poc-collection', 'destination_ports': '8080',
'destination_type': 'ip_address', 'destinations': '192.168.127.12,192.168.127.12',
'policy': policy_name, 'protocols': 'UDP,TCP', 'rule_name': 'my-ip-rule',
'source_ips': '192.168.127.12,192.168.127.12', 'source_type': 'ip_address'}
result = azure_firewall_network_rule_collection_create_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Policy'
assert result.outputs[0].get('name') == policy_name
def test_azure_firewall_network_rule_collection_create_command_invalid_arguments(requests_mock):
"""
Scenario: Create network rule collection in firewall. The user provided invalid or missing arguments.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-collection-create called.
Then:
- Ensure that exception is raised.
"""
from AzureFirewall import azure_firewall_network_rule_collection_create_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
command_arguments = {'action': 'Allow', 'collection_priority': '105',
'description': 'my-poc-collection', 'destination_ports': '8080',
'destination_type': 'ip_address', 'firewall_name': firewall_name,
'protocols': 'UDP,TCP', 'source_ips': '192.168.127.12,192.168.127.12',
'source_type': 'ip_address'}
with pytest.raises(Exception):
invalid_arguments = copy.deepcopy(command_arguments)
invalid_arguments['protocols'] = 'test'
azure_firewall_network_rule_collection_create_command(client, invalid_arguments)
with pytest.raises(Exception):
invalid_arguments = copy.deepcopy(command_arguments)
invalid_arguments['source_type'] = 'test'
azure_firewall_network_rule_collection_create_command(client, invalid_arguments)
with pytest.raises(Exception):
invalid_arguments = copy.deepcopy(command_arguments)
invalid_arguments['destination_type'] = 'test'
azure_firewall_network_rule_collection_create_command(client, invalid_arguments)
with pytest.raises(Exception):
invalid_arguments = copy.deepcopy(command_arguments)
invalid_arguments['source_type'] = 'ip_address'
del invalid_arguments['source_ips']
azure_firewall_network_rule_collection_create_command(client, invalid_arguments)
with pytest.raises(Exception):
invalid_arguments = copy.deepcopy(command_arguments)
invalid_arguments['source_type'] = 'ip_group'
del invalid_arguments['source_ips']
azure_firewall_network_rule_collection_create_command(client, invalid_arguments)
def test_azure_firewall_network_rule_create_command_for_firewall(requests_mock):
"""
Scenario: Create network rule in firewall rule collection.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-create called.
Then:
- Ensure outputs prefix is correct.
- Ensure the firewall name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_create_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(
load_mock_response('test_data/network_rule/firewall_network_rule_collection_create.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'collection_name': 'my-network-rule-collection',
'description': 'my-poc-collection', 'destination_ports': '8080',
'destination_type': 'ip_address',
'destinations': '192.168.127.12,192.168.127.12', 'firewall_name': firewall_name,
'protocols': 'UDP,TCP',
'rule_name': 'my-ip-rule', 'source_ips': '192.168.127.12,192.168.127.12',
'source_type': 'ip_address'}
result = azure_firewall_network_rule_create_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Firewall'
assert result.outputs[0].get('name') == firewall_name
def test_azure_firewall_network_rule_create_command_for_policy(requests_mock):
"""
Scenario: Create network rule in policy rule collection.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-create called.
Then:
- Ensure outputs prefix is correct.
- Ensure the policy name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_create_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
collection_name = "xsoar-collection"
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
mock_response = json.loads(
load_mock_response('test_data/network_rule/policy_network_rule_collection_create.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(
load_mock_response('test_data/policy/policy_rule_collection_get.json'))
requests_mock.get(url, json=mock_response)
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'collection_name': collection_name,
'description': 'my-poc-collection', 'destination_ports': '8080',
'destination_type': 'ip_address', 'destinations': '192.168.127.12,192.168.127.12',
'policy': policy_name, 'protocols': 'UDP,TCP', 'rule_name': 'my-rule',
'source_ips': '192.168.127.12,192.168.127.12', 'source_type': 'ip_address'}
result = azure_firewall_network_rule_create_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Policy'
assert result.outputs[0].get('name') == policy_name
def test_azure_firewall_network_rule_collection_update_command_for_firewall(requests_mock):
"""
Scenario: Update network rule collection in firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-collection-update called.
Then:
- Ensure outputs prefix is correct.
- Ensure the firewall name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_collection_update_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(
load_mock_response('test_data/network_rule/firewall_network_rule_collection_create.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'action': 'Deny', 'collection_name': 'my-network-rule-collection',
'firewall_name': firewall_name, 'priority': '201'}
result = azure_firewall_network_rule_collection_update_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Firewall'
assert result.outputs[0].get('name') == firewall_name
def test_azure_firewall_network_rule_collection_update_command_for_policy(requests_mock):
"""
Scenario: Update network rule collection in policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-collection-update called.
Then:
- Ensure outputs prefix is correct.
- Ensure the policy name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_collection_update_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
collection_name = "xsoar-collection"
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
mock_response = json.loads(
load_mock_response('test_data/network_rule/policy_network_rule_collection_create.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(
load_mock_response('test_data/policy/policy_rule_collection_get.json'))
requests_mock.get(url, json=mock_response)
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'action': 'Deny', 'collection_name': collection_name,
'policy': policy_name, 'priority': '201'}
result = azure_firewall_network_rule_collection_update_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Policy'
assert result.outputs[0].get('name') == policy_name
def test_azure_firewall_network_rule_collection_delete_command_for_firewall(requests_mock):
"""
Scenario: Delete network rule collection from firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-collection-delete called.
Then:
- Ensure outputs prefix is correct.
- Ensure the firewall name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_collection_delete_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
collection_name = 'my-network-rule-collection'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(
load_mock_response('test_data/firewall/firewall_update.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'collection_name': collection_name, 'firewall_name': firewall_name}
result = azure_firewall_network_rule_collection_delete_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Firewall'
assert result.outputs[0].get('name') == firewall_name
def test_azure_firewall_network_rule_collection_delete_command_for_policy(requests_mock):
"""
Scenario: Delete network rule collection from policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-collection-delete called.
Then:
- Ensure outputs prefix is correct.
- Ensure the policy name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_collection_delete_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
collection_name = "xsoar-collection"
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
requests_mock.delete(url, status_code=200)
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'action': 'Deny', 'collection_name': collection_name,
'policy': policy_name, 'priority': '201'}
result = azure_firewall_network_rule_collection_delete_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Policy'
assert result.outputs[0].get('name') == policy_name
def test_azure_firewall_network_rule_remove_command_for_firewall(requests_mock):
"""
Scenario: Remove network rule from rules collection in firewall.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-delete called.
Then:
- Ensure outputs prefix is correct.
- Ensure the firewall name updated is the same as in the context returned.
- Ensure that the output is empty (None) for non-existent rules.
"""
from AzureFirewall import azure_firewall_network_rule_remove_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
collection_name = 'my-network-rule-collection'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(
load_mock_response('test_data/firewall/firewall_update.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'collection_name': collection_name, 'firewall_name': firewall_name,
'rule_names': 'my-network-rule,not-exists-rule'}
result = azure_firewall_network_rule_remove_command(client, command_arguments)
assert result[0].outputs is None
assert result[0].outputs_prefix is None
assert result[0].readable_output == 'Rule not-exists-rule is not exists.'
assert result[1].outputs[0].get("name") == firewall_name
assert result[1].outputs_prefix == "AzureFirewall.Firewall"
def test_azure_firewall_network_rule_remove_command_for_policy(requests_mock):
"""
Scenario: Remove network rule from rules collection in policy.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-delete called.
Then:
- Ensure outputs prefix is correct.
- Ensure the policy name updated is the same as in the context returned.
- Ensure that the output is empty (None) for non-existent rules.
"""
from AzureFirewall import azure_firewall_network_rule_remove_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
collection_name = "xsoar-collection"
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
mock_response = json.loads(
load_mock_response('test_data/network_rule/policy_network_rule_collection_create.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(
load_mock_response('test_data/policy/policy_rule_collection_get.json'))
requests_mock.get(url, json=mock_response)
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'collection_name': collection_name, 'policy': policy_name,
'rule_names': 'my-ip-rule,not-exists-rule'}
result = azure_firewall_network_rule_remove_command(client, command_arguments)
assert result[0].outputs is None
assert result[0].outputs_prefix is None
assert result[0].readable_output == 'Rule not-exists-rule is not exists.'
assert result[1].outputs[0].get("name") == policy_name
assert result[1].outputs_prefix == 'AzureFirewall.Policy'
def test_azure_firewall_network_rule_update_command_policy(requests_mock):
"""
Scenario: Update network rule in policy rule collection.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-update called.
Then:
- Ensure outputs prefix is correct.
- Ensure the policy name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_update_command
authorization_mock(requests_mock)
client = get_client_mock()
policy_name = 'xsoar-policy'
collection_name = "xsoar-collection"
url = f'{BASE_URL}/firewallPolicies/{policy_name}/ruleCollectionGroups/{collection_name}'
mock_response = json.loads(
load_mock_response('test_data/network_rule/policy_network_rule_collection_create.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(
load_mock_response('test_data/policy/policy_rule_collection_get.json'))
requests_mock.get(url, json=mock_response)
url = f'{BASE_URL}/firewallPolicies/{policy_name}'
mock_response = json.loads(load_mock_response('test_data/policy/policy_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'collection_name': collection_name, 'description': 'new-description',
'destination_ports': '8085',
'destination_type': 'ip_address', 'destinations': '192.168.127.12', 'new_rule_name': 'new-name',
'policy': policy_name, 'protocols': 'UDP', 'rule_name': 'my-ip-rule',
'source_ips': '192.168.127.12',
'source_type': 'ip_address'}
result = azure_firewall_network_rule_update_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Policy'
assert result.outputs[0].get('name') == policy_name
def test_azure_firewall_network_rule_update_command_for_firewall(requests_mock):
"""
Scenario: Update network rule in firewall rule collection.
Given:
- User has provided valid credentials.
When:
- azure-firewall-network-rule-update called.
Then:
- Ensure outputs prefix is correct.
- Ensure the firewall name updated is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_network_rule_update_command
authorization_mock(requests_mock)
client = get_client_mock()
firewall_name = 'xsoar-firewall'
collection_name = 'my-network-rule-collection'
url = f'{BASE_URL}/azureFirewalls/{firewall_name}'
mock_response = json.loads(
load_mock_response('test_data/firewall/firewall_update.json'))
requests_mock.put(url, json=mock_response)
mock_response = json.loads(load_mock_response('test_data/firewall/firewall_get.json'))
requests_mock.get(url, json=mock_response)
command_arguments = {'collection_name': collection_name, 'description': 'new-description',
'destination_ports': '8085', 'firewall_name': firewall_name,
'destination_type': 'ip_address', 'destinations': '192.168.127.12', 'new_rule_name': 'new-name',
'protocols': 'UDP', 'rule_name': 'my-network-rule',
'source_ips': '192.168.127.12',
'source_type': 'ip_address'}
result = azure_firewall_network_rule_update_command(client, command_arguments)
assert result.outputs_prefix == 'AzureFirewall.Firewall'
assert result.outputs[0].get('name') == firewall_name
def test_azure_firewall_service_tag_list_command(requests_mock):
"""
Scenario: Retrieve service tags information.
Given:
- User has provided valid credentials.
When:
- azure-service-tag-list called.
Then:
- Ensure 1 result is returned.
- Ensure outputs prefix is correct.
- Ensure the service tag name searched is the same as in the context returned.
"""
from AzureFirewall import azure_firewall_service_tag_list_command
authorization_mock(requests_mock)
client = get_client_mock()
location = "eastus"
url = f'https://management.azure.com/subscriptions/{SUBSCRIPTION_ID}/providers' \
f'/Microsoft.Network/locations/{location}/serviceTagDetails'
mock_response = json.loads(load_mock_response('test_data/network_rule/service_tag_list.json'))
requests_mock.get(url, json=mock_response)
result = azure_firewall_service_tag_list_command(client, {'location': location, "limit": 1})
assert len(result.outputs) == 1
assert result.outputs_prefix == 'AzureFirewall.ServiceTag'
assert result.outputs[0].get('name') == 'ActionGroup'
```
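The invalid-argument test for `azure-firewall-network-rule-collection-create` above repeats the same `pytest.raises` check for every bad input. Below is a sketch of a parametrized variant; it is meant to live in the same test file, so `authorization_mock` and `get_client_mock` are the helper functions defined earlier in that file, and it covers only the value-override cases (the missing-argument cases would need a separate parameter set).

```python
# Sketch of a parametrized version of the invalid-argument test above. Assumes it is added to
# the same test file, where authorization_mock and get_client_mock are already defined.
import copy

import pytest

INVALID_OVERRIDES = [
    {'protocols': 'test'},
    {'source_type': 'test'},
    {'destination_type': 'test'},
]


@pytest.mark.parametrize('override', INVALID_OVERRIDES)
def test_azure_firewall_network_rule_collection_create_command_invalid_values(requests_mock, override):
    """Each invalid override should make the command raise, as in the original test."""
    from AzureFirewall import azure_firewall_network_rule_collection_create_command
    authorization_mock(requests_mock)
    client = get_client_mock()
    arguments = {'action': 'Allow', 'collection_priority': '105', 'description': 'my-poc-collection',
                 'destination_ports': '8080', 'destination_type': 'ip_address',
                 'firewall_name': 'xsoar-firewall', 'protocols': 'UDP,TCP',
                 'source_ips': '192.168.127.12,192.168.127.12', 'source_type': 'ip_address'}
    invalid_arguments = copy.deepcopy(arguments)
    invalid_arguments.update(override)
    with pytest.raises(Exception):
        azure_firewall_network_rule_collection_create_command(client, invalid_arguments)
```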
#### File: Integrations/AzureSQLManagement/AzureSQLManagement.py
```python
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
import urllib3
import copy
# Disable insecure warnings
urllib3.disable_warnings()
''' CONSTANTS '''
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
API_VERSION = '2019-06-01-preview'
''' CLIENT CLASS '''
class Client:
"""Client class to interact with the service API
"""
@logger
def __init__(self, app_id, subscription_id, resource_group_name, verify, proxy,
azure_ad_endpoint='https://login.microsoftonline.com'):
self.resource_group_name = resource_group_name
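# An app_id of the form "<app_id>@<refresh_token>" carries a cached refresh token;
# store it in the integration context so the device-code flow can reuse it.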
if '@' in app_id:
app_id, refresh_token = app_id.split('@')
integration_context = get_integration_context()
integration_context.update(current_refresh_token=refresh_token)
set_integration_context(integration_context)
base_url = f'https://management.azure.com/subscriptions/{subscription_id}'
client_args = {
'self_deployed': True, # We always set the self_deployed key as True because when not using a self
# deployed machine, the DEVICE_CODE flow should behave somewhat like a self deployed
# flow and most of the same arguments should be set, as we're !not! using OProxy.
'auth_id': app_id,
'token_retrieval_url': 'https://login.microsoftonline.com/organizations/oauth2/v2.0/token',
'grant_type': DEVICE_CODE, # disable-secrets-detection
'base_url': base_url,
'verify': verify,
'proxy': proxy,
'resource': 'https://management.core.windows.net', # disable-secrets-detection
'scope': 'https://management.azure.com/user_impersonation offline_access user.read',
'ok_codes': (200, 201, 202, 204),
'azure_ad_endpoint': azure_ad_endpoint
}
self.ms_client = MicrosoftClient(**client_args)
@logger
def http_request(self, method: str, url_suffix: str = None, full_url: str = None, params: dict = {},
data: dict = None, resp_type: str = 'json') -> requests.Response:
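# Relative requests get the api-version query parameter appended automatically; callers
# passing a full URL (e.g. a paging link) are expected to include it themselves.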
if not full_url:
params['api-version'] = API_VERSION
return self.ms_client.http_request(method=method,
url_suffix=url_suffix,
full_url=full_url,
json_data=data,
params=params,
resp_type=resp_type)
@logger
def azure_sql_servers_list(self):
return self.http_request('GET', '/providers/Microsoft.Sql/servers')
@logger
def azure_sql_db_list(self, server_name: str):
return self.http_request('GET', f'resourceGroups/{self.resource_group_name}/providers/Microsoft.Sql/servers/'
f'{server_name}/databases')
@logger
def azure_sql_db_audit_policy_list(self, server_name: str, db_name: str):
return self.http_request('GET', f'resourceGroups/{self.resource_group_name}/providers/Microsoft.Sql/servers/'
f'{server_name}/databases/{db_name}/auditingSettings')
@logger
def azure_sql_db_threat_policy_get(self, server_name: str, db_name: str):
return self.http_request('GET', f'resourceGroups/{self.resource_group_name}/providers/Microsoft.Sql/servers/'
f'{server_name}/databases/{db_name}/securityAlertPolicies/default')
@logger
def azure_sql_db_audit_policy_create_update(self, server_name: str, db_name: str,
state: str, audit_actions_groups: List[str],
is_azure_monitor_target_enabled: bool,
is_storage_secondary_key_in_use: bool,
queue_delay_ms: str, retention_days: str,
storage_account_access_key: str,
storage_account_subscription_id: str,
storage_endpoint: str):
properties = assign_params(state=state, auditActionsAndGroups=audit_actions_groups,
isAzureMonitorTargetEnabled=is_azure_monitor_target_enabled,
isStorageSecondaryKeyInUse=is_storage_secondary_key_in_use,
queueDelayMs=queue_delay_ms,
retentionDays=retention_days,
storageAccountAccessKey=storage_account_access_key,
storageAccountSubscriptionId=storage_account_subscription_id,
storageEndpoint=storage_endpoint)
request_body = {'properties': properties} if properties else {}
return self.http_request(method='PUT', url_suffix=f'resourceGroups/{self.resource_group_name}/providers'
f'/Microsoft.Sql/servers/{server_name}/databases/'
f'{db_name}/auditingSettings/default',
data=request_body)
def azure_sql_db_threat_policy_create_update(self, server_name: str, db_name: str, state: str,
disabled_alerts: List[str], email_account_admins: str,
email_addresses: List[str], retention_days: str,
storage_account_access_key: str,
use_server_default: str, storage_endpoint: str):
properties = assign_params(state=state,
retentionDays=retention_days,
storageAccountAccessKey=storage_account_access_key,
storageEndpoint=storage_endpoint,
disabledAlerts=disabled_alerts,
emailAccountAdmins=email_account_admins,
emailAddresses=email_addresses,
useServerDefault=use_server_default)
request_body = {'properties': properties} if properties else {}
return self.http_request(method='PUT', url_suffix=f'resourceGroups/{self.resource_group_name}/providers'
f'/Microsoft.Sql/servers/{server_name}/databases/'
f'{db_name}/securityAlertPolicies/default',
data=request_body)
@logger
def azure_sql_servers_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""azure-sql-servers-list command returns a list of all servers
Args:
client: AzureSQLManagement Client to use
limit: The maximum number of servers returned to the War Room. Default is 50.
offset: Offset in the data set. Default is 0.
Returns:
A ``CommandResults`` object that is then passed to ``return_results``,
that contains a list of all servers
"""
offset_int = int(args.get('offset', '0'))
limit_int = int(args.get('limit', '50'))
server_list_raw = client.azure_sql_servers_list()
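# Page the raw 'value' list with offset/limit, then flatten each entry by promoting the
# nested 'properties' fields to the top level for readable context output.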
server_list_fixed = copy.deepcopy(server_list_raw.get('value', '')[offset_int:(offset_int + limit_int)])
for server in server_list_fixed:
if properties := server.get('properties', {}):
server.update(properties)
del server['properties']
human_readable = tableToMarkdown(name='Servers List', t=server_list_fixed,
headerTransform=pascalToSpace, removeNull=True)
return CommandResults(
readable_output=human_readable,
outputs_prefix='AzureSQL.Server',
outputs_key_field='id',
outputs=server_list_fixed,
raw_response=server_list_raw
)
@logger
def azure_sql_db_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""azure-sql-db-list command returns a list of all databases for server
Args:
client: AzureSQLManagement Client to use
server_name: server name for which we want to receive list of databases
limit: The maximum number of databases returned to the War Room. Default
is 50.
offset: Offset in the data set. Default is 0.
Returns:
A ``CommandResults`` object that is then passed to ``return_results``,
that contains a list of all databases for server
"""
offset_int = int(args.get('offset', '0'))
limit_int = int(args.get('limit', '50'))
database_list_raw = client.azure_sql_db_list(args.get('server_name'))
database_list_fixed = copy.deepcopy(database_list_raw.get('value', '')[offset_int:(offset_int + limit_int)])
for db in database_list_fixed:
properties = db.get('properties', {})
if properties:
db.update(properties)
del db['properties']
human_readable = tableToMarkdown(name='Database List', t=database_list_fixed,
headers=['name', 'location', 'status', 'managedBy'],
headerTransform=pascalToSpace, removeNull=True)
return CommandResults(
readable_output=human_readable,
outputs_prefix='AzureSQL.DB',
outputs_key_field='id',
outputs=database_list_fixed,
raw_response=database_list_raw
)
@logger
def azure_sql_db_audit_policy_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""azure_sql_db_audit_policy_list command returns a list of auditing settings of a database
Args:
client: AzureSQLManagement Client to use
server_name: server name for which we want to receive list of auditing settings
db_name: database for which we want to receive list of auditing settings
limit: The maximum number of audit policies returned to the War Room. Default
is 50.
offset: Offset in the data set. Default is 0.
Returns:
A ``CommandResults`` object that is then passed to ``return_results``,
that contains a list of auditing settings of a database
"""
server_name = args.get('server_name')
db_name = args.get('db_name')
offset_int = int(args.get('offset', '0'))
limit_int = int(args.get('limit', '50'))
audit_list_raw = client.azure_sql_db_audit_policy_list(server_name, db_name)
audit_list_fixed = copy.deepcopy(audit_list_raw.get('value', '')[offset_int:(offset_int + limit_int)])
for db in audit_list_fixed:
db['serverName'] = server_name
db['databaseName'] = db_name
if properties := db.get('properties', {}):
db.update(properties)
del db['properties']
human_readable = tableToMarkdown(name='Database Audit Settings', t=audit_list_fixed,
headerTransform=pascalToSpace, removeNull=True)
return CommandResults(
readable_output=human_readable,
outputs_prefix='AzureSQL.DBAuditPolicy',
outputs_key_field='id',
outputs=audit_list_fixed,
raw_response=audit_list_raw
)
@logger
def azure_sql_db_audit_policy_create_update_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""azure_sql_db_audit_policy_create_update command upadates and creates audit policies related to the server
and database
Args:
client: AzureSQLManagement Client to use
server_name: server name for which we want to create or update auditing settings
db_name: database for which we want to create or update auditing settings
state: state of the policy
audit_actions_groups: Comma-separated Actions-Groups and Actions to audit.
is_azure_monitor_target_enabled: Whether audit events are sent to Azure Monitor.
is_storage_secondary_key_in_use: Whether the storageAccountAccessKey value is the storage's secondary key.
queue_delay_ms: Time in milliseconds that can elapse before audit actions are forced
to be processed.
retention_days: Number of days to keep the policy in the audit logs.
storage_account_access_key: identifier key of the auditing storage account
storage_account_subscription_id: storage subscription Id
storage_endpoint: Storage endpoint.
Returns:
A ``CommandResults`` object that is then passed to ``return_results``,
that contains an updated audit policy
"""
server_name = args.get('server_name')
db_name = args.get('db_name')
state = args.get('state')
audit_actions_groups = argToList(args.get('audit_actions_groups', ''))
is_azure_monitor_target_enabled = args.get('is_azure_monitor_target_enabled', '')
is_storage_secondary_key_in_use = args.get('is_storage_secondary_key_in_use', '')
queue_delay_ms = args.get('queue_delay_ms', '')
retention_days = args.get('retention_days', '')
storage_account_access_key = args.get('storage_account_access_key', '')
storage_account_subscription_id = args.get('storage_account_subscription_id', '')
storage_endpoint = args.get('storage_endpoint', '')
raw_response = client.azure_sql_db_audit_policy_create_update(server_name=server_name, db_name=db_name, state=state,
audit_actions_groups=audit_actions_groups,
is_azure_monitor_target_enabled=is_azure_monitor_target_enabled,
is_storage_secondary_key_in_use=is_storage_secondary_key_in_use,
queue_delay_ms=queue_delay_ms,
retention_days=retention_days,
storage_account_access_key=storage_account_access_key,
storage_account_subscription_id=storage_account_subscription_id,
storage_endpoint=storage_endpoint)
fixed_response = copy.deepcopy(raw_response)
if properties := fixed_response.get('properties', {}):
fixed_response['serverName'] = server_name
fixed_response['databaseName'] = db_name
fixed_response.update(properties)
del fixed_response['properties']
human_readable = tableToMarkdown(name='Create Or Update Database Auditing Settings', t=fixed_response,
headerTransform=pascalToSpace, removeNull=True)
return CommandResults(
readable_output=human_readable,
outputs_prefix='AzureSQL.DBAuditPolicy',
outputs_key_field='id',
outputs=fixed_response,
raw_response=raw_response
)
@logger
def azure_sql_db_threat_policy_get_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""azure_sql_db_threat_policy_get command returns a threat detection policy of a database
Args:
client: AzureSQLManagement Client to use
server_name: server name for which we want to receive threat detection policies
db_name: database for which we want to receive threat detection policies
Returns:
A ``CommandResults`` object that is then passed to ``return_results``,
that contains a threat detection policy of a database
"""
server_name = args.get('server_name')
db_name = args.get('db_name')
threat_raw = client.azure_sql_db_threat_policy_get(server_name, db_name)
threat_fixed = copy.deepcopy(threat_raw)
if properties := threat_fixed.get('properties', {}):
threat_fixed['serverName'] = server_name
threat_fixed['databaseName'] = db_name
threat_fixed.update(properties)
del threat_fixed['properties']
human_readable = tableToMarkdown(name='Database Threat Detection Policies', t=threat_fixed,
headerTransform=pascalToSpace, removeNull=True)
return CommandResults(
readable_output=human_readable,
outputs_prefix='AzureSQL.DBThreatPolicy',
outputs_key_field='id',
outputs=threat_fixed,
raw_response=threat_raw
)
@logger
def azure_sql_db_threat_policy_create_update_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""azure_sql_db_audit_policy_create_update command upadates and creates threat policy related to the server
and database
Args:
client: AzureSQLManagement Client to use
server_name: server name for which we want to create or update auditing settings
db_name: database for which we want to create or update auditing settings
state: state of the policy
disabled_alerts: Comma-separated list of alerts that are disabled, or 'None' if no alerts
should be disabled.
email_account_admins: The alert is sent to the account administrators.
email_addresses: Comma-separated list of e-mail addresses to which the alert is
sent.
retention_days: Number of days to keep the policy in the audit logs.
storage_account_access_key: identifier key of the auditing storage account
use_server_default: Whether to use the default server policy or not.
storage_endpoint: Storage endpoint.
Returns:
A ``CommandResults`` object that is then passed to ``return_results``,
that contains an updated threat policy
"""
server_name = args['server_name']
db_name = args['db_name']
state = args['state']
retention_days = args.get('retention_days', '')
email_account_admins = args.get('email_account_admins', '')
email_addresses = argToList(args.get('email_addresses', ''))
storage_account_access_key = args.get('storage_account_access_key', '')
use_server_default = args.get('use_server_default', '')
storage_endpoint = args.get('storage_endpoint', '')
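# A value of 'None' is translated to [""] so that no alerts end up disabled, matching the
# documented behavior of the disabled_alerts argument.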
disabled_alerts = [""] if 'None' in argToList(args.get('disabled_alerts', '')) \
else argToList(args.get('disabled_alerts', ''))
raw_response = client.azure_sql_db_threat_policy_create_update(server_name=server_name, db_name=db_name,
state=state,
retention_days=retention_days,
disabled_alerts=disabled_alerts,
email_account_admins=email_account_admins,
email_addresses=email_addresses,
storage_account_access_key=storage_account_access_key,
use_server_default=use_server_default,
storage_endpoint=storage_endpoint)
fixed_response = copy.deepcopy(raw_response)
if properties := fixed_response.get('properties', {}):
fixed_response['serverName'] = server_name
fixed_response['databaseName'] = db_name
fixed_response.update(properties)
del fixed_response['properties']
human_readable = tableToMarkdown(name='Create Or Update Database Threat Detection Policies', t=fixed_response,
headerTransform=pascalToSpace, removeNull=True)
return CommandResults(
readable_output=human_readable,
outputs_prefix='AzureSQL.DBThreatPolicy',
outputs_key_field='id',
outputs=fixed_response,
raw_response=raw_response
)
@logger
def test_connection(client: Client) -> CommandResults:
client.ms_client.get_access_token() # If fails, MicrosoftApiModule returns an error
return CommandResults(readable_output='✅ Success!')
@logger
def start_auth(client: Client) -> CommandResults:
result = client.ms_client.start_auth('!azure-sql-auth-complete')
return CommandResults(readable_output=result)
@logger
def complete_auth(client: Client) -> CommandResults:
client.ms_client.get_access_token()
return CommandResults(readable_output='✅ Authorization completed successfully.')
@logger
def reset_auth(client: Client) -> CommandResults:
set_integration_context({})
return CommandResults(readable_output='Authorization was reset successfully. You can now run '
'**!azure-sql-auth-start** and **!azure-sql-auth-complete**.')
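# Typical authorization sequence for the device-code flow used here:
# !azure-sql-auth-start -> follow the returned instructions -> !azure-sql-auth-complete -> !azure-sql-auth-test.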
''' MAIN FUNCTION '''
def main() -> None:
"""main function, parses params and runs command functions
"""
params = demisto.params()
command = demisto.command()
args = demisto.args()
demisto.debug(f'Command being called is {command}')
try:
client = Client(
app_id=params.get('app_id', ''),
subscription_id=params.get('subscription_id', ''),
resource_group_name=params.get('resource_group_name', ''),
verify=not params.get('insecure', False),
proxy=params.get('proxy', False),
azure_ad_endpoint=params.get('azure_ad_endpoint',
'https://login.microsoftonline.com') or 'https://login.microsoftonline.com'
)
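# test-module cannot complete the interactive device-code flow, so point the user at the
# manual auth commands instead of attempting a real login.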
if command == 'test-module':
return_error(
'Please run `!azure-sql-auth-start` and `!azure-sql-auth-complete` to log in. '
'You can validate the connection by running `!azure-sql-auth-test`\n '
'For more details press the (?) button.')
elif command == 'azure-sql-servers-list':
return_results(azure_sql_servers_list_command(client, args))
elif command == 'azure-sql-db-list':
return_results(azure_sql_db_list_command(client, args))
elif command == 'azure-sql-db-audit-policy-list':
return_results(azure_sql_db_audit_policy_list_command(client, args))
elif command == 'azure-sql-db-audit-policy-create-update':
return_results(azure_sql_db_audit_policy_create_update_command(client, args))
elif command == 'azure-sql-db-threat-policy-get':
return_results(azure_sql_db_threat_policy_get_command(client, args))
elif command == 'azure-sql-db-threat-policy-create-update':
return_results(azure_sql_db_threat_policy_create_update_command(client, args))
elif command == 'azure-sql-auth-start':
return_results(start_auth(client))
elif command == 'azure-sql-auth-complete':
return_results(complete_auth(client))
elif command == 'azure-sql-auth-reset':
return_results(reset_auth(client))
elif command == 'azure-sql-auth-test':
return_results(test_connection(client))
# Log exceptions and return errors
except Exception as e:
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
from MicrosoftApiModule import * # noqa: E402
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Integrations/BitcoinAbuse/BitcoinAbuse.py
```python
import demistomock as demisto # noqa: F401
from CSVFeedApiModule import *
from CommonServerPython import * # noqa: F401
# disable insecure warnings
urllib3.disable_warnings()
''' CONSTANTS '''
SERVER_URL = 'https://www.bitcoinabuse.com/api/'
ABUSE_TYPE_ID_TO_NAME: Dict[str, str] = {
'1': 'ransomware',
'2': 'darknet market',
'3': 'bitcoin tumbler',
'4': 'blackmail scam',
    '5': 'sextortion',
'99': 'other'
}
READER_CONFIG = {
'fieldnames': ['id', 'address', 'abuse_type_id', 'abuse_type_other', 'abuser',
'description', 'from_country', 'from_country_code', 'created_at'],
'skip_first_line': True,
'indicator_type': 'Cryptocurrency Address',
'mapping': {
'Value': ('address', None, 'bitcoin:{}'),
'rawaddress': 'address',
'countryname': 'from_country',
'creationdate': 'created_at',
'description': 'description',
'abusetype': ('abuse_type_id', lambda abuse_type_id: ABUSE_TYPE_ID_TO_NAME.get(abuse_type_id))
}
}
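# Illustrative sketch of how READER_CONFIG maps a CSV row to an indicator (row values below are hypothetical):
# the indicator value is built from the 'address' column through the 'bitcoin:{}' template, and 'abusetype' is
# resolved from 'abuse_type_id' via ABUSE_TYPE_ID_TO_NAME, e.g.
#   row:       id='1', address='1BoatSLRHtKNngkdXEeobR76b53LETtpyT', abuse_type_id='4', from_country='US', ...
#   indicator: value='bitcoin:1BoatSLRHtKNngkdXEeobR76b53LETtpyT', fields include rawaddress, countryname,
#              creationdate, description and abusetype='blackmail scam'.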
ABUSE_TYPE_NAME_TO_ID = {
'ransomware': 1,
'darknet market': 2,
'bitcoin tumbler': 3,
'blackmail scam': 4,
    'sextortion': 5,
'other': 99
}
class BitcoinAbuseClient(BaseClient):
FIRST_FETCH_INTERVAL_TO_URL_SUFFIX: Dict[str, str] = {
'Forever': 'forever',
'30 Days': '30d'
}
def __init__(self, base_url, insecure, proxy, api_key, initial_fetch_interval, reader_config, feed_tags, tlp_color,
have_fetched_first_time):
super().__init__(base_url=base_url, verify=not insecure, proxy=proxy)
self.server_url = base_url
self.api_key = api_key
self.initial_fetch_interval = initial_fetch_interval
self.reader_config = reader_config
self.have_fetched_first_time = have_fetched_first_time
self.insecure = insecure
self.feed_tags = feed_tags
self.tlp_color = tlp_color
def report_address(self, address: str, abuse_type_id: int, abuse_type_other: Optional[str],
abuser: str, description: str) -> Dict:
"""
Sends a post request to report an abuse to BitcoinAbuse servers.
Args:
address (str): The address of the abuser.
abuse_type_id (int): An id which indicates which type of abuse was made.
abuse_type_other (Optional[str]): In case abuse_type_id was other, holds information describing the
abuse type.
abuser (str): Information about the abuser (email, name, ...).
description (str): Description of the abuse (may include email sent, etc).
        Returns:
            Dict: the raw JSON response returned for the report creation request.
"""
return self._http_request(
method='POST',
url_suffix='reports/create',
params=assign_params(
api_token=self.api_key,
address=address,
abuse_type_id=abuse_type_id,
abuser=abuser,
description=description,
abuse_type_other=abuse_type_other
)
)
def get_indicators(self) -> Tuple[List[Dict], bool]:
"""
Builds CSV module client and performs the API call to Bitcoin Abuse service.
If the call was successful, returns list of indicators.
Args:
Returns:
- Throws exception if an invalid api key was given or error occurred during the call to
Bitcoin Abuse service.
- Returns indicators list if the call to Bitcoin Abuse service was successful.
"""
bitcoin_address_count_dict: Dict[str, int] = dict()
params = self.build_params_for_csv_module()
csv_module_client = Client(**params)
indicators, no_update = fetch_indicators_command(
client=csv_module_client,
default_indicator_type='Cryptocurrency Address',
auto_detect=False,
limit=0
)
self.assure_valid_response(indicators)
indicators_without_duplicates = []
# in every fetch apart from first fetch, we are only fetching one csv file, so we know there aren't any
# duplicates
if self.have_fetched_first_time:
for indicator in indicators:
update_indicator_occurrences(indicator, bitcoin_address_count_dict)
indicators_without_duplicates = indicators
# in first fetch according to configurations, we might fetch more than one csv file, so we need to remove
# duplicates
else:
indicators_ids = set()
for indicator in indicators:
try:
indicator_id = int(indicator['rawJSON']['id'])
if indicator_id not in indicators_ids:
update_indicator_occurrences(indicator, bitcoin_address_count_dict)
indicators_without_duplicates.append(indicator)
indicators_ids.add(indicator_id)
except ValueError:
demisto.debug(f'The following indicator was found invalid and was skipped: {indicator}')
for indicator in indicators_without_duplicates:
indicator_count = bitcoin_address_count_dict.get(indicator['value'])
indicator['fields']['reportscount'] = indicator_count
indicator['fields']['cryptocurrencyaddresstype'] = 'bitcoin'
return indicators_without_duplicates, no_update
def build_fetch_indicators_url_suffixes(self) -> Set[str]:
"""
        Builds the URL suffixes for the fetch. The default is 'download/1d' unless this is a first fetch,
        which is determined by the user parameter - initial_fetch_interval.
        - If initial_fetch_interval is 'forever' - suffixes will include both '30d' and 'forever', in order to extract
          the most up-to-date data from the Bitcoin Abuse service, as the 'forever' csv file is only updated on the
          15th of each month (see Complete Download in https://www.bitcoinabuse.com/api-docs).
- If initial_fetch_interval is '30d' - suffixes will only include '30d' suffix.
Args:
Returns:
- Set of the url suffixes to be used in the fetch process.
"""
feed_endpoint_suffix = 'download/'
first_feed_interval_url_suffix = self.FIRST_FETCH_INTERVAL_TO_URL_SUFFIX.get(
self.initial_fetch_interval, '30d')
if self.have_fetched_first_time:
return {f'{feed_endpoint_suffix}1d'}
else:
return {feed_endpoint_suffix + first_feed_interval_url_suffix, feed_endpoint_suffix + '30d'}
def build_params_for_csv_module(self):
"""
Builds needed params for CSV Module client in order to fetch indicators from Bitcoin Abuse service.
Args:
Returns:
- Params for building the CSVFeedApiModule client.
"""
params: Dict[str, Any] = dict()
urls_suffixes = self.build_fetch_indicators_url_suffixes()
urls = [f'{self.server_url}{url_suffix}?api_token={self.api_key}' for url_suffix in urls_suffixes]
feed_url_to_config = {url: self.reader_config for url in urls}
params['url'] = urls
params['feed_url_to_config'] = feed_url_to_config
params['delimiter'] = ','
params['encoding'] = 'utf-8'
params['insecure'] = self.insecure
params['feedTags'] = self.feed_tags
params['tlp_color'] = self.tlp_color
return params
@staticmethod
def assure_valid_response(indicators: List[Dict]) -> None:
"""
Receives the indicators fetched from Bitcoin Abuse service, and checks if
the response received is valid.
        When an incorrect api key is used, Bitcoin Abuse returns its login page as the response.
This function checks if the api key given is incorrect by checking if the received
response was the login page.
Throws DemistoException to inform the user of incorrect api key.
Args:
indicators (List[Dict]): The array of indicators fetched.
Returns:
- Throws DemistoException in case an incorrect api key was given.
"""
if indicators and '<html lang="en">' == indicators[0].get('value'):
raise DemistoException('api token inserted is not valid')
''' HELPER FUNCTIONS '''
def update_indicator_occurrences(indicator_obj: Dict, bitcoin_address_count_dict: Dict[str, int]) -> None:
"""
Receives an indicator object 'indicator_obj' and a dict 'bitcoin_address_count_dict' which counts occurrences
of bitcoin addresses.
Updates the occurrences in 'bitcoin_address_count_dict' according to 'indicator_obj' address.
Args:
indicator_obj (Dict): The indicator.
bitcoin_address_count_dict(Dict[str, int]): Bitcoin addresses to occurrences dict.
Returns:
"""
indicator_name = indicator_obj['value']
updated_count = bitcoin_address_count_dict.get(indicator_name, 0) + 1
bitcoin_address_count_dict[indicator_name] = updated_count
''' COMMAND FUNCTIONS '''
def bitcoin_abuse_report_address_command(bitcoin_client: BitcoinAbuseClient, args: Dict) -> CommandResults:
"""
Reports a bitcoin abuse to Bitcoin Abuse service.
Args:
bitcoin_client (BitcoinAbuseClient): Client object to perform request.
args (Dict): Demisto args for report address command.
Returns:
        str: 'bitcoin address (address reported) by abuser (abuser reported) was
        reported to BitcoinAbuse API' if the HTTP request was successful.
"""
abuse_type_id = ABUSE_TYPE_NAME_TO_ID.get(args.get('abuse_type', ''))
abuse_type_other = args.get('abuse_type_other')
address = args.get('address', '')
abuser = args.get('abuser', '')
description = args.get('description', '')
if abuse_type_id is None:
raise DemistoException('Bitcoin Abuse: invalid type of abuse, please insert a correct abuse type')
if abuse_type_id == ABUSE_TYPE_NAME_TO_ID['other'] and abuse_type_other is None:
raise DemistoException('Bitcoin Abuse: abuse_type_other is mandatory when abuse type is other')
http_response = bitcoin_client.report_address(address=address,
abuse_type_id=abuse_type_id,
abuse_type_other=abuse_type_other,
abuser=abuser,
description=description)
if argToBoolean(http_response.get('success', False)):
return CommandResults(
readable_output=f'Bitcoin address {address} by abuse bitcoin user {abuser}'
f' was reported to BitcoinAbuse service'
)
else:
failure_message = http_response.get('response', 'Unknown failure reason')
raise DemistoException(f'bitcoin report address did not succeed: {failure_message}')
def test_module_command(bitcoin_client: BitcoinAbuseClient):
"""
Performs a fetch indicators flow to validate the configuration params.
Args:
bitcoin_client (BitcoinAbuseClient): Client object.
Returns:
'ok' if the call to Bitcoin Abuse service was successful and command is test_module.
"""
bitcoin_client.get_indicators()
return 'ok'
def bitcoin_abuse_fetch_indicators_command(bitcoin_client: BitcoinAbuseClient) -> None:
"""
Wrapper for fetching indicators from Bitcoin Abuse service.
Args:
bitcoin_client (BitcoinAbuseClient): Client object.
Returns:
"""
indicators, no_update = bitcoin_client.get_indicators()
if is_demisto_version_ge('6.5.0'):
for b in batch(indicators, batch_size=2000):
demisto.createIndicators(b, noUpdate=no_update) # type: ignore
    else:
        # noUpdate is only supported by server versions 6.5.0 and above
        for b in batch(indicators, batch_size=2000):
            demisto.createIndicators(b)  # type: ignore
demisto.setIntegrationContext({'have_fetched_first_time': True})
def bitcoin_abuse_get_indicators_command(bitcoin_client: BitcoinAbuseClient, args: Dict):
"""
Wrapper for retrieving indicators from the feed to the war-room.
Args:
bitcoin_client (BitcoinAbuseClient): Client object.
        args (Dict): Demisto args.
Returns:
CommandResults.
"""
indicators, _ = bitcoin_client.get_indicators()
limit = arg_to_number(args.get('limit', 50), 'limit')
truncated_indicators_list = indicators[:limit]
return CommandResults(
readable_output=tableToMarkdown('Indicators', truncated_indicators_list,
headers=['value', 'type', 'fields']),
raw_response=truncated_indicators_list
)
def main() -> None:
params = demisto.params()
command = demisto.command()
demisto.debug(f'Bitcoin Abuse: Command being called is {demisto.command()}')
api_key = params.get('api_key', '')
insecure = params.get('insecure', False)
proxy = params.get('proxy', False)
feed_tags = argToList(params.get('feedTags'))
tlp_color = params.get('tlp_color')
initial_fetch_interval = params.get('initial_fetch_interval', '30 Days')
have_fetched_first_time = argToBoolean(demisto.getIntegrationContext().get('have_fetched_first_time', False))
try:
bitcoin_client = BitcoinAbuseClient(
base_url=SERVER_URL,
insecure=insecure,
proxy=proxy,
api_key=api_key,
initial_fetch_interval=initial_fetch_interval,
reader_config=READER_CONFIG,
feed_tags=feed_tags,
tlp_color=tlp_color,
have_fetched_first_time=have_fetched_first_time)
if command == 'test-module':
return_results(test_module_command(bitcoin_client))
elif command == 'fetch-indicators':
bitcoin_abuse_fetch_indicators_command(bitcoin_client)
elif command == 'bitcoinabuse-get-indicators':
return_results(bitcoin_abuse_get_indicators_command(bitcoin_client, demisto.args()))
elif command == 'bitcoinabuse-report-address':
return_results(bitcoin_abuse_report_address_command(bitcoin_client, demisto.args()))
# Log exceptions and return errors
except Exception as e:
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Scripts/CheckIfSubdomain/CheckIfSubdomain_test.py
```python
import pytest
import demistomock as demisto
CIDR_RANGES = [
('issubdomain.good.com,anothersubdomain.good.com,notasubdomain.bad.com', 'good.com', 3, [True, True, False]),
('"issubdomain.good.com, anothersubdomain.good.com, notasubdomain.bad.com"', 'good.com', 3, [True, True, False]),
('subdomain.good.com,notsubdomain.bad.com', '*.good.com', 2, [True, False]),
('subdomain.good.com,notsubdomain.bad.com,subdomain.stillgood.com', '*.good.com,stillgood.com', 3, [True, False, True]),
('subdomain', 'good.com', 1, [False]), # invalid internal domain
('subdomain.good.com', 'com', 1, [False]), # invalid domain
]
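# Each tuple is (left, right, expected demisto.results call count, expected per-domain results): e.g. the first case
# checks three candidate domains against 'good.com' and expects [True, True, False].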
@pytest.mark.parametrize('left,right,call_count,result', CIDR_RANGES)
def test_main(mocker, left, right, call_count, result):
from CheckIfSubdomain import main
mocker.patch.object(demisto, 'args', return_value={
'left': left,
'right': right
})
mocker.patch.object(demisto, 'results')
main()
assert demisto.results.call_count == call_count
for i in range(len(result)):
results = demisto.results.call_args_list[i][0][0]
assert results == result[i]
```
#### File: Scripts/CopyContextToField/CopyContextToField.py
```python
from CommonServerPython import *
def get_context(incident_id):
res = demisto.executeCommand("getContext", {'id': incident_id})
try:
return res[0]['Contents'].get('context') or {}
except Exception:
return {}
res = demisto.executeCommand("getIncidents", {"query": demisto.args()['incidentsQuery'],
'limit': int(demisto.args()['limit'])})
incidents = res[0]['Contents']['data']
src_context_key = demisto.args()['sourceContextKey']
target_incident_field = demisto.args()['targetIncidentField']
list_separator = demisto.args()['listSeparator']
success_count = 0
failed_count = 0
skipped_count = 0
for i in incidents:
incident_id = i['id']
context = get_context(incident_id)
value = demisto.dt(context, src_context_key)
if isinstance(value, list) and len(value) > 0:
if len(value) == 1:
value = value[0]
elif isinstance(value[0], STRING_TYPES):
value = list_separator.join(value)
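    # Illustrative example of the flattening above: ['a.com'] becomes 'a.com'; ['a.com', 'b.com'] with
    # listSeparator=',' becomes 'a.com,b.com'; values that resolve to dicts or lists of dicts are skipped
    # by the check below.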
if value and not isinstance(value, list) and not isinstance(value, dict):
res = demisto.executeCommand("setIncident", {target_incident_field: value, 'id': i['id']})
if isError(res[0]):
failed_count += 1
else:
success_count += 1
else:
skipped_count += 1
if success_count > 0:
return_results(f'Update incidents: {success_count} success')
if skipped_count > 0:
return_results(f'Skipped {skipped_count} incidents due to missing value')
if failed_count > 0:
return_results(f'Failed to update {failed_count} incidents with setIncident error')
```
#### File: Integrations/CortexCoreIR/CortexCoreIR_test.py
```python
import json
Core_URL = 'https://api.xdrurl.com'
def load_test_data(json_path):
with open(json_path) as f:
return json.load(f)
def test_report_incorrect_wildfire_command(mocker):
"""
Given:
- FilterObject and name to get by exclisions.
When
- A user desires to get exclusions.
Then
- returns markdown, context data and raw response.
"""
from CortexCoreIR import report_incorrect_wildfire_command, Client
wildfire_response = load_test_data('./test_data/wildfire_response.json')
mock_client = Client(base_url=f'{Core_URL}/public_api/v1', headers={})
mocker.patch.object(mock_client, 'report_incorrect_wildfire', return_value=wildfire_response)
file_hash = "11d69fb388ff59e5ba6ca217ca04ecde6a38fa8fb306aa5f1b72e22bb7c3a252"
args = {
"email": "<EMAIL>",
"file_hash": file_hash,
"new_verdict": 0,
"reason": "test1"
}
res = report_incorrect_wildfire_command(client=mock_client, args=args)
assert res.readable_output == f'Reported incorrect WildFire on {file_hash}'
```
#### File: Scripts/ReadNetstatFile/ReadNetstatFile.py
```python
from CommonServerPython import *
COMMAND_NAME = 'netstat'
def get_netstat_file_name(command_files):
if command_files and isinstance(command_files, dict):
netstat_files = command_files.get(COMMAND_NAME, [])
if netstat_files:
if isinstance(netstat_files, list):
# we want to get the last file name
return netstat_files[len(netstat_files) - 1].get('Filename')
elif isinstance(netstat_files, dict):
return netstat_files.get('Filename') # type:ignore
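# For illustration (hypothetical context shape): get_netstat_file_name({'netstat': [{'Filename': 'a.txt'},
# {'Filename': 'b.txt'}]}) returns 'b.txt' (the most recent file), while {'netstat': {'Filename': 'a.txt'}}
# returns 'a.txt'.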
def get_file_name_from_context() -> str:
crowdstrike_context = demisto.context().get('CrowdStrike', {})
all_command_files = []
if isinstance(crowdstrike_context, list):
for ctx in crowdstrike_context:
if cmd_ctx := ctx.get('Command'):
all_command_files.append(cmd_ctx)
elif isinstance(crowdstrike_context, dict) and (cmd_ctx := crowdstrike_context.get('Command')):
all_command_files.append(cmd_ctx)
for command_file in all_command_files[::-1]: # get last file in context
if file_name := get_netstat_file_name(command_file):
return file_name
return ""
def get_file_entry_id(file_name):
file_entry_id = ""
if file_name:
entries = demisto.executeCommand('getEntries', {})
for entry in entries:
            file_entry = demisto.get(entry, 'File')
            # war-room entries without an attached file have no 'File' field, so guard before comparing
            is_correct_file = bool(file_entry) and file_name.lower() == file_entry.lower()
if is_correct_file:
file_entry_id = entry['ID']
break
return file_entry_id
def get_file_content(file_entry_id):
if file_entry_id:
res = execute_command('getFilePath', {'id': file_entry_id})
file_path = res.get('path')
with open(file_path, 'r') as f:
file_content = f.read()
return file_content
def main():
file_name = get_file_name_from_context()
if file_name:
demisto.results(get_file_content(get_file_entry_id(file_name)))
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Integrations/CyberArkIdentityEventCollector/CyberArkIdentityEventCollector.py
```python
import urllib3
from SiemApiModule import * # noqa: E402
urllib3.disable_warnings()
# ----------------------------------------- GLOBAL VARIABLES -----------------------------------------
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
EVENT_FIELDS = [
'ID',
'EventType',
'AuthMethod',
'DirectoryServiceUuid',
'DirectoryServicePartnerName',
'EntityName',
'EntityType',
'EntityUuid',
'FromIPAddress',
'ImpersonatorUuid',
'Level',
'NewEntity',
'NormalizedUser',
'OldEntity',
'RequestDeviceOS',
'RequestHostName',
'RequestIsMobileDevice',
'Tenant',
'UserGuid',
'WhenLogged',
'WhenOccurred',
'ObjectName',
'ObjectType',
'RoleId',
'Role',
'Changer',
'ChangerUuid',
'Result',
'Alias',
'ReplaceDomain',
'Type',
'Id',
'ProfileName',
'Thumbprint',
'TargetUserID',
'TargetUser',
'Uuid',
'Key',
'Value',
'UserState',
'PreviousUserState',
'FailedMessage',
'Exception',
'DSType',
'DSName',
'DSUuid',
'ImpersonateTargetUuid',
'ImpersonateTargetName',
'EmailAddress',
'Session',
'MfaResult',
'MfaReason',
'SetPath',
'ProxyId',
'MachineName',
'ClientAddress',
'ConnectorUuid',
'HostAddress',
'UserName',
'Target',
'Cname',
'OldState',
'NewState',
'AffectedTenant',
'OU',
'DeviceID',
'EnrollProfileUser',
'LocalAccountUuid',
'DeviceName',
'FailureReason',
'From',
'To',
'Description',
'DeleteReason',
'LicenseType',
'NewLicenseType',
'OldLicenseType',
'ApplicationType',
'ApplicationName',
'ApplicationID',
'MobileAppType',
'AppId',
'AppName',
'JobUniqueId',
'SyncAction',
'SyncActionReason',
'SyncResult',
'SessionId',
]
class CyberArkIdentityEventsOptions(IntegrationOptions):
app_id: str
class CyberArkIdentityEventsRequest(IntegrationHTTPRequest):
method = Method.POST
headers = {'Accept': '*/*', 'Content-Type': 'application/json'}
class CyberArkIdentityEventsClient(IntegrationEventsClient):
request: IntegrationHTTPRequest
options: CyberArkIdentityEventsOptions
def __init__(
self,
request: CyberArkIdentityEventsRequest,
options: CyberArkIdentityEventsOptions,
credentials: Credentials,
session=requests.Session(),
) -> None:
self.access_token = None
self.credentials = credentials
super().__init__(request, options, session)
def set_request_filter(self, after: Any):
return after
def authenticate(self):
credentials = base64.b64encode(f'{self.credentials.identifier}:{self.credentials.password}'.encode()).decode()
request = IntegrationHTTPRequest(
method=Method.POST,
url=f"{self.request.url.removesuffix('/RedRock/Query')}/oauth2/token/{self.options.app_id}",
headers={'Authorization': f"Basic {credentials}"},
data={'grant_type': 'client_credentials', 'scope': 'siem'},
verify=not self.request.verify,
)
response = self.call(request)
if response.ok:
demisto.debug('authenticated successfully')
self.access_token = response.json()['access_token']
self.request.headers['Authorization'] = f'Bearer {self.access_token}'
else:
demisto.debug(f'authentication failed: {response.json()}')
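    # For illustration (hypothetical tenant host): if the query URL is https://example.my.idaptive.app/RedRock/Query
    # and app_id is 'my_siem_app', authenticate() posts a client-credentials grant with a Basic authorization header
    # to https://example.my.idaptive.app/oauth2/token/my_siem_app and stores the returned bearer token on the
    # request headers for the subsequent query calls.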
class CyberArkIdentityGetEvents(IntegrationGetEvents):
client: CyberArkIdentityEventsClient
@staticmethod
def get_last_run_ids(events: list) -> list:
return [event.get('ID') for event in events]
@staticmethod
def get_last_run_time(events: list) -> str:
# The date is in timestamp format and looks like {'WhenOccurred': '/Date(1651483379362)/'}
last_timestamp = max([int(e.get('WhenOccurred', '').removesuffix(')/').removeprefix('/Date(')) for e in events])
return datetime.utcfromtimestamp(last_timestamp / 1000).strftime(DATETIME_FORMAT)
def get_last_run(self, events: list) -> dict: # type: ignore
return {'from': self.get_last_run_time(events), 'ids': self.get_last_run_ids(events)}
def _iter_events(self):
self.client.authenticate()
result = self.client.call(self.client.request).json()['Result']
if events := result.get('Results'):
fetched_events_ids = demisto.getLastRun().get('ids', [])
yield [event.get('Row') for event in events if event.get('Row', {}).get('ID') not in fetched_events_ids]
def get_request_params(**kwargs: dict) -> dict:
fetch_from = str(kwargs.get('from', '3 days'))
default_from_day = datetime.now() - timedelta(days=3)
from_time = datetime.strftime(dateparser.parse(fetch_from, settings={'TIMEZONE': 'UTC'}) or default_from_day, DATETIME_FORMAT)
params = {
'url': f"{str(kwargs.get('url', '')).removesuffix('/')}/RedRock/Query",
'data': json.dumps({
'Script': f"Select {', '.join(EVENT_FIELDS)} from Event where WhenOccurred > '{from_time}'",
'args': {
'PageNumber': 1,
'PageSize': kwargs.get('limit', 1000)
}
}),
'verify': not kwargs.get('insecure')
}
return params
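# Illustrative sketch of what this builds (field list abbreviated, host name hypothetical): with
# url='https://example.my.idaptive.app' the query is posted to https://example.my.idaptive.app/RedRock/Query, and the
# POST body's 'Script' becomes "Select ID, EventType, ... from Event where WhenOccurred > '<from_time>'" with the
# PageNumber/PageSize args alongside it.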
def main(command: str, demisto_params: dict):
credentials = Credentials(**demisto_params.get('credentials', {}))
options = CyberArkIdentityEventsOptions(**demisto_params)
request_params = get_request_params(**demisto_params)
request = CyberArkIdentityEventsRequest(**request_params)
client = CyberArkIdentityEventsClient(request, options, credentials)
get_events = CyberArkIdentityGetEvents(client, options)
try:
if command == 'test-module':
get_events.run()
demisto.results('ok')
if command in ('fetch-events', 'cyberarkidentity-get-events'):
events = get_events.run()
if command == 'fetch-events' or demisto_params.get('should_push_events'):
send_events_to_xsiam(events, vendor=demisto_params.get('vendor', 'cyberark'),
product=demisto_params.get('product', 'identity'))
if events:
last_run = get_events.get_last_run(events)
demisto.debug(f'Set last run to {last_run}')
demisto.setLastRun(last_run)
if command == 'cyberarkidentity-get-events':
command_results = CommandResults(
readable_output=tableToMarkdown(
'CyberArk Identity RedRock records', events, removeNull=True, headerTransform=pascalToSpace
),
raw_response=events,
)
return_results(command_results)
except Exception as e:
return_error(str(e))
if __name__ in ('__main__', '__builtin__', 'builtins'):
    # Args take precedence over params, and the saved last run takes precedence over both
demisto_params_ = demisto.params() | demisto.args() | demisto.getLastRun()
main(demisto.command(), demisto_params_)
```
#### File: Integrations/Cylance_Protect_v2/Cylance_Protect_v2_test.py
```python
import json
import demistomock as demisto
from CommonServerPython import Common
from Cylance_Protect_v2 import create_dbot_score_entry, translate_score, FILE_THRESHOLD, \
get_device, get_device_by_hostname, update_device, get_device_threats, get_policies, create_zone, get_zones, \
get_zone, update_zone, get_threat, get_threats, get_threat_devices, get_list, get_list_entry_by_hash, \
add_hash_to_list, delete_hash_from_lists, delete_devices, get_policy_details, create_instaquery, list_instaquery, \
get_instaquery_result
import Cylance_Protect_v2
THREAT_OUTPUT = {u'cylance_score': -1.0, u'name': u'name',
u'classification': u'Malware',
u'sub_classification': u'Virus',
u'av_industry': None,
u'unique_to_cylance': False,
u'last_found': u'2019-01-28T23:36:58',
u'global_quarantined': False,
u'file_size': 2177386,
u'safelisted': False,
u'sha256': u'055D7A25DECF6769BF4FB2F3BC9FD3159C8B42972818177E44975929D97292DE',
u'md5': u'B4EA38EB798EA1C1E067DFD176B882BB',
}
DEVICE_OUTPUT = {u'update_available': False,
u'date_last_modified': u'2020-11-09T23:10:24',
u'distinguished_name': u'',
u'ip_addresses': [u'1.1.1.1'],
u'dlcm_status': u'Unknown',
u'background_detection': False,
u'id': u'8e836c98-102e-4332-b00d-81dcb7a9b6f7',
u'days_to_deletion': u'Unknown',
u'os_version': u'Microsoft Windows 10 Education',
u'state': u'Offline',
u'date_first_registered': u'2020-11-09T22:28:48',
u'policy': {u'id': u'32e4aacd-7698-4ef0-93e8-3e6f1f5c6857', u'name': u'Default'},
u'host_name': u'DESKTOP-M7E991U',
u'os_kernel_version': u'10.0.0',
u'mac_addresses': [u'00-0C-29-41-20-14'],
u'last_logged_in_user': u'DESKTOP-M7E991U\\scott.white',
u'name': u'DESKTOP-M7E991U',
u'date_offline': u'2020-11-09T23:10:21.902',
u'products': [{u'status': u'Offline', u'version': u'2.0.1500', u'name': u'protect'}],
u'update_type': None,
u'is_safe': True,
u'agent_version': u'2.0.1500'
}
EXPECTED_DEVICE = {'Name': u'DESKTOP-M7E991U',
'Hostname': u'DESKTOP-M7E991U',
'State': u'Offline',
'DateFirstRegistered': u'2020-11-09T22:28:48',
'Policy': {'ID': u'32e4aacd-7698-4ef0-93e8-3e6f1f5c6857', 'Name': u'Default'},
'OSVersion': u'Microsoft Windows 10 Education',
'LastLoggedInUser': u'DESKTOP-M7E991U\\scott.white',
'MACAdress': [u'00-0C-29-41-20-14'],
'BackgroundDetection': False,
'IsSafe': True,
'UpdateAvailable': False,
'ID': u'8e836c98-102e-4332-b00d-81dcb7a9b6f7',
'DateLastModified': u'2020-11-09T23:10:24',
'DateOffline': u'2020-11-09T23:10:21.902',
'IPAddress': [u'1.1.1.1']
}
EXPECTED_HOSTNAME = {'Name': u'DESKTOP-M7E991U',
'Hostname': u'DESKTOP-M7E991U',
'State': u'Offline',
'DateFirstRegistered': u'2020-11-09T22:28:48',
'Policy': {'ID': u'32e4aacd-7698-4ef0-93e8-3e6f1f5c6857', 'Name': u'Default'},
'OSVersion': u'Microsoft Windows 10 Education',
'LastLoggedInUser': u'DESKTOP-M7E991U\\scott.white',
'MACAdress': [u'00-0C-29-41-20-14'],
'BackgroundDetection': False,
'IsSafe': True,
'UpdateAvailable': False,
'ID': u'8e836c98-102e-4332-b00d-81dcb7a9b6f7',
'DateLastModified': u'2020-11-09T23:10:24',
'AgentVersion': u'2.0.1500',
'DateOffline': u'2020-11-09T23:10:21.902',
'IPAddress': [u'1.1.1.1']
}
DEVICE_THREAT_OUTPUT = {u'sha256': u'0F427B33B824110427B2BA7BE20740B45EA4DA41BC1416DD55771EDFB0C18F09',
u'name': u'name',
u'classification': u'Malware',
u'date_found': u'2018-09-17T07:14:03',
u'file_status': u'Default',
u'cylance_score': -1.0,
u'file_path': u'C:\\Ransomware Samples\\AutoitLocker.exe',
u'sub_classification': u'Trojan'
}
POLICIES_OUTPUT = {u'zone_count': 1,
u'name': u'fff',
u'date_modified': u'2020-04-13T10:32:43.5072251',
u'device_count': 0,
u'date_added': u'2020-04-13T10:32:43.5072251',
u'id': u'980fad21-b119-4cc4-ac97-2b2c035b4666'
}
EXPECTED_POLICIES = {u'DateAdded': u'2020-04-13T10:32:43.5072251',
u'Name': u'fff',
u'ZoneCount': 1,
u'DateModified': u'2020-04-13T10:32:43.5072251',
u'DeviceCount': 0,
u'Id': u'980fad21-b119-4cc4-ac97-2b2c035b4666'
}
ZONE_OUTPUT = {u'date_created': u'2022-02-03T15:52:30.4108727Z',
u'policy_id': u'980fad21-b119-4cc4-ac97-2b2c035b4666',
u'id': u'1998235b-a6ab-4043-86b5-81b0dc63887b',
u'criticality': u'Low',
u'name': u'name'
}
ZONES_OUTPUT = {u'name': u'name',
u'criticality': u'Low',
u'date_modified': u'2022-02-03T15:52:30',
u'zone_rule_id': None,
u'update_type': u'Production',
u'date_created': u'2022-02-03T15:52:30',
u'id': u'1998235b-a6ab-4043-86b5-81b0dc63887b',
u'policy_id': u'980fad21-b119-4cc4-ac97-2b2c035b4666'
}
EXPECTED_ZONES = {u'Name': u'name',
u'Criticality': u'Low',
u'UpdateType': u'Production',
u'DateCreated': u'2022-02-03T15:52:30',
u'PolicyId': u'980fad21-b119-4cc4-ac97-2b2c035b4666',
u'Id': u'1998235b-a6ab-4043-86b5-81b0dc63887b',
u'DateModified': u'2022-02-03T15:52:30'
}
THREAT_DEVICES_OUTPUT = {u'name': u'DESKTOP-M7E991U',
u'ip_addresses': [u'1.1.1.1'],
u'mac_addresses': [u'00-0C-29-59-FB-FD'],
u'file_path': u'file path',
u'state': u'OffLine',
u'date_found': u'2019-01-28T23:36:58',
u'file_status': u'Quarantined',
u'agent_version': u'2.0.1500',
u'id': u'<PASSWORD>',
u'policy_id': u'2f184387-4cb0-4913-8e73-9c13a3af3470'
}
EXPECTED_THREAT_DEVICES = {'Path': [{'FilePath': u'file path'}],
'SHA256': u'055D7A25DECF6769BF4FB2F3BC9FD3159C8B42972818177E44975929D97292DE'
}
LIST_OUTPUT = {u'category': u'Admin Tool',
u'cylance_score': None,
u'name': u'',
u'classification': u'',
u'sub_classification': u'',
u'av_industry': None,
u'reason': u'Added by Demisto',
u'added': u'2018-11-13T13:39:07',
u'list_type': u'GlobalSafe',
u'sha256': u'234E5014C239FD89F2F3D56091B87763DCD90F6E3DB42FD2FA1E0ABE05AF0487',
u'added_by': u'1<PASSWORD>',
u'md5': u''
}
EXPECTED_LIST = {u'Category': u'Admin Tool',
u'Added': u'2018-11-13T13:39:07',
'SHA256': u'234E5014C239FD89F2F3D56091B87763DCD90F6E3DB42FD2FA1E0ABE05AF0487',
u'AddedBy': u'14<PASSWORD>-d6<PASSWORD>',
u'Reason': u'Added by Demisto',
u'ListType': u'GlobalSafe',
u'Sha256': u'234E5014C239FD89F2F3D56091B87763DCD90F6E3DB42FD2FA1E0ABE05AF0487'
}
POLICY_OUTPUT = {u'memoryviolation_actions': {u'memory_violations': [],
u'memory_exclusion_list': [],
u'memory_violations_ext': []},
u'logpolicy': {u'log_upload': None,
u'maxlogsize': u'100',
u'retentiondays': u'30'
},
u'file_exclusions': [],
u'checksum': u'987978644c220a71f6fa67685b06571d',
u'filetype_actions': {u'suspicious_files': [{u'file_type': u'executable', u'actions': u'0'}],
u'threat_files': [{u'file_type': u'executable', u'actions': u'0'}]},
u'policy_name': u'fff',
u'policy_utctimestamp': u'/Date(1586773964507+0000)/',
u'policy': [{u'name': u'auto_blocking', u'value': u'0'},
{u'name': u'auto_uploading', u'value': u'0'},
{u'name': u'threat_report_limit', u'value': u'500'},
{u'name': u'low_confidence_threshold',
u'value': u'-600'},
{u'name': u'full_disc_scan', u'value': u'0'},
{u'name': u'watch_for_new_files', u'value': u'0'},
{u'name': u'memory_exploit_detection', u'value': u'0'},
{u'name': u'trust_files_in_scan_exception_list', u'value': u'0'},
{u'name': u'logpolicy', u'value': u'0'},
{u'name': u'script_control', u'value': u'0'},
{u'name': u'prevent_service_shutdown', u'value': u'0'},
{u'name': u'scan_max_archive_size', u'value': u'0'},
{u'name': u'sample_copy_path', u'value': None},
{u'name': u'kill_running_threats', u'value': u'0'},
{u'name': u'show_notifications', u'value': u'0'},
{u'name': u'optics_set_disk_usage_maximum_fixed',
u'value': u'1000'},
{u'name': u'optics_malware_auto_upload', u'value': u'0'},
{u'name': u'optics_memory_defense_auto_upload', u'value': u'0'},
{u'name': u'optics_script_control_auto_upload', u'value': u'0'},
{u'name': u'optics_application_control_auto_upload', u'value': u'0'},
{u'name': u'optics_sensors_dns_visibility', u'value': u'0'},
{u'name': u'optics_sensors_private_network_address_visibility', u'value': u'0'},
{u'name': u'optics_sensors_windows_event_log_visibility', u'value': u'0'},
{u'name': u'optics_sensors_advanced_powershell_visibility', u'value': u'0'},
{u'name': u'optics_sensors_advanced_wmi_visibility', u'value': u'0'},
{u'name': u'optics_sensors_advanced_executable_parsing', u'value': u'0'},
{u'name': u'optics_sensors_enhanced_process_hooking_visibility', u'value': u'0'},
{u'name': u'device_control', u'value': u'0'},
{u'name': u'optics', u'value': u'0'},
{u'name': u'auto_delete', u'value': u'0'},
{u'name': u'days_until_deleted', u'value': u'14'},
{u'name': u'pdf_auto_uploading', u'value': u'0'},
{u'name': u'ole_auto_uploading', u'value': u'0'},
{u'name': u'docx_auto_uploading', u'value': u'0'},
{u'name': u'python_auto_uploading', u'value': u'0'},
{u'name': u'autoit_auto_uploading', u'value': u'0'},
{u'name': u'powershell_auto_uploading', u'value': u'0'},
{u'name': u'data_privacy', u'value': u'0'},
{u'name': u'custom_thumbprint', u'value': None},
{u'name': u'scan_exception_list', u'value': []}],
u'policy_id': u'980fad21-b119-4cc4-ac97-2b2c035b4666'
}
EXPECTED_POLICY = {'Timestamp': '2020-04-13T10:32:44.507000+00:00',
'ID': u'980fad21-b119-4cc4-ac97-2b2c035b4666',
'Name': u'fff'
}
INSTAQUERY_OUTPUT = {u'match_type': u'Fuzzy',
u'name': u'Test Instaquery',
u'created_at': u'2022-05-23T00:02:37Z',
u'artifact': u'File',
u'case_sensitive': False,
u'zones': [u'6608CA0E88C64647B276271CC5EA4295'],
u'progress': {},
u'match_value_type': u'Path',
u'results_available': False,
u'match_values': [u'cyoptics.exe'],
u'id': u'CBEB9E9C9A9A41D1BD06C87464F5E2CD',
u'description': u'Test only'}
INSTAQUERY_RESULT_OUTPUT = {
u'status': u'done',
u'id': u'CBEB9E9C9A9A41D1BD06C87464F5E2CD',
u'result': [
{u'@timestamp': 1653264158.3315804,
u'HostName': u'windows-server-',
u'DeviceId': u'65DB26864E364409B50DDC23291A3511',
u'@version': u'1',
u'CorrelationId': u'CBEB9E9C9A9A41D1BD06C87464F5E2CD',
u'Result': u'{"FirstObservedTime": "1970-01-01T00:00:00.000Z", "LastObservedTime": "1970-01-01T00:00:00.000Z", '
u'"Uid": "dHrtLYQzbt9oJPxO8HaeyA==", "Type": "File", "Properties": {"Path": '
u'"c:\\\\program files\\\\cylance\\\\optics\\\\cyoptics.exe", "CreationDateTime": '
u'"2021-03-29T22:34:14.000Z", "Md5": "A081D3268531485BF95DC1A15A5BC6B0", "Sha256": '
u'"256809AABD3AB57949003B9AFCB556A9973222CDE81929982DAE7D306648E462", "Owner": "NT AUTHORITY\\\\SYSTEM", '
u'"SuspectedFileType": "Executable/PE", "FileSignature": "", "Size": "594104", "OwnerUid": '
u'"P3p6fdq3FlMsld6Rz95EOA=="}}'
}
]
}
LIST_INSTAQUERY_OUTPUT = {
u'page_number': 1,
u'page_items': [
{
u'match_type': u'Fuzzy',
u'name': u'Test Insta continue 84',
u'created_at': u'2022-05-23T00:02:37Z',
u'artifact': u'File', u'case_sensitive': False,
u'zones': [u'6608CA0E88C64647B276271CC5EA4295'],
u'progress': {u'queried': 1, u'responded': 1},
u'match_value_type': u'Path',
u'results_available': True,
u'match_values': [u'cyoptics.exe'],
u'id': u'CBEB9E9C9A9A41D1BD06C87464F5E2CD',
u'description': u'Test only'
},
{
u'match_type': u'Exact',
u'name': u'CylanceProtectv2InstaQueryTest Test creation 2',
u'created_at': u'2022-05-20T09:15:09Z',
u'artifact': u'File',
u'case_sensitive': True,
u'zones': [u'6608CA0E88C64647B276271CC5EA4295'],
u'progress': {u'queried': 1, u'responded': 1},
u'match_value_type': u'Path',
u'results_available': False,
u'match_values': [u'exe'],
u'id': u'BC522393DD6E666C9EA9A999767EF5DB',
u'description': u'Description here'
}
],
u'total_pages': 13,
u'total_number_of_items': 26,
u'page_size': 2
}
def test_create_dbot_score_entry():
"""
Given
- a threat and a dbot score
When
- calls the function create_dbot_score_entry
Then
- checks if dbot_score_entry is from type DBotScore
"""
threat = THREAT_OUTPUT
dbot_score = translate_score(threat['cylance_score'], FILE_THRESHOLD)
dbot_score_entry = create_dbot_score_entry(THREAT_OUTPUT, dbot_score)
assert isinstance(dbot_score_entry, Common.DBotScore)
def test_get_device(mocker):
"""
Given
    - a device and demisto args
When
- calls the function get_device
Then
- checks if the output is as expected
"""
args = {'id': '8e836c98-102e-4332-b00d-81dcb7a9b6f7'}
mocker.patch.object(Cylance_Protect_v2, "get_device_request", return_value=DEVICE_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_device()
contents = demisto_results.call_args[0][0]
assert EXPECTED_DEVICE.items() <= contents.get('EntryContext').get('CylanceProtect.Device(val.ID && val.ID === '
'obj.ID)').items()
def test_get_device_by_hostname(mocker):
"""
Given
    - a device and demisto args
When
- calls the function get_device_by_hostname
Then
- checks if the output is as expected
"""
args = {'hostname': 'DESKTOP-M7E991U'}
mocker.patch.object(Cylance_Protect_v2, "get_hostname_request", return_value=DEVICE_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_device_by_hostname()
contents = demisto_results.call_args[0][0]
assert EXPECTED_HOSTNAME.items() <= contents.get('EntryContext').get('CylanceProtect.Device(val.ID && val.ID === '
'obj.ID)').items()
def test_update_device(mocker):
"""
Given
- demisto args
When
- calls the function update_device
Then
- checks if the output is as expected
"""
args = {'id': '8e836c98-102e-4332-b00d-81dcb7a9b6f7',
'name': 'DESKTOP-M7E991U',
'policyId': '32e4aacd-7698-4ef0-93e8-3e6f1f5c6857'
}
mocker.patch.object(Cylance_Protect_v2, "update_device_request", return_value=DEVICE_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
update_device()
contents = demisto_results.call_args[0][0]
assert 'Device 8e836c98-102e-4332-b00d-81dcb7a9b6f7 was updated successfully.' in contents.get('HumanReadable')
def test_get_device_threats(mocker):
"""
Given
- demisto args
When
- calls the function get_device_threats
Then
- checks if the output is as expected
"""
args = {'id': 'dbdb7945-369a-4eba-a364-42f2e5f92cc9', 'threshold': -59}
mocker.patch.object(Cylance_Protect_v2, "get_device_threats_request",
return_value={'page_items': [DEVICE_THREAT_OUTPUT]}
)
mocker.patch.object(Cylance_Protect_v2, "translate_score", return_value=1)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_device_threats()
contents = demisto_results.call_args[0][0]
assert u'0F427B33B824110427B2BA7BE20740B45EA4DA41BC1416DD55771EDFB0C18F09' == \
contents.get('EntryContext').get('File')[0].get('SHA256')
def test_get_policies(mocker):
"""
Given
- demisto args
When
- calls the function get_policies
Then
- checks if the output is as expected
"""
args = {'hostname': 'DESKTOP-M7E991U'}
mocker.patch.object(Cylance_Protect_v2, "get_policies_request", return_value={'page_items': [POLICIES_OUTPUT]})
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_policies()
contents = demisto_results.call_args[0][0]
assert EXPECTED_POLICIES.items() <= contents.get('EntryContext').get(
'CylanceProtect.Policies(val.id && val.id === obj.id)')[0].items()
def test_create_zone(mocker):
"""
Given
- demisto args
When
- calls the function create_zone
Then
- checks if the output is as expected
"""
args = {'policy_id': '980fad21-b119-4cc4-ac97-2b2c035b4666',
'name': 'name',
'criticality': 'Low'
}
mocker.patch.object(Cylance_Protect_v2, "create_zone_request", return_value=ZONE_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
create_zone()
contents = demisto_results.call_args[0][0]
assert 'Zone name was created successfully.' in contents.get('HumanReadable')
def test_get_zones(mocker):
"""
Given
- demisto args
When
    - calls the function get_zones
Then
- checks if the output is as expected
"""
args = {'hostname': 'DESKTOP-M7E991U'}
mocker.patch.object(Cylance_Protect_v2, "get_zones_request", return_value={'page_items': [ZONES_OUTPUT]})
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_zones()
contents = demisto_results.call_args[0][0]
assert EXPECTED_ZONES.items() <= \
contents.get('EntryContext').get('CylanceProtect.Zones(val.Id && val.Id === obj.Id)')[0].items()
def test_get_zone(mocker):
"""
Given
- demisto args
When
- calls the function get_zone
Then
- checks if the output is as expected
"""
args = {'id': '1998235b-a6ab-4043-86b5-81b0dc63887b'}
mocker.patch.object(Cylance_Protect_v2, "get_zone_request", return_value=ZONES_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_zone()
contents = demisto_results.call_args[0][0]
assert EXPECTED_ZONES.items() <= contents.get('EntryContext').get('CylanceProtect.Zones(val.Id && val.Id === obj.Id)').items()
def test_update_zone(mocker):
"""
Given
- demisto args
When
- calls the function update_zone
Then
- checks if the output is as expected
"""
args = {'id': '1998235b-a6ab-4043-86b5-81b0dc63887b',
'name': 'name'
}
mocker.patch.object(Cylance_Protect_v2, "update_zone_request", return_value=ZONE_OUTPUT)
mocker.patch.object(Cylance_Protect_v2, "get_zone_request", return_value=ZONE_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
update_zone()
contents = demisto_results.call_args[0][0]
assert 'Zone was updated successfully.' in contents.get('HumanReadable')
def test_get_threat(mocker):
"""
Given
- demisto args
When
    - calls the function get_threat
Then
- checks if the output is as expected
"""
args = {'sha256': '055D7A25DECF6769BF4FB2F3BC9FD3159C8B42972818177E44975929D97292DE', 'threshold': -59}
mocker.patch.object(Cylance_Protect_v2, "get_threat_request", return_value=THREAT_OUTPUT)
mocker.patch.object(Cylance_Protect_v2, "translate_score", return_value=1)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_threat()
contents = demisto_results.call_args[0][0]
assert u'055D7A25DECF6769BF4FB2F3BC9FD3159C8B42972818177E44975929D97292DE' == \
contents.get('EntryContext').get('File')[0].get('SHA256')
def test_get_threats(mocker):
"""
Given
- demisto args
When
    - calls the function get_threats
Then
- checks if the output is as expected
"""
mocker.patch.object(Cylance_Protect_v2, "get_threats_request", return_value={'page_items': [THREAT_OUTPUT]})
mocker.patch.object(demisto, 'args', return_value={'threshold': -59})
mocker.patch.object(Cylance_Protect_v2, "translate_score", return_value=1)
demisto_results = mocker.patch.object(demisto, 'results')
get_threats()
contents = demisto_results.call_args[0][0]
assert u'055D7A25DECF6769BF4FB2F3BC9FD3159C8B42972818177E44975929D97292DE' == contents.get('EntryContext').get(
'File')[0].get('SHA256')
def test_get_threat_devices(mocker):
"""
Given
- demisto args
When
- calls the function get_threat_devices
Then
- checks if the output is as expected
"""
args = {'sha256': '055D7A25DECF6769BF4FB2F3BC9FD3159C8B42972818177E44975929D97292DE'}
mocker.patch.object(Cylance_Protect_v2, "get_threat_devices_request",
return_value={'page_items': [THREAT_DEVICES_OUTPUT]})
mocker.patch.object(Cylance_Protect_v2, "translate_score", return_value=1)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_threat_devices()
contents = demisto_results.call_args[0][0]
assert EXPECTED_THREAT_DEVICES.items() <= contents.get('EntryContext').get('File').items()
def test_get_list(mocker):
"""
Given
- demisto args
When
- calls the function get_list
Then
- checks if the output is as expected
"""
args = {'listTypeId': "GlobalSafe", "sha256": "234E5014C239FD89F2F3D56091B87763DCD90F6E3DB42FD2FA1E0ABE05AF0487"}
mocker.patch.object(Cylance_Protect_v2, "get_list_request",
return_value={'page_items': [LIST_OUTPUT]})
mocker.patch.object(Cylance_Protect_v2, "translate_score", return_value=1)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_list()
contents = demisto_results.call_args[0][0]
    assert EXPECTED_LIST.items() <= contents.get('EntryContext').get('File')[0].items()
def test_get_list_entry_by_hash(mocker):
"""
Given
- demisto args
When
- calls the function get_list_entry_by_hash
Then
- checks if the output is as expected
"""
args = {'listTypeId': "GlobalSafe", "sha256": "234E5014C239FD89F2F3D56091B87763DCD90F6E3DB42FD2FA1E0ABE05AF0487"}
mocker.patch.object(Cylance_Protect_v2, "get_list_request",
return_value={'page_items': [LIST_OUTPUT], u'total_pages': u'total_pages'})
mocker.patch.object(Cylance_Protect_v2, "translate_score", return_value=1)
mocker.patch.object(demisto, 'args', return_value=args)
mocker.patch.object(demisto, 'command', return_value='cylance-protect-get-list-entry')
demisto_results = mocker.patch.object(demisto, 'results')
get_list_entry_by_hash()
contents = demisto_results.call_args[0][0]
assert EXPECTED_LIST.get('Sha256') == contents.get('EntryContext').get('CylanceListSearch').get('Sha256')
def test_add_hash_to_list(mocker):
"""
Given
- demisto args
When
- calls the function add_hash_to_list
Then
- checks if the output is as expected
"""
args = {'listType': "GlobalSafe",
"sha256": "234E5014C239FD89F2F3D56091B87763DCD90F6E3DB42FD2FA1E0ABE05AF0487",
"category": "Admin Tool",
"reason": "Added by Demisto"
}
mocker.patch.object(Cylance_Protect_v2, "add_hash_to_list_request", return_value={'page_items': [LIST_OUTPUT]})
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
add_hash_to_list()
contents = demisto_results.call_args[0][0]
assert 'The requested threat has been successfully added to GlobalSafe hashlist.' in contents.get('HumanReadable')
def test_delete_hash_from_lists(mocker):
"""
Given
- demisto args
When
- calls the function delete_hash_from_lists
Then
- checks if the output is as expected
"""
args = {'listType': "GlobalSafe",
"sha256": "234E5014C239FD89F2F3D56091B87763DCD90F6E3DB42FD2FA1E0ABE05AF0487",
"category": "Admin Tool",
"reason": "Added by Demisto"
}
mocker.patch.object(Cylance_Protect_v2, "delete_hash_from_lists_request", return_value={})
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
delete_hash_from_lists()
contents = demisto_results.call_args[0][0]
assert 'The requested threat has been successfully removed from GlobalSafe hashlist.' in \
contents.get('HumanReadable')
def test_delete_devices(mocker):
"""
Given
- demisto args
When
- calls the function delete_devices
Then
- checks if the output is as expected
"""
args = {'deviceIds': "8e836c98-102e-4332-b00d-81dcb7a9b6f7",
"batch_size": 1,
"category": "Admin Tool",
"reason": "Added by Demisto"
}
mocker.patch.object(Cylance_Protect_v2, "get_device_request", return_value=DEVICE_OUTPUT)
mocker.patch.object(Cylance_Protect_v2, "delete_devices_request", return_value={})
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
delete_devices()
contents = demisto_results.call_args[0][0]
assert 'The requested devices have been successfully removed from your organization list.' in \
contents.get('HumanReadable')
def test_get_policy_details(mocker):
"""
Given
- demisto args
When
- calls the function get_policy_details
Then
- checks if the output is as expected
"""
args = {'policyID': '980fad21-b119-4cc4-ac97-2b2c035b4666'}
mocker.patch.object(Cylance_Protect_v2, "get_policy_details_request", return_value=POLICY_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_policy_details()
contents = demisto_results.call_args[0][0]
assert EXPECTED_POLICY.get("ID") == \
contents.get('EntryContext').get('Cylance.Policy(val.policy_id && val.policy_id == obj.policy_id)').get("policy_id")
def test_create_instaquery(mocker):
"""
Given
- demisto args
When
    - calls the function create_instaquery
Then
- checks if the output is as expected
"""
args = {
"name": "<NAME>",
"description": "To collect test result",
"artifact": "File",
"match_value_type": "File.Path",
"match_values": "cyoptics.exe",
"case_sensitive": False,
"match_type": "Fuzzy",
"zone": "6608ca0e-88c6-4647-b276-271cc5ea4295"
}
mocker.patch.object(Cylance_Protect_v2, "create_instaquery_request", return_value=INSTAQUERY_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
create_instaquery()
contents = demisto_results.call_args[0][0]
assert INSTAQUERY_OUTPUT.get("id") == \
contents.get('EntryContext').get('InstaQuery.New(val.id && val.id == obj.id)').get("id")
def test_get_instaquery_result(mocker):
"""
Given
- demisto args
When
- calls the function get_instaquery_result
Then
- checks if the output is as expected
"""
args = {'query_id': 'CBEB9E9C9A9A41D1BD06C87464F5E2CD'}
mocker.patch.object(Cylance_Protect_v2, "get_instaquery_result_request", return_value=INSTAQUERY_RESULT_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
get_instaquery_result()
contents = demisto_results.call_args[0][0]
assert json.loads(INSTAQUERY_RESULT_OUTPUT['result'][0]['Result']).get(
'Properties').get('Sha256') in contents.get('HumanReadable')
def test_list_instaquery(mocker):
"""
Given
- demisto args
When
- calls the function list_instaquery
Then
- checks if the number of output items is as expected
"""
args = {'page_number': '1',
'page_size': '2'
}
mocker.patch.object(Cylance_Protect_v2, "list_instaquery_request", return_value=LIST_INSTAQUERY_OUTPUT)
mocker.patch.object(demisto, 'args', return_value=args)
demisto_results = mocker.patch.object(demisto, 'results')
list_instaquery()
contents = demisto_results.call_args[0][0]
assert len(LIST_INSTAQUERY_OUTPUT.get("page_items")) == \
len(contents.get('EntryContext').get("InstaQuery.List").get("page_items"))
```
#### File: Integrations/APIMetricsValidation/APIMetricsValidation_test.py
```python
import APIMetricsValidation
import demistomock as demisto
from CommonServerPython import * # noqa: F401
def test_scenario_one():
"""
Given: 10 successful API Calls
When: API Metrics Validation scenario one is run
Then: API Metrics Validation should return an execution_metrics object with 10 successful API calls
"""
expected_result = [{'Type': 'Successful', 'APICallsCount': 10}]
returned_result = APIMetricsValidation.scenario_one()
assert expected_result == returned_result[10].execution_metrics
def test_scenario_two(mocker):
"""
Given: 5 successful and 5 failed API Calls
When: API Metrics Validation scenario two is run
Then: API Metrics Validation should return an execution_metrics object with 5 failed API calls and 5 successful API calls
"""
expected_result = [
{'Type': 'Successful', 'APICallsCount': 5},
{'Type': 'QuotaError', 'APICallsCount': 5}
]
mocker.patch.object(demisto, 'demistoVersion', return_value={'version': '6.5.0', 'buildNumber': '61000'})
returned_result = APIMetricsValidation.scenario_two()
assert expected_result == returned_result[6].execution_metrics
def test_scenario_three(mocker):
"""
Given: 5 API calls which fail on quota error
When: API Metrics Validation scenario three is run
Then: 5 scheduled command results, and no execution metrics
"""
mocker.patch('CommonServerPython.ScheduledCommand.raise_error_if_not_supported')
expected_result = {
'items_to_schedule': [
'sixsix',
'sevenseven',
'eighteight',
'ninenine',
'tenten'
],
'polling': True
}
returned_result = APIMetricsValidation.scenario_three()
assert expected_result == returned_result[0].scheduled_command._args
def test_scenario_four(mocker):
"""
Given: 5 API calls 2 succeed and 3 which are scheduled
When: API Metrics Validation scenario four is run
Then: 3 scheduled command results, and 2 successful execution metrics
"""
mocker.patch('CommonServerPython.ScheduledCommand.raise_error_if_not_supported')
expected_result = [{'Type': 'Successful', 'APICallsCount': 2}]
returned_result = APIMetricsValidation.scenario_four()
assert expected_result == returned_result[3].execution_metrics
def test_scenario_five(mocker):
"""
Given: 1 API call which passes
When: API Metrics Validation scenario five is run
Then: 1 execution metrics containing one successful API call
"""
mocker.patch('CommonServerPython.ScheduledCommand.raise_error_if_not_supported')
expected_result = [{'Type': 'Successful', 'APICallsCount': 1}]
returned_result = APIMetricsValidation.scenario_five()
assert expected_result == returned_result[1].execution_metrics
def test_scenario_six(mocker):
"""
Given: 1 API call which fails on quota error
When: API Metrics Validation scenario six is run
Then: 1 scheduled command result, and execution metrics containing one quota error
"""
mocker.patch('CommonServerPython.ScheduledCommand.raise_error_if_not_supported')
expected_result = [{'APICallsCount': 1, 'Type': 'QuotaError'}]
returned_result = APIMetricsValidation.scenario_six()
assert expected_result == returned_result[1].execution_metrics
def test_scenario_seven(mocker):
"""
Given: 1 API calls which fails on quota error
When: API Metrics Validation scenario seven is run
Then: 1 scheduled command results, and no execution metrics
"""
mocker.patch('CommonServerPython.ScheduledCommand.raise_error_if_not_supported')
expected_result = {
'items_to_schedule': [[
'oneone',
'twotwo',
'threethree',
'fourfour',
'fivefive',
'sixsix',
'sevenseven',
'eighteight',
'ninenine',
'tenten'
]],
'polling': True
}
returned_result = APIMetricsValidation.scenario_seven()
assert expected_result == returned_result[0].scheduled_command._args
def test_scenario_eight(mocker):
"""
Given: 1 API call which fails on quota error
When: API Metrics Validation scenario eight is run
Then: 1 scheduled command results, and no execution metrics
"""
mocker.patch('CommonServerPython.ScheduledCommand.raise_error_if_not_supported')
expected_result = {
'items_to_schedule': [[
'oneone',
'twotwo',
'threethree',
'fourfour',
'fivefive',
'sixsix',
'sevenseven',
'eighteight',
'ninenine',
'tenten'
]],
'polling': True
}
returned_result = APIMetricsValidation.scenario_eight()
assert expected_result == returned_result[0].scheduled_command._args
def test_scenario_nine(mocker):
"""
Given: 5 API calls which fail on quota error
When: API Metrics Validation scenario nine is run
Then: 1 Execution Metrics containing 5 quota error API calls
"""
mocker.patch('CommonServerPython.ScheduledCommand.raise_error_if_not_supported')
expected_result = [{'APICallsCount': 5, 'Type': 'QuotaError'}]
returned_result = APIMetricsValidation.scenario_nine()
assert expected_result == returned_result[5].execution_metrics
def test_scenario_ten(mocker):
"""
Given: 1 API call which is successful
When: API Metrics Validation scenario ten is run
Then: 1 Execution metric result with one success
"""
mocker.patch('CommonServerPython.ScheduledCommand.raise_error_if_not_supported')
expected_result = [{'APICallsCount': 1, 'Type': 'Successful'}]
returned_result = APIMetricsValidation.scenario_ten()
assert expected_result == returned_result[1].execution_metrics
```
#### File: Scripts/ForescoutEyeInspectButtonGetPCAP/ForescoutEyeInspectButtonGetPCAP.py
```python
from typing import Any, Dict
import demistomock as demisto
from CommonServerPython import *
def get_pcap() -> Dict[str, Any]:
alert_id = demisto.incident()['CustomFields'].get('alertid')
if not alert_id:
return_error('Forescout EyeInspect alert ID is missing inside the incident.')
return demisto.executeCommand('forescout-ei-alert-pcap-get', {'alert_id': alert_id})
def main():
try:
return_results(get_pcap())
except Exception as e:
demisto.error(fix_traceback_line_numbers(traceback.format_exc()))
return_error(f'Failed to get pcap from Forescout EyeInspect incident.\nError:\n{e}')
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Scripts/ForescoutEyeInspectButtonHostChangeLog/ForescoutEyeInspectButtonHostChangeLog_test.py
```python
from datetime import datetime
from pytest_mock.plugin import MockerFixture
import demistomock as demisto
import ForescoutEyeInspectButtonHostChangeLog
from ForescoutEyeInspectButtonHostChangeLog import get_hosts_changelog, main
HOSTS_CHANGELOG_MOCK = [{
'id': 51,
'timestamp': '2022-02-03T07:49:53.000+01:00',
'event_type_id': 'hostcl_new_host',
'event_type_name': 'New host',
'event_category': 'PROPERTIES',
'host_id': 49,
'information_source': 'NETWORK',
'sensor_id': 9,
'username': '',
'old_value': '',
'new_value': '',
'host_address': '192.168.60.192',
'host_vlan': '',
'host_name': '',
'host_ip_reuse_domain_id': 1,
'host_mac_addresses': ['B4:2E:99:C9:5E:75', 'C4:24:56:A4:86:11']
}]
def return_error_mock(message: str, *_):
raise Exception(message)
def test_get_hosts_changelog(mocker: MockerFixture):
mocker.patch.object(demisto, 'incident', return_value={'occurred': datetime.now().isoformat()})
mocker.patch.object(demisto, 'executeCommand', return_value=HOSTS_CHANGELOG_MOCK)
assert get_hosts_changelog()[0]['id'] == 51
def test_command_error(mocker: MockerFixture):
mocker.patch.object(demisto, 'incident', return_value={'occurred': datetime.now().isoformat()})
mocker.patch.object(demisto, 'error')
mocker.patch.object(ForescoutEyeInspectButtonHostChangeLog, 'return_error', return_error_mock)
mocker.patch.object(demisto,
'executeCommand',
side_effect=Exception('Failed to communicate with server'))
try:
main()
except Exception as e:
assert 'Failed to communicate with server' in str(e)
```
#### File: Integrations/GitHubEventCollector/GitHubEventCollector.py
```python
from datetime import datetime
import urllib3
from CommonServerPython import *
import demistomock as demisto
import dateparser
from collections.abc import Generator
from SiemApiModule import * # noqa: E402
urllib3.disable_warnings()
def get_github_timestamp_format(value):
"""Converting int(epoch), str(3 days) or datetime to github's api time"""
timestamp: Optional[datetime]
if isinstance(value, int):
value = str(value)
if not isinstance(value, datetime):
timestamp = dateparser.parse(value)
if timestamp is None:
raise TypeError(f'after is not a valid time {value}')
timestamp_epoch = timestamp.timestamp() * 1000
str_bytes = f'{timestamp_epoch}|'.encode('ascii')
base64_bytes = base64.b64encode(str_bytes)
return base64_bytes.decode('ascii')
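# Illustrative example of the conversion above: for an input such as '2022-01-01T00:00:00Z',
# dateparser resolves the datetime, the epoch is scaled to milliseconds (1640995200000.0),
# a trailing '|' is appended, and the ASCII bytes are base64-encoded; the resulting string is
# what this collector sends as the `after` query parameter.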
class GithubParams(BaseModel):
"""
A class that stores the request query params
"""
include: str
order: str = 'asc'
after: str
per_page: int = 100 # Maximum is 100
_normalize_after = validator('after', pre=True, allow_reuse=True)(
get_github_timestamp_format
)
class GithubEventsRequestConfig(IntegrationHTTPRequest):
url = AnyUrl
method = Method.GET
params: GithubParams # type: ignore
class GithubClient(IntegrationEventsClient):
def set_request_filter(self, after: str):
if self.request.params:
self.request.params.after = get_github_timestamp_format(after) # type: ignore
class GithubGetEvents(IntegrationGetEvents):
def _iter_events(self) -> Generator:
"""
Function responsible for iterating over the events returned from the GitHub API
"""
events = self.client.call(self.client.request).json()
if not events:
return []
while True:
yield events
last = events.pop()
self.client.set_request_filter(last['@timestamp'])
events = self.client.call(self.client.request).json()
try:
events.pop(0)
assert events
except (IndexError, AssertionError):
LOG('empty list, breaking')
break
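# Flow of _iter_events, for reference: each API call yields a batch; the cursor is then advanced
# to the '@timestamp' of the last event in that batch, the first event of the next batch is
# dropped (presumably a duplicate of the event the cursor points at), and iteration stops once
# the next batch comes back empty.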
@staticmethod
def get_last_run(events: List[dict]) -> dict:
"""
Build the last run object from the fetched events; it returns the time to query from on the next fetch
"""
last_timestamp = events[-1]['@timestamp']
last_time = last_timestamp / 1000
next_fetch_time = datetime.fromtimestamp(last_time) + timedelta(
seconds=1
)
return {'after': next_fetch_time.isoformat()}
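# Illustrative example: if the newest fetched event has '@timestamp': 1640995200000, the value is
# converted to seconds, one second is added, and the resulting ISO string (server-local time,
# since datetime.fromtimestamp is used) is stored as {'after': ...} for the next fetch.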
def main():
# Arguments take precedence over params, and the last run takes precedence over both.
demisto_params = demisto.params() | demisto.args() | demisto.getLastRun()
should_push_events = argToBoolean(demisto_params.get('should_push_events', 'false'))
headers = {'Authorization': f"Bearer {demisto_params['auth_credendtials']['password']}",
'Accept': 'application/vnd.github.v3+json'}
demisto_params['headers'] = headers
demisto_params['params'] = GithubParams(**demisto_params)
request = GithubEventsRequestConfig(**demisto_params)
options = IntegrationOptions.parse_obj(demisto_params)
client = GithubClient(request, options)
get_events = GithubGetEvents(client, options)
command = demisto.command()
try:
if command == 'test-module':
get_events.run()
return_results('ok')
elif command in ('github-get-events', 'fetch-events'):
events = get_events.run()
if command == 'fetch-events':
send_events_to_xsiam(events, 'github', demisto_params.get('product'))
demisto.setLastRun(GithubGetEvents.get_last_run(events))
elif command == 'github-get-events':
command_results = CommandResults(
readable_output=tableToMarkdown('Github Logs', events, headerTransform=pascalToSpace),
outputs_prefix='Github.Logs',
outputs_key_field='@timestamp',
outputs=events,
raw_response=events,
)
return_results(command_results)
if should_push_events:
send_events_to_xsiam(events, 'github', demisto_params.get('product'))
except Exception as e:
return_error(str(e))
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Integrations/GLPI/GLPI.py
```python
import demistomock as demisto
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
''' IMPORTS '''
import json
import os
from html import unescape
import bcrypt
import dateparser
import requests
from glpi_api import GLPI
''' CONSTANTS, GLPI DATA '''
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
MAX_INCIDENTS_TO_FETCH = 50
"""Manifest when uploading a document passed as JSON in the multipart/form-data POST
request. Note that doubled curly braces render as a single literal curly brace."""
UPLOAD_MANIFEST = '{{ "input": {{ "name": "{name:s}", "_filename" : ["{filename:s}"] }} }}'
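# For example, UPLOAD_MANIFEST.format(name='report', filename='report.pdf') renders as:
# { "input": { "name": "report", "_filename" : ["report.pdf"] } }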
"""Warning when we need to delete an incomplete document due to upload error."""
WARN_DEL_DOC = (
"The file could not be uploaded but a document with id '{:d}' was created, "
"this document will be purged.")
"""Warning when an invalid document could not be purged."""
WARN_DEL_ERR = ("The created document could not be purged, you may need to clean it manually: {:s}")
GLPI_ARGS = ['id', 'entities_id', 'name', 'date', 'closedate', 'solvedate', 'date_mod', 'users_id_lastupdater', 'status',
'users_id_recipient', 'requesttypes_id', 'content', 'urgency', 'impact', 'priority',
'itilcategories_id', 'type', 'global_validation', 'slas_id_ttr', 'slas_id_tto', 'slalevels_id_ttr',
'time_to_resolve', 'time_to_own', 'begin_waiting_date', 'sla_waiting_duration', 'ola_waiting_duration',
'olas_id_tto', 'olas_id_ttr', 'olalevels_id_ttr', 'ola_ttr_begin_date', 'internal_time_to_resolve',
'internal_time_to_own', 'waiting_duration', 'close_delay_stat', 'solve_delay_stat',
'takeintoaccount_delay_stat', 'actiontime', 'is_deleted', 'locations_id', 'validation_percent',
'date_creation', 'links']
MIRROR_DIRECTION = {
'None': None,
'Incoming': 'In',
'Outgoing': 'Out',
'Incoming And Outgoing': 'Both'
}
TICKET_TYPE = {
'Incident': 1,
'Request': 2
}
TICKET_HIGHLOW = {
'Veryhigh': 5,
'High': 4,
'Medium': 3,
'Low': 2,
'Verylow': 1
}
TICKET_MAJORLOW = {
'Major': 6,
'Veryhigh': 5,
'High': 4,
'Medium': 3,
'Low': 2,
'Verylow': 1
}
TICKET_STATUS = {
'New': 1,
'Processing(assigned)': 2,
'Processing(planned)': 3,
'Pending': 4,
'Solved': 5,
'Closed': 6
}
TICKET_LINK = {
'Link': 1,
'Duplicate': 2,
'Child': 3,
'Parent': 4
}
USER_TYPE = {
'REQUESTER': 1,
'ASSIGNED': 2,
'WATCHER': 3
}
TICKET_FIELDS = (
'closedate',
'content',
'date',
'id',
'impact',
'internal_time_to_own',
'internal_time_to_resolve',
'itilcategories_id',
'name',
'priority',
'requesttypes_id',
'solvedate',
'status',
'time_to_own',
'type',
'urgency'
)
class myglpi(GLPI):
def upload_document(self, name, filepath, fhandler=None, doc_name=None):
"""`API documentation
<https://github.com/glpi-project/glpi/blob/master/apirest.md#upload-a-document-file>`__
Upload the file at ``filepath`` as a document named ``name``.
.. code::
glpi.upload_document("My test document", '/path/to/file/locally')
{'id': 55,
'message': 'Item successfully added: My test document',
'upload_result': {'filename': [{'name': ...}]}}
There may be errors while uploading the file (such as an unmanaged file type).
In this case, the API creates a document without a file attached to it.
This method raises a warning (and another warning if the document could not
be deleted for some reason) and purges the created but incomplete document.
"""
if not doc_name:
doc_name = name
if not fhandler:
fhandler = open(filepath, 'rb')
response = requests.post(
url=self._set_method('Document'),
headers={
'Session-Token': self.session.headers['Session-Token'],
'App-Token': self.session.headers['App-Token']
},
files={
'uploadManifest': (
None,
UPLOAD_MANIFEST.format(
name=doc_name,
filename=name
),
'application/json'
),
'filename[0]': (name, fhandler)
}
)
if response.status_code != 201:
raise DemistoException(response)
doc_id = response.json()['id']
error = response.json()['upload_result']['filename'][0].get('error', None)
if error is not None:
demisto.error(WARN_DEL_DOC.format(doc_id))
try:
self.delete('Document', {'id': doc_id}, force_purge=True)
except DemistoException as err:
demisto.error(WARN_DEL_ERR.format(str(doc_id) + ' ' + str(err)))
raise DemistoException('(ERROR_GLPI_INVALID_DOCUMENT) {:s}'.format(error))
return response.json()
class Client(BaseClient):
"""
implement the GLPI API
"""
def __init__(self, params):
super().__init__(
base_url=params['base_url'],
verify=params['verify'],
proxy=params['proxy']
)
self.glpi = myglpi(params['base_url'], params['app_token'], params['auth_token'])
def test(self):
res = self.glpi.get_full_session()
return res
def get_ticket(self, ticket_id):
res = self.glpi.get_item('ticket', ticket_id)
return res
def get_item(self, item_type, item_id):
res = self.glpi.get_item(item_type, item_id)
return res
def get_user(self, user_id):
res = self.glpi.get_item('user', user_id)
return res
def get_ticket_users(self, ticket_id):
res = self.glpi.get_sub_items('ticket', ticket_id, 'Ticket_User', expand_dropdowns=True)
return res
def get_ticket_groups(self, ticket_id):
res = self.glpi.get_sub_items('ticket', ticket_id, 'Group_Ticket', expand_dropdowns=True)
return res
def get_ticket_docs(self, ticket_id):
res = self.glpi.get_sub_items('ticket', ticket_id, 'Document_Item')
return res
def get_ticket_comments(self, ticket_id):
res = self.glpi.get_sub_items('ticket', ticket_id, 'ticketfollowup', expand_dropdowns=True)
return res
def download_document(self, doc_id, dirpath="/tmp", filename=None):
res = self.glpi.download_document(doc_id, dirpath, filename)
return res
def upload_document(self, file_name, file_path, fhandler=None, doc_name=None):
res = self.glpi.upload_document(file_name, file_path, fhandler, doc_name)
return res
def get_profile_list(self):
res = self.glpi.get_all_items("Profile")
return res
def add_link(self, ticket_id_1, ticket_id_2, link):
res = self.glpi.add('ticket_ticket', {'tickets_id_1': ticket_id_1, 'tickets_id_2': ticket_id_2, 'link': link})
return res
def create_user(self, userinfo):
res = self.glpi.add("user", userinfo)
return res
def update_user(self, userinfo):
res = self.glpi.update("user", userinfo)
return res
def link_document_to_ticket(self, document_id, ticket_id):
res = self.glpi.add('Document_Item', {'documents_id': document_id, 'itemtype': 'ticket', 'items_id': ticket_id})
return res
def add_comment(self, ticket_id, content):
res = self.glpi.add('ticketfollowup', {
"tickets_id": ticket_id,
"is_private": "0",
"requesttypes_id": 1,
"content": content
})
return res
def delete_user(self, userid, purge):
res = self.glpi.delete("user", {'id': userid}, force_purge=purge)
return res
def disable_user(self, userid):
res = self.glpi.update("user", {'id': userid, 'is_active': '0'})
return res
def enable_user(self, userid):
res = self.glpi.update("user", {'id': userid, 'is_active': '1'})
return res
def get_user_id(self, username):
criteria = [{'field': 1, 'searchtype': 'contains', 'value': '^' + username + '$'}]
forcedisplay = [2]
res = self.glpi.search('user', criteria=criteria, forcedisplay=forcedisplay)[0]['2']
return res
def list_incidents(self, last_fetch):
criteria = [{'field': 15, 'searchtype': 'morethan', 'value': last_fetch}]
res = self.glpi.search('ticket', criteria=criteria)
return res
def modified_incidents(self, last_fetch, srange):
criteria = [{'field': 19, 'searchtype': 'morethan', 'value': last_fetch}]
res = self.glpi.search('ticket', criteria=criteria, range=srange)
return res
def update_ticket(self, data):
res = self.glpi.update('ticket', data)
return res
def create_ticket(self, data):
res = self.glpi.add('ticket', data)
return res
def close_ticket(self, ticket_id):
res = self.glpi.update("ticket", {'id': ticket_id, 'status': 6})
return res
def delete_ticket(self, ticket_id, purge=False):
res = self.glpi.delete("ticket", {'id': ticket_id}, force_purge=purge)
return res
def search(self, item_type, query, display):
res = self.glpi.search(item_type, criteria=query, forcedisplay=display, uid_cols=True)
return res
def test_module(params):
"""
Returning 'ok' indicates that the integration works like it is supposed to. Connection to the service is successful.
Args:
params: The integration parameters used to build the GLPI client.
Returns:
'ok' if test passed, anything else will fail the test.
"""
try:
client = Client(params)
result = client.test()
if 'valid_id' in result:
return 'ok'
return 'Test Failed! Check your GLPI server'
except Exception as e:
if 'ERROR_WRONG_APP_TOKEN_PARAMETER' in str(e):
return 'Test Failed! Authentication Error: ' + str(e)
else:
return 'Test Failed! Make sure the URL is correctly set. Error: ' + str(e)
def get_profile_id_helper(client, args):
profile_name = args.get('profile')
profile_id = None
if profile_name is not None:
profile_list = client.get_profile_list()
for profile in profile_list:
if profile['name'] == profile_name:
profile_id = profile['id']
if profile_id is None:
raise DemistoException('Profile does not exist')
return profile_id
def get_user_id_helper(client, args):
user_name = args.get('name')
user_id = None
user_id = client.get_user_id(user_name)
if user_id is None:
raise DemistoException('User does not exist')
return user_id
def get_ticket_users_helper(client, ticket_id):
requester = []
assigned = []
watcher = []
users = client.get_ticket_users(ticket_id)
for user in users:
if user['type'] == USER_TYPE['REQUESTER']:
requester.append(user['users_id'])
elif user['type'] == USER_TYPE['ASSIGNED']:
assigned.append(user['users_id'])
elif user['type'] == USER_TYPE['WATCHER']:
watcher.append(user['users_id'])
return requester, assigned, watcher
def get_ticket_groups_helper(client, ticket_id):
requester = []
assigned = []
watcher = []
groups = client.get_ticket_groups(ticket_id)
for group in groups:
if group['type'] == 1:
requester.append(group['groups_id'])
elif group['type'] == 2:
assigned.append(group['groups_id'])
elif group['type'] == 3:
watcher.append(group['groups_id'])
return requester, assigned, watcher
def get_ticket_docs_helper(client, ticket_id):
docs = client.get_ticket_docs(ticket_id)
files = []
if docs:
for doc in docs:
display_name = client.get_item('Document', doc['documents_id']).get('filename')
file = client.download_document(doc['documents_id'], filename=display_name)
filename = os.path.split(file)[1]
f = open(file, 'rb')
data = f.read()
files.append(fileResult(filename, data))
return files
def ticket_format(args):
ticket_fields = {}
for arg in GLPI_ARGS:
input_arg = args.get(arg)
if input_arg:
if arg in ['impact', 'urgency']:
ticket_fields[arg] = TICKET_HIGHLOW.get(input_arg)
elif arg == 'priority':
ticket_fields[arg] = TICKET_MAJORLOW.get(input_arg)
elif arg == 'status':
ticket_fields[arg] = TICKET_STATUS.get(input_arg)
elif arg == 'type':
ticket_fields[arg] = TICKET_TYPE.get(input_arg)
else:
ticket_fields[arg] = input_arg
return ticket_fields
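# Illustrative example: ticket_format({'name': 'Printer down', 'urgency': 'High', 'status': 'New'})
# returns {'name': 'Printer down', 'urgency': 4, 'status': 1}, mapping the readable values through
# TICKET_HIGHLOW and TICKET_STATUS.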
def output_format(res, output_type=None, readable=None):
if res:
if isinstance(res, list):
keys = res[0].keys()
elif isinstance(res, str):
return CommandResults(outputs_prefix='GLPI.' + output_type,
outputs_key_field="id",
outputs=res,
raw_response=res)
else:
keys = res.keys()
key_list = [key for key in keys]
if not output_type:
output_type = key_list[0].split(".")[0]
if not readable:
readable = output_type
result = CommandResults(outputs_prefix='GLPI.' + output_type,
outputs_key_field="id",
outputs=res,
raw_response=res,
readable_output=tableToMarkdown(name='GLPI ' + readable, t=res, headers=key_list))
return result
else:
return "No result"
def split_fields(fields: str = '', delimiter: str = ';') -> dict:
dic_fields = {}
if fields:
if '=' not in fields:
raise Exception(
f"The argument: {fields}.\nmust contain a '=' to specify the keys and values. e.g: key=val.")
arr_fields = fields.split(delimiter)
for f in arr_fields:
field = f.split('=', 1) # a field might include a '=' sign in the value. thus, splitting only once.
if len(field) > 1:
dic_fields[field[0]] = field[1]
return dic_fields
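# Illustrative example: split_fields('itilcategories_id=5;urgency=3') returns
# {'itilcategories_id': '5', 'urgency': '3'}. A value may itself contain '=' because each field
# is split only once.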
def upload_files(client, entries, ticket_id=None, filename=None, doc_name=None):
entry_ids = argToList(entries)
if filename:
entry_names = argToList(filename)
files = {entry_ids[i]: entry_names[i] for i in range(len(entry_names))}
for entry in entry_ids:
path_res = demisto.getFilePath(entry)
full_file_name = path_res.get('name')
file_extension = os.path.splitext(full_file_name)[1]
if filename:
full_file_name = files[entry]
filename = os.path.split(path_res.get('path'))[1]
with open(path_res.get('path'), "rb") as fhandler:
if not file_extension:
file_extension = ''
up = client.upload_document(full_file_name, path_res.get('path'), fhandler, doc_name)
if ticket_id:
client.link_document_to_ticket(up['id'], ticket_id)
return up
def upload_file_command(client, args):
entries = args.get('entryid')
filename = args.get('filename')
doc_name = args.get('doc_name')
res = upload_files(client, entries, None, filename, doc_name)
result = output_format(res, 'Document', 'Document successfully added with ID : ' + str(res['id']))
return result
def get_user_id_command(client, args):
user_id = get_user_id_helper(client, args)
if user_id:
res_format = {
'id': user_id,
'username': args.get('name')
}
result = CommandResults(outputs_prefix='GLPI.User',
outputs_key_field=['id', 'username'],
outputs=res_format,
raw_response=res_format,
readable_output=tableToMarkdown(name='GLPI username', t=res_format, headers=['id', 'username']))
return result
else:
raise DemistoException('Username does not exist')
def get_user_name_command(client, args):
user_id = args.get('id')
res = client.get_user(user_id)
if res:
user_name = res['name']
res_format = {
'id': user_id,
'username': user_name
}
result = CommandResults(outputs_prefix='GLPI.User',
outputs_key_field=['id', 'username'],
outputs=res_format,
raw_response=res_format,
readable_output=tableToMarkdown(name='GLPI username', t=res_format, headers=['id', 'username']))
return result
else:
raise DemistoException('User ID does not exist')
def create_user_command(client, args):
username = args.get('name')
firstname = args.get('firstname')
lastname = args.get('lastname')
email = args.get('email')
userpass = args.get('password').encode("utf-8")
bpass = bcrypt.hashpw(userpass, bcrypt.gensalt(rounds=10)).decode("utf-8")
glpi_pass = bpass.replace('$2b$', '$2y$')  # assumed reconstruction: convert the bcrypt prefix to the $2y$ form GLPI (PHP) stores
additional_fields = split_fields(str(args.get('additional_fields', '')), ';')
profile_id = get_profile_id_helper(client, args)
user = {'name': username,
'realname': lastname,
'_useremails': [email],
'firstname': firstname,
'password': glpi_pass,
'_profiles_id': [profile_id]}
if additional_fields:
user.update(additional_fields)
res = client.create_user(user)
result = output_format(res, 'User', 'User successfully added with ID : ' + str(res[0]['id']))
return result
def update_user_command(client, args):
user_id = args.get('id')
user = {'id': user_id}
additional_fields = split_fields(str(args.get('update_fields', '')), ';')
if additional_fields:
user.update(additional_fields)
res = client.update_user(user)
if res[0][str(user_id)] is True:
return output_format(res, 'User', 'User with ID ' + str(user_id) + ' successfully updated')
raise DemistoException('Error when trying to update user ID ' + str(user_id) + ': ' + str(res))
def delete_user_command(client, args):
username = args.get('name')
purge = args.get('purge')
user_id = get_user_id_helper(client, args)
res = client.delete_user(user_id, purge)
if res[0][str(user_id)] is True:
return 'User ' + str(username) + ' successfully deleted'
raise DemistoException('Error when trying to delete user ' + str(username) + ': ' + str(res))
def enable_user_command(client, args):
username = args.get('name')
user_id = get_user_id_helper(client, args)
res = client.enable_user(user_id)
if res[0][str(user_id)] is True:
return 'User ' + str(username) + ' successfully enabled'
raise DemistoException('Error when trying to enable user ' + str(username) + ': ' + str(res))
def disable_user_command(client, args):
username = args.get('name')
user_id = get_user_id_helper(client, args)
res = client.disable_user(user_id)
if res[0][str(user_id)] is True:
return 'User ' + str(username) + ' successfully disabled'
raise DemistoException('Error when trying to disable user ' + str(username) + ': ' + str(res))
def add_comment_command(client, args):
ticket_id = args.get('ticket_id')
text = args.get('comment')
res = client.add_comment(ticket_id, text)
if res:
if "id" in res[0]:
result = output_format(res, 'Comment', 'Comment successfully added to ticket ID : ' + str(ticket_id))
return result
else:
raise DemistoException('Error when trying to add comment: ' + str(res))
def add_link_command(client, args):
ticket_id_1 = args.get('ticket_ID_1')
ticket_id_2 = args.get('ticket_ID_2')
link = TICKET_LINK.get(args.get('link'))
res = client.add_link(ticket_id_1, ticket_id_2, link)
if res:
if "id" in res[0]:
result = output_format(res, 'Link', 'Link successfully added to ticket ID : ' + str(ticket_id_1))
return result
else:
raise DemistoException('Error when trying to add link: ' + str(res))
def create_ticket_command(client, args):
additional_fields = split_fields(str(args.get('additional_fields', '')), ';')
ticket_data = ticket_format(args)
if additional_fields:
ticket_data.update(additional_fields)
# create ticket
ticket = client.create_ticket(ticket_data)
ticket_id = ticket[0].get('id')
# upload files
entries = args.get('entryid')
if entries:
upload_files(client, entries, ticket_id, None, "Document Ticket " + str(ticket_id))
result = output_format(ticket, 'Ticket', 'Ticket successfully created')
return result
def update_ticket_command(client, args):
ticket_id = args.get('id')
additional_fields = split_fields(str(args.get('additional_fields', '')), ';')
ticket_data = ticket_format(args)
if additional_fields:
ticket_data.update(additional_fields)
res = client.update_ticket(ticket_data)
# upload files
entries = args.get('entryid')
if entries:
upload_files(client, entries, ticket_data['id'], None, "Document Ticket " + str(ticket_data['id']))
if res[0][str(ticket_id)] is True:
return output_format(res, 'Ticket', 'Ticket successfully updated')
raise DemistoException('Error when trying to update ticket ' + ticket_id + ': ' + str(res))
def delete_ticket_command(client, args):
ticket_id = args.get('ticket_id')
purge = args.get('purge')
res = client.delete_ticket(ticket_id, purge)
if res[0][str(ticket_id)] is True:
return 'Ticket ID ' + str(ticket_id) + ' successfully deleted'
raise DemistoException('Error when trying to delete ticket ' + ticket_id + ': ' + str(res))
def get_ticket_command(client, args):
ticket_id = args.get('ticket_id')
res = client.get_ticket(ticket_id)
res['requester_users'], res['assigned_users'], res['watcher_users'] = get_ticket_users_helper(client, ticket_id)
res['requester_groups'], res['assigned_groups'], res['watcher_groups'] = get_ticket_groups_helper(client, ticket_id)
comments = client.get_ticket_comments(ticket_id)
for comment in comments:
html = unescape(comment.get('content'))
comment['content'] = html
res['comments'] = comments
if argToBoolean(args.get('get_attachments', 'false')):  # command arguments arrive as strings, so convert explicitly
files_entries = get_ticket_docs_helper(client, ticket_id)
for file in files_entries:
demisto.results(file)
result = output_format(res, "Ticket")
return result
def get_item_command(client, args):
item_id = args.get('item_id')
item_type = args.get('item_type')
res = client.get_item(item_type, item_id)
result = output_format(res, item_type)
return result
def search_command(client, args):
item_type = args.get('item_type')
query = argToList(args.get('query'))
forcedisplay = args.get('forcedisplay')
if not query:
query = []
if not forcedisplay:
forcedisplay = []
res = client.search(item_type, query, forcedisplay)
if res:
keys = list(res[0].keys())
output_type = keys[0].split(".")[0]
key_list = []
my_output = {}
for key in keys:
key_list.append(key.replace(output_type + '.', ''))
my_output[key.replace(output_type + '.', '')] = res[0][key]
result = []
result.append(CommandResults(outputs_prefix='GLPI.Search.' + output_type,
outputs_key_field=key_list,
outputs=my_output,
raw_response=my_output,
readable_output=tableToMarkdown(name='GLPI Search', t=my_output, headers=key_list)))
return result
else:
return "Nothing found"
def fetch_incidents(client, last_run, max_results, first_fetch_time):
"""
This function will execute each interval (default is 1 minute).
Args:
client (Client): GLPI client
last_run (dateparser.time): The greatest incident created_time we fetched from last fetch
max_results (int): Maximum numbers of incidents per fetch
first_fetch_time (dateparser.time): If last_run is None then fetch all incidents since first_fetch_time
Returns:
next_run: This will be last_run in the next fetch-incidents
incidents: Incidents that will be created in Demisto
"""
# Get the last fetch time, if exists
last_fetch = last_run.get('last_fetch')
# Handle first time fetch
if last_fetch is None:
last_fetch = dateparser.parse(first_fetch_time)
else:
last_fetch = dateparser.parse(last_fetch)
latest_created_time = dateparser.parse(last_fetch.strftime('%Y-%m-%d %H:%M:%S'))
search_date = last_fetch.strftime('%Y-%m-%d %H:%M:%S')
incidents = []
demisto.info(f'Fetching GLPI tickets since: {str(search_date)}')
items = client.list_incidents(search_date)
for item in items:
ticket_id = item.get('2')
ticket = client.get_ticket(ticket_id)
ticket['requester_users'], ticket['assigned_users'], ticket['watcher_users'] = get_ticket_users_helper(client, ticket_id) # noqa: E501
ticket['requester_groups'], ticket['assigned_groups'], ticket['watcher_groups'] = get_ticket_groups_helper(client, ticket_id) # noqa: E501
ticket['content'] = unescape(ticket['content'])
files = []
files_entries = get_ticket_docs_helper(client, ticket_id)
for file in files_entries:
files.append({
'path': file.get('FileID', ''),
'name': file.get('File', '')
})
incident_created_time = dateparser.parse(ticket['date'])
ticket['mirror_direction'] = MIRROR_DIRECTION.get(demisto.params().get('mirror_direction'))
ticket['mirror_instance'] = demisto.integrationInstance()
ticket['mirror_tags'] = [
demisto.params().get('comment_tag'),
demisto.params().get('file_tag'),
demisto.params().get('work_notes_tag')
]
demisto.debug(f'Incident with ID {ticket_id} and name {ticket["name"]} occured: {str(incident_created_time.strftime(DATE_FORMAT))}') # type: ignore[union-attr] # noqa: E501
incident = {
'name': ticket['name'],
'occurred': incident_created_time.strftime(DATE_FORMAT), # type: ignore[union-attr]
'attachment': files,
'rawJSON': json.dumps(ticket)
}
incidents.append(incident)
# Update last run and add incident if the incident is newer than last fetch
if incident_created_time > latest_created_time: # type: ignore[operator]
latest_created_time = incident_created_time
if len(incidents) >= max_results:
demisto.debug('max_results reached')
break
next_run = {'last_fetch': latest_created_time.strftime(DATE_FORMAT)} # type: ignore[union-attr]
return next_run, incidents
def get_mapping_fields_command() -> GetMappingFieldsResponse:
"""
Returns the list of fields for an incident type.
Args:
client: XSOAR client to use
Returns: Dictionary with keys as field names
"""
mapping_response = GetMappingFieldsResponse()
incident_type_scheme = SchemeTypeMapping(type_name="GLPI Incident")
for field in GLPI_ARGS:
incident_type_scheme.add_field(field)
mapping_response.add_scheme_type(incident_type_scheme)
request_type_scheme = SchemeTypeMapping(type_name="GLPI Request")
for field in GLPI_ARGS:
request_type_scheme.add_field(field)
mapping_response.add_scheme_type(request_type_scheme)
return mapping_response
def get_remote_data_command(client, args, params={}):
"""
get-remote-data command: Returns an updated incident and entries
Args:
client: XSOAR client to use
args:
id: incident id to retrieve
lastUpdate: when was the last time we retrieved data
Returns:
List[Dict[str, Any]]: first entry is the incident (which can be completely empty) and the new entries.
"""
parsed_args = GetRemoteDataArgs(args)
# ticket_id = args.get('id', '')
ticket_id = parsed_args.remote_incident_id
last_update = args.get('lastUpdate')
demisto.debug(f'Getting update for remote id {ticket_id} with last_update: {str(last_update)}')
formated_date = last_update.replace('T', ' ').split('.')[0]
try:
new_incident_data = client.get_ticket(ticket_id)
entries = []
demisto.debug(f'fetch files for ticket with id {ticket_id}')
ticket_docs = client.get_ticket_docs(ticket_id)
if ticket_docs:
for ticket_doc in ticket_docs:
if ticket_doc.get('date_mod') > formated_date:
document = client.get_item('Document', ticket_doc.get('documents_id'))
if '_mirrored_from_xsoar' not in document.get('filename'):
file = client.download_document(ticket_doc.get('documents_id'), filename=document.get('filename'))
demisto.debug(f'file {document.get("filename")} fetched for ticket with id {ticket_id}')
filename = os.path.split(file)[1]
f = open(file, 'rb')
data = f.read()
entries.append(fileResult(filename, data))
comments_result = client.get_ticket_comments(ticket_id)
if comments_result:
for note in comments_result:
if 'Mirrored from Cortex XSOAR' not in note.get('content') and note.get('date_mod') > formated_date:
comments_context = {'comments_and_work_notes': unescape(note.get('content'))}
entries.append({
'ContentsFormat': formats['html'],
'Type': entryTypes['note'],
'Contents': unescape(note.get('content')),
'Note': True,
'EntryContext': comments_context
})
demisto.debug(f'Pull result is {new_incident_data}')
return GetRemoteDataResponse(new_incident_data, entries)
except Exception as e:
raise DemistoException(f'Error in incoming mirror for incident id {ticket_id}. :Error message: {str(e)}')
def update_remote_system_command(client: Client, args: Dict[str, Any]) -> str:
"""update-remote-system command: pushes local changes to the remote system
:type client: ``Client``
:param client: XSOAR client to use
:type args: ``Dict[str, Any]``
:param args:
all command arguments, usually passed from ``demisto.args()``.
``args['data']`` the data to send to the remote system
``args['entries']`` the entries to send to the remote system
``args['incidentChanged']`` boolean telling us if the local incident indeed changed or not
``args['remoteId']`` the remote incident id
:return:
``str`` containing the remote incident id - really important if the incident is newly created remotely
:rtype: ``str``
"""
parsed_args = UpdateRemoteSystemArgs(args)
if parsed_args.delta:
demisto.debug(f'Got the following delta keys {str(list(parsed_args.delta.keys()))}')
demisto.debug(f'Sending incident with remote ID [{parsed_args.remote_incident_id}] to remote system\n')
new_incident_id: str = parsed_args.remote_incident_id
updated_incident = {}
if not parsed_args.remote_incident_id or parsed_args.incident_changed:
if parsed_args.remote_incident_id:
old_incident = client.get_ticket(parsed_args.remote_incident_id)
for changed_key in parsed_args.delta.keys():
if changed_key in TICKET_FIELDS:
old_incident[changed_key] = parsed_args.delta[changed_key] # type: ignore
parsed_args.data = old_incident
else:
parsed_args.data['createInvestigation'] = True
updated_incident = client.update_ticket(parsed_args.data)
else:
demisto.debug(f'Skipping updating remote incident fields [{parsed_args.remote_incident_id}] as it is '
f'not new nor changed.')
# Close incident if relevant
if updated_incident and parsed_args.inc_status == IncidentStatus.DONE:
demisto.debug(f'Closing remote incident {new_incident_id}')
client.close_ticket(new_incident_id)
entries = parsed_args.entries
if entries:
demisto.debug(f'New entries {entries}')
for entry in entries:
demisto.debug(f'Sending entry {entry.get("id")}, type: {entry.get("type")}')
# Mirroring files as entries
if entry.get('type') == 3:
path_res = demisto.getFilePath(entry.get('id'))
demisto.debug('path res' + str(path_res))
full_file_name = path_res.get('name')
file_name, file_extension = os.path.splitext(full_file_name)
if not file_extension:
file_extension = ''
up = client.upload_document(file_name + '_mirrored_from_xsoar' + file_extension, path_res.get('path'))
client.link_document_to_ticket(up['id'], new_incident_id)
else:
# Mirroring comment and work notes as entries
user = entry.get('user', 'dbot') or 'dbot'
text = f"({user}): {str(entry.get('contents', ''))}\n\n Mirrored from Cortex XSOAR"
client.add_comment(new_incident_id, text)
return new_incident_id
def get_modified_remote_data_command(client, args, mirror_limit):
remote_args = GetModifiedRemoteDataArgs(args)
last_update = remote_args.last_update
# last_update_utc = dateparser.parse(last_update, settings={'TIMEZONE': 'UTC'}) # convert to utc format
search_range = '0-' + str(mirror_limit)
raw_incidents = client.modified_incidents(last_update, search_range)
modified_incident_ids = list()
for raw_incident in raw_incidents:
incident_id = str(raw_incident.get('2'))
modified_incident_ids.append(incident_id)
return GetModifiedRemoteDataResponse(modified_incident_ids)
def main():
"""
parse and validate integration params
"""
command_list: Dict[str, Any] = {
'glpi-create-ticket': create_ticket_command,
'glpi-update-ticket': update_ticket_command,
'glpi-delete-ticket': delete_ticket_command,
'glpi-get-ticket': get_ticket_command,
'glpi-get-item': get_item_command,
'glpi-add-comment': add_comment_command,
'glpi-add-link': add_link_command,
'glpi-upload-file': upload_file_command,
'glpi-search': search_command,
'glpi-create-user': create_user_command,
'glpi-update-user': update_user_command,
'glpi-delete-user': delete_user_command,
'glpi-enable-user': enable_user_command,
'glpi-disable-user': disable_user_command,
'glpi-get-username': get_user_name_command,
'glpi-get-userid': get_user_id_command,
'get-remote-data': get_remote_data_command
}
params = {
'base_url': urljoin(demisto.params().get('url', ''), ''),
'app_token': demisto.params().get('app_token', ''),
'auth_token': demisto.params().get('user_token', ''),
'verify': not demisto.params().get('insecure', False),
'first_fetch_time': demisto.params().get('fetch_time', '3 days').strip(),
'mirror_limit': demisto.params().get('mirror_limit', '100'),
'proxy': demisto.params().get('proxy', False)
}
cmd = demisto.command()
if cmd == "test-module":
return_results(test_module(params))
try:
client = Client(params)
if cmd == 'get-mapping-fields':
return_results(get_mapping_fields_command())
elif cmd == 'get-modified-remote-data':
return_results(get_modified_remote_data_command(client, demisto.args(), params['mirror_limit']))
elif cmd == 'update-remote-system':
return_results(update_remote_system_command(client, demisto.args()))
elif cmd == 'fetch-incidents':
# Convert the argument to an int using helper function or set to MAX_INCIDENTS_TO_FETCH
max_results = arg_to_number(
arg=demisto.params().get('max_fetch'),
arg_name='max_fetch',
required=False
)
if not max_results or max_results > MAX_INCIDENTS_TO_FETCH:
max_results = MAX_INCIDENTS_TO_FETCH
# Set and define the fetch incidents command to run after activated via integration settings.
new_run, incidents = fetch_incidents(
client=client,
last_run=demisto.getLastRun(),
max_results=max_results,
first_fetch_time=params['first_fetch_time'])
demisto.setLastRun(new_run)
demisto.incidents(incidents)
elif cmd in command_list.keys():
return_results(command_list[cmd](client, demisto.args()))
else:
raise DemistoException('Command "%s" not implemented' % cmd)
# Log exceptions
except Exception as e:
return_error(f'Failed to execute {demisto.command()} command. Error: {str(e)}')
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Scripts/ChronicleIsolatedIPWidgetScript/ChronicleIsolatedIPWidgetScript_test.py
```python
from unittest.mock import patch
import demistomock as demisto
import ChronicleIsolatedIPWidgetScript
INDICATOR_DATA = {'indicator': {'CustomFields': {'chronicleassetip': '0.0.0.0',
'chronicleisolatedip': 'No'}}}
def test_main_success(mocker):
"""
When main function is called, get_html_representation should be called.
"""
mocker.patch.object(demisto, 'args', return_value=INDICATOR_DATA)
mocker.patch.object(ChronicleIsolatedIPWidgetScript, 'get_html_representation',
return_value='')
ChronicleIsolatedIPWidgetScript.main()
assert ChronicleIsolatedIPWidgetScript.get_html_representation.called
@patch('ChronicleIsolatedIPWidgetScript.return_error')
def test_main_failure(mock_return_error, capfd, mocker):
"""
When main function gets some exception then valid message should be printed.
"""
mocker.patch.object(demisto, 'args', return_value=INDICATOR_DATA)
mocker.patch.object(ChronicleIsolatedIPWidgetScript, 'get_html_representation',
side_effect=Exception)
with capfd.disabled():
ChronicleIsolatedIPWidgetScript.main()
mock_return_error.assert_called_once_with('Could not load widget:\n')
def test_get_html_representation_when_no_ip_is_attached():
"""
When no ip is attached, get_html_representation should return html representation accordingly.
"""
html_representation = ChronicleIsolatedIPWidgetScript.get_html_representation("", 'No')
assert "<div style='color:grey; text-align:center;'><h1>No IP Address associated with the ChronicleAsset</h1></div>"\
== html_representation
def test_get_html_representation_when_ip_is_not_isolated():
"""
When ip is not isolated, get_html_representation should return html representation accordingly.
"""
html_representation = ChronicleIsolatedIPWidgetScript.get_html_representation('0.0.0.0', 'No')
assert "<div style='color:green; text-align:center;'><h1>0.0.0.0<br/>IP Address Not Isolated</h1>" \
"</div>" == html_representation
def test_get_html_representation_when_ip_is_potentially_isolated():
"""
When ip is potentially isolated, get_html_representation should return html representation accordingly.
"""
html_representation = ChronicleIsolatedIPWidgetScript\
.get_html_representation('0.0.0.0', 'Yes')
assert "<div style='color:red; text-align:center;'><h1>0.0.0.0<br/>IP Address Isolated</h1>" \
"</div>" == html_representation
```
#### File: Scripts/InvestigationSummaryParse/InvestigationSummaryParse_test.py
```python
import json
from pathlib import Path
import pytest
from CommonServerPython import CommandResults
from InvestigationSummaryParse import parse_command, KillChain, Result, Source
TEST_DATA_DIR = Path(__file__).parent / 'test_data'
def _list_to_context(command_results: list[CommandResults]):
return [result.to_context() for result in command_results]
def _dump_test_file(file_name: str, content: dict):
(TEST_DATA_DIR / file_name).write_text(json.dumps(content))
def _load_test_file(file_name: str):
return json.loads((TEST_DATA_DIR / file_name).read_text())
def test_empty():
"""
Given an empty context
When calling parse_command
Then make sure the result is the default
"""
assert _list_to_context(parse_command(context={})) == _load_test_file('empty_context.json')
@pytest.mark.parametrize('mocked_context,result,sources,search_value,test_index',
[
({}, Result.NOT_DETECTED, set(), '', 0),
({'incident': {'mitretacticname': 'hello'}}, Result.SUSPICIOUS, {Source.EDR}, 'hello', 1),
({'incident': {'mitretacticname': 'hello'},
'csfalconx': {'resource': {'sandbox': {'mitre_attacks': {'tactic': 'hello'}}}}},
Result.SUSPICIOUS, {Source.SANDBOX, Source.EDR}, 'hello', 2),
({'MITREATTACK': [{'value': 'hello'}]}, Result.SUSPICIOUS, {Source.EDR}, 'hello', 3),
])
def test_kill_chain(mocked_context: dict, result: Result, sources: set[Source], search_value: str, test_index: int):
"""
Given a class inheriting from KillChain
When instantiating
Then check the result of KillChain._parse_context
"""
class NewKillChain(KillChain):
def __init__(self, context: dict):
super().__init__(
name='dummy',
tactic='tactic',
context=context,
search_value=search_value
)
test_object = NewKillChain(mocked_context)
assert test_object.result == result
assert test_object.sources == sources
assert test_object.to_context() == _load_test_file(f'kill_chain_{test_index}.json')
```
#### File: Scripts/ReadNetstatFileWrapper/ReadNetstatFileWrapper.py
```python
from CommonServerPython import *
def main():
try:
if 'CrowdStrike' in demisto.context():
return_results(demisto.executeCommand('ReadNetstatFile', {}))
else:
return_results('No data on Netstat found')
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute ReadNetstatFileWrapper. Error: {str(e)}')
if __name__ == '__main__':
main()
```
#### File: Scripts/RemoveFileWrapper/RemoveFileWrapper_test.py
```python
import pytest
import CommonServerPython
import RemoveFileWrapper
test_data = [CommonServerPython.CommandRunner.Result(command='endpoint',
args={
'id': 'device1,device2',
'using-brand': 'CrowdstrikeFalcon'},
brand='CrowdstrikeFalcon',
instance='CrowdstrikeFalcon_instance_1',
result={'errors': [], 'resources': [{
'device_id': 'device1',
'platform_name': 'Windows'}, {
'device_id': 'device2',
'platform_name': 'Linux'}]}
)]
def test_get_crowdstrike_os_to_id(mocker):
"""
Given:
A list of devices ids which are on crowdstrike
When:
Getting the os for each device
Then:
Return a mapping between an OS and the set of device ids that run on that OS.
"""
from RemoveFileWrapper import get_crowdstrike_os_to_id
mocker.patch.object(RemoveFileWrapper.CommandRunner, 'execute_commands',
return_value=(test_data, []))
os_to_id = get_crowdstrike_os_to_id(['device2', 'device1'])
assert os_to_id == {'Windows': {'device1'}, 'Linux': {'device2'}}
def test_create_command_executors(mocker):
"""
Given:
device ids, a file path and a file hash
When:
Calling `create_commands` to get all the commands for the script.
Then:
Ensure the right commands are returned.
"""
from RemoveFileWrapper import demisto, create_commands
device_ids = ['device1',
'device2',
'device3',
'device4']
file_path = 'filepath'
file_hash = 'filehash'
mock_incident_id = 123
mocker.patch.object(demisto, 'incident', return_value={'id': mock_incident_id})
mocker.patch.object(RemoveFileWrapper, 'get_crowdstrike_os_to_id', return_value={'Windows': {'device1', 'device2'},
'Linux': {'device3'},
'Mac': {'device4'}})
commands = create_commands(device_ids, file_path, file_hash)
assert len(commands) == 3
for command in commands:
command_names = set(command.commands)
if 'xdr-run-script-delete-file' in command_names:
assert command_names == {'xdr-run-script-delete-file'}
assert command.args_lst == [{'endpoint_ids': ','.join(device_ids),
'file_path': file_path}]
if 'cs-falcon-rtr-remove-file' in command_names:
assert len(command.commands) == 3
assert len(command.args_lst) == 3
assert command_names == {'cs-falcon-rtr-remove-file'}
assert set(command.args_lst[0].get('host_ids', '').split(',')) == {'device1', 'device2'}
assert command.args_lst[1:] == [
{'host_ids': 'device3',
'file_path': file_path,
'os': 'Linux'
},
{'host_ids': 'device4',
'file_path': file_path,
'os': 'Mac',
}
]
if 'microsoft-ato-stop-and-quarantine-file' in command_names:
assert command.commands == ['microsoft-ato-stop-and-quarantine-file']
assert command.args_lst == [{'machine_id': 'device1,device2,device3,device4',
'file_hash': 'filehash',
'comment': f'Action was taken by Cortex XSOAR - incident #{mock_incident_id}'}]
@pytest.mark.parametrize('approved', ('yes', 'no', False))
def test_approve_action(mocker, approved: bool):
"""
Given a value for the `approve_action` argument
When calling main()
Then make sure an error is raised only when it should (when approved = 'no')
"""
from RemoveFileWrapper import demisto, main
mocker.patch.object(RemoveFileWrapper, 'run_remove_file', return_value=None)
mocker.patch.object(demisto, 'error', return_value=None)
return_error = mocker.patch.object(RemoveFileWrapper, 'return_error', return_value=None)
mocker.patch.object(demisto, 'args',
return_value={'approve_action': approved, 'device_ids': ['device1'], 'file_path': 'file_path'})
main()
assert return_error.call_count == int(approved != 'yes')
@pytest.mark.parametrize('file_path', ('', 'path'))
@pytest.mark.parametrize('file_hash', ('', 'hash'))
def test_created_command_count(mocker, file_path: str, file_hash: str):
"""
Given the file_path and file_hash arguments
When calling create_commands
Then make sure the number of command created is correct
"""
from RemoveFileWrapper import demisto, create_commands
mocker.patch.object(RemoveFileWrapper, 'run_remove_file', return_value=None)
mocker.patch.object(demisto, 'error', return_value=None)
mocker.patch.object(demisto, 'args',
return_value={'approve_action': 'yes', 'device_ids': ['device1'], 'file_path': 'file_path'})
path_based_commands = {'xdr', 'falcon'}
hash_based_commands = {'microsoft_atp'}
expected_commands = (path_based_commands if file_path else set()) | (hash_based_commands if file_hash else set())
commands = create_commands(['id'], file_path, file_hash)
assert len(commands) == len(expected_commands)
```
#### File: Integrations/NetskopeAPIv1/NetskopeAPIv1.py
```python
from copy import deepcopy
from typing import Any, Dict, List, Optional, Tuple
from urllib.parse import urljoin
import urllib3
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
# disable insecure warnings
urllib3.disable_warnings()
DEFAULT_PAGE = 1
DEFAULT_LIMIT = 50
DEFAULT_MAX_FETCH = DEFAULT_LIMIT
DEFAULT_EVENTS_FETCH = DEFAULT_LIMIT
DEFAULT_EVENT_TYPE = 'application'
DEFAULT_FIRST_FETCH = '7 days'
MAX_LIMIT = 100
MAX_FETCH = 200
MAX_EVENTS_FETCH = 200
TIME_PERIOD_MAPPING = {
'Last 60 Minutes': 3600,
'Last 24 Hours': 86400,
'Last 7 Days': 604800,
'Last 30 Days': 2592000,
'Last 60 Days': 5184000,
'Last 90 Days': 7776000
}
class Client(BaseClient):
"""
Client for Netskope RESTful API.
Args:
base_url (str): The base URL of Netskope.
token (str): The token to authenticate against Netskope API.
use_ssl (bool): Specifies whether to verify the SSL certificate or not.
use_proxy (bool): Specifies if to use XSOAR proxy settings.
"""
def __init__(self, base_url: str, token: str, use_ssl: bool, use_proxy: bool):
super().__init__(urljoin(base_url, '/api/v1/'), verify=use_ssl, proxy=use_proxy)
self._session.params['token'] = token
def list_events_request(self,
query: Optional[str] = None,
event_type: Optional[str] = None,
timeperiod: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
insertion_start_time: Optional[int] = None,
insertion_end_time: Optional[int] = None,
limit: Optional[int] = None,
skip: Optional[int] = None,
unsorted: Optional[bool] = None) -> Dict[str, Any]:
"""
Get events extracted from SaaS traffic and or logs.
Args:
query (Optional[str]): Free query to filter the events.
event_type (Optional[str]): Select events by their type.
timeperiod (Optional[int]): Get all events from a certain time period.
start_time (Optional[int]): Restrict events to those that have timestamps greater than the provided timestamp.
end_time (Optional[int]): Restrict events to those that have timestamps less than or equal to the provided timestamp.
insertion_start_time (Optional[int]): Restrict events to those that were inserted to the system
after the provided timestamp.
insertion_end_time (Optional[int]): Restrict events to those that were inserted to the system
before the provided timestamp.
limit (Optional[int]): The maximum amount of events to retrieve (up to 10000 events).
skip (Optional[int]): The skip number of the events to retrieve (minimum is 1).
unsorted (Optional[bool]): If true, the returned data will not be sorted (useful for improved performance).
Returns:
Dict[str, Any]: Netskope events.
"""
body = remove_empty_elements({
'query': query,
'type': event_type,
'timeperiod': timeperiod,
'starttime': start_time,
'endtime': end_time,
'insertionstarttime': insertion_start_time,
'insertionendtime': insertion_end_time,
'limit': limit,
'skip': skip,
'unsorted': unsorted
})
return self._http_request(method='POST', url_suffix='events', json_data=body)
def list_alerts_request(self,
query: Optional[str] = None,
alert_type: Optional[str] = None,
acked: Optional[bool] = None,
timeperiod: Optional[int] = None,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
insertion_start_time: Optional[int] = None,
insertion_end_time: Optional[int] = None,
limit: Optional[int] = None,
skip: Optional[int] = None,
unsorted: Optional[bool] = None) -> Dict[str, Any]:
"""
Get alerts generated by Netskope, including policy, DLP, and watch list alerts.
Args:
query (Optional[str]): Free query to filter the alerts.
alert_type (Optional[str]): Select alerts by their type.
acked (Optional[bool]): Whether to retrieve acknowledged alerts or not.
timeperiod (Optional[int]): Get alerts from certain time period.
start_time (Optional[int]): Restrict alerts to those that have timestamps greater than the provided timestamp.
end_time (Optional[int]): Restrict alerts to those that have timestamps less than or equal to the provided timestamp.
insertion_start_time (Optional[int]): Restrict alerts which have been inserted into the system
after the provided timestamp.
insertion_end_time (Optional[int]): Restrict alerts which have been inserted into the system
before the provided timestamp.
limit (Optional[int]): The maximum number of alerts to return (up to 10000).
skip (Optional[int]): The skip number of the alerts to retrieve (minimum is 1).
unsorted (Optional[bool]): If true, the returned data will not be sorted (useful for improved performance).
Returns:
Dict[str, Any]: Netskope alerts.
"""
body = remove_empty_elements({
'query': query,
'alert_type': alert_type,
'acked': acked,
'timeperiod': timeperiod,
'starttime': start_time,
'endtime': end_time,
'insertionstarttime': insertion_start_time,
'insertionendtime': insertion_end_time,
'limit': limit,
'skip': skip,
'unsorted': unsorted
})
return self._http_request(method='POST', url_suffix='alerts', json_data=body)
def list_quarantined_files_request(self,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
limit: Optional[int] = None,
skip: Optional[int] = None) -> Dict[str, Any]:
"""
List all quarantined files.
Args:
start_time (Optional[int]): Get files last modified within a certain time period.
end_time (Optional[int]): Get files last modified within a certain time period.
limit (Optional[int]): The maximum number of files to retrieve (up to 10000).
skip (Optional[int]): The number of files to skip over (minimum is 1).
Returns:
Dict[str, Any]: Netskope quarantine files.
"""
body = remove_empty_elements({
'starttime': start_time,
'endtime': end_time,
'limit': limit,
'skip': skip,
'op': 'get-files'
})
return self._http_request(method='POST', url_suffix='quarantine', json_data=body)
def get_quarantined_file_request(self, quarantine_profile_id: str, file_id: str) -> bytes:
"""
Download a quarantined file.
Args:
quarantine_profile_id (str): The ID of quarantine profile.
file_id (str): The ID of the quarantined file.
Returns:
bytes: The quarantined file content.
"""
body = {
'quarantine_profile_id': quarantine_profile_id,
'file_id': file_id,
'op': 'download-url'
}
return self._http_request(method='POST',
url_suffix='quarantine',
json_data=body,
resp_type='content')
def update_quarantined_file_request(self, quarantine_profile_id: str, file_id: str,
action: str) -> None:
"""
Take an action on a quarantined file.
Args:
quarantine_profile_id (str): The profile id of the quarantined file.
file_id (str): The id of the quarantined file.
action (str): Action to be performed on a quarantined.
"""
body = {
'quarantine_profile_id': quarantine_profile_id,
'file_id': file_id,
'action': action,
'op': 'take-action'
}
self._http_request(method='POST', url_suffix='quarantine', json_data=body, resp_type='text')
def update_url_list_request(self, name: str, urls: List[str]) -> None:
"""
Update the URL List with the values provided.
Args:
name (str): Name of an existing URL List as shown in the Netskope UI on the URL List page.
urls (List[str]): The content of the URL list.
"""
body = {'name': name, 'list': ','.join(urls)}
self._http_request(method='POST', url_suffix='updateUrlList', json_data=body)
def update_file_hash_list_request(self, name: str, hashes: List[str]) -> None:
"""
Update file hash list with the values provided.
Args:
name (str): Name of an existing file hash list as shown in the Netskope UI on the file hash list page.
hashes (str): List of file hashes (md5 or sha256).
"""
body = {'name': name, 'list': ','.join(hashes)}
return self._http_request(method='POST', url_suffix='updateFileHashList', json_data=body)
def list_clients_request(self,
query: Optional[str] = None,
limit: Optional[int] = None,
skip: Optional[int] = None) -> Dict[str, Any]:
"""
Get information about the Netskope clients.
Args:
query (Optional[str]): Free query on the clients, based on the client fields.
limit (Optional[int]): The maximum amount of clients to retrieve (up to 10000).
skip (Optional[int]): The skip number of the clients to retrieve (minimum is 1).
Returns:
Dict[str, Any]: The clients information.
"""
body = remove_empty_elements({'query': query, 'limit': limit, 'skip': skip})
return self._http_request(method='POST', url_suffix='clients', params=body)
def _http_request(self, *args, **kwargs):
response = super()._http_request(*args, **kwargs)
if isinstance(response, dict) and 'errors' in response:
errors = '\n'.join(response['errors'])
raise DemistoException(f'Invalid API call: {errors}', res=response)
return response
def arg_to_boolean(arg: Optional[str]) -> Optional[bool]:
"""
Converts an XSOAR argument to a Python boolean or None.
Args:
arg (Optional[str]): The argument to convert.
Returns:
Optional[bool]: A boolean if arg can be converted,
or None if arg is None.
"""
if arg is None:
return None
return argToBoolean(arg)
def arg_to_seconds_timestamp(arg: Optional[str]) -> Optional[int]:
"""
Converts an XSOAR date string argument to a timestamp in seconds.
Args:
arg (Optional[str]): The argument to convert.
Returns:
Optional[int]: A timestamp if arg can be converted,
or None if arg is None.
"""
if arg is None:
return None
return date_to_seconds_timestamp(arg_to_datetime(arg))
def date_to_seconds_timestamp(date_str_or_dt: Union[str, datetime]) -> int:
"""
Converts date string or datetime object to a timestamp in seconds.
Args:
date_str_or_dt (Union[str, datetime]): The datestring or datetime.
Returns:
int: The timestamp in seconds.
"""
return date_to_timestamp(date_str_or_dt) // 1000
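# Illustrative example: date_to_timestamp returns epoch milliseconds, so the floor division by
# 1000 yields whole seconds, e.g. 1640995200000 -> 1640995200.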
def validate_time_arguments(start_time: Optional[int] = None,
end_time: Optional[int] = None,
insertion_start_time: Optional[int] = None,
insertion_end_time: Optional[int] = None,
timeperiod: Optional[int] = None) -> None:
"""
Validates time arguments from the user.
The user must provide one of the following:
- start_time and end_time.
- insertion_start_time and insertion_end_time.
- timeperiod.
Args:
start_time (Optional[int], optional): The start time to fetch from the API.
end_time (Optional[int], optional): The end time to fetch from the API.
insertion_start_time (Optional[int], optional): The insertion start time to fetch from the API.
insertion_end_time (Optional[int], optional): The insertion end time to fetch from the API.
timeperiod (Optional[str], optional): The timeperiod to fetch from the API.
Raises:
DemistoException: The user did not provide valid timestamp.
"""
combination = (all((start_time, end_time)), all(
(insertion_start_time, insertion_end_time)), bool(timeperiod))
if not any(combination):
raise DemistoException('Missing time arguments. Please provide start_time and end_time, '
'or insertion_start_time and insertion_end_time, or timeperiod.')
if combination.count(True) > 1:
raise DemistoException(
'Invalid time arguments. Please provide only start_time and end_time, '
'or insertion_start_time and insertion_end_time, or timeperiod. '
'These options must not be combined.')
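# Illustrative examples: timeperiod=3600 alone is valid; start_time and end_time together with
# timeperiod raises DemistoException because more than one of the mutually exclusive options is
# set; start_time without end_time also raises, since no complete option is provided.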
def validate_fetch_params(max_fetch: int, max_events_fetch: int, fetch_events: bool,
first_fetch: str, event_types: List[str]) -> None:
"""
Validates the parameters for fetch incident command.
Args:
max_fetch: (int): The maximum number of incidents for one fetch.
max_events_fetch (int) The maximum number of events per incident for one fetch.
fetch_events (bool): Whether or not fetch events when fetching incident.
first_fetch (str): First fetch time in words (e.g. '7 days').
event_types (List[str]): The event types to fetch for each incident.
"""
if first_fetch:
arg_to_datetime(first_fetch) # verify that it is a date.
if max_fetch > MAX_FETCH:
return_error(f'The Maximum number of incidents per fetch should not exceed {MAX_FETCH}.')
if fetch_events and max_events_fetch > MAX_EVENTS_FETCH:
return_error(
f'The Maximum number of events for each incident per fetch should not exceed {MAX_EVENTS_FETCH}.'
)
if not isinstance(event_types, list):
return_error('The fetched event types must be a list.')
def get_pagination_readable_message(header: str, page: int, limit: int) -> str:
return f'{header}\n Current page size: {limit}\n Showing page {page} out of others that may exist.'
def get_pagination_arguments(args: Dict[str, Any]) -> Tuple[int, int, int]:
"""
Gets and validates pagination arguments for client (skip and limit).
Args:
args (Dict[str, Any]): The command arguments (page and limit).
Returns:
Tuple[int, int, int]: The page, the calculated skip, and the limit after validation.
"""
page = arg_to_number(args.get('page', DEFAULT_PAGE))
limit = arg_to_number(args.get('limit', DEFAULT_LIMIT))
if page < 1:
raise DemistoException('Page argument must be at least 1')
if not 1 <= limit <= MAX_LIMIT:
raise DemistoException(f'Limit argument must be between 1 and {MAX_LIMIT}')
return page, (page - 1) * limit, limit
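# Illustrative example (hypothetical arguments, assuming MAX_LIMIT is at least 50): with
# args={'page': '3', 'limit': '50'} the helper returns (3, 100, 50), i.e. the client is asked
# to skip the first (3 - 1) * 50 = 100 records and return the next 50.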
def list_events_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Get events extracted from SaaS traffic and/or logs.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
query = args.get('query')
event_type = args['event_type']
timeperiod = TIME_PERIOD_MAPPING.get(args.get('timeperiod'))
start_time = arg_to_seconds_timestamp(args.get('start_time'))
end_time = arg_to_seconds_timestamp(args.get('end_time'))
insertion_start_time = arg_to_seconds_timestamp(args.get('insertion_start_time'))
insertion_end_time = arg_to_seconds_timestamp(args.get('insertion_end_time'))
page, skip, limit = get_pagination_arguments(args)
unsorted = arg_to_boolean(args.get('unsorted'))
validate_time_arguments(start_time=start_time,
end_time=end_time,
timeperiod=timeperiod,
insertion_start_time=insertion_start_time,
insertion_end_time=insertion_end_time)
response = client.list_events_request(query=query,
event_type=event_type,
timeperiod=timeperiod,
start_time=start_time,
end_time=end_time,
insertion_start_time=insertion_start_time,
insertion_end_time=insertion_end_time,
limit=limit,
skip=skip,
unsorted=unsorted)
outputs = deepcopy(response['data'])
for event in outputs:
event['event_id'] = event['_id']
event['timestamp'] = timestamp_to_datestring(event['timestamp'] * 1000)
readable_output = tableToMarkdown(
get_pagination_readable_message('Events List:', page=page, limit=limit),
outputs,
removeNull=True,
headers=['event_id', 'timestamp', 'type', 'access_method', 'app', 'traffic_type'],
headerTransform=string_to_table_header)
return CommandResults(outputs_prefix='Netskope.Event',
outputs_key_field='event_id',
outputs=outputs,
readable_output=readable_output,
raw_response=response)
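# Illustrative XSOAR CLI usage for the command above (hypothetical argument values):
# !netskope-event-list event_type=application start_time="2022-01-01T00:00:00Z" end_time="2022-01-02T00:00:00Z" limit=10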
def list_alerts_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Get alerts generated by Netskope, including policy, DLP, and watch list alerts.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
query = args.get('query')
alert_type = args.get('alert_type')
acked = arg_to_boolean(args.get('acked'))
timeperiod = TIME_PERIOD_MAPPING.get(args.get('timeperiod'))
start_time = arg_to_seconds_timestamp(args.get('start_time'))
end_time = arg_to_seconds_timestamp(args.get('end_time'))
insertion_start_time = arg_to_seconds_timestamp(args.get('insertion_start_time'))
insertion_end_time = arg_to_seconds_timestamp(args.get('insertion_end_time'))
page, skip, limit = get_pagination_arguments(args)
unsorted = arg_to_boolean(args.get('unsorted'))
validate_time_arguments(start_time=start_time,
end_time=end_time,
timeperiod=timeperiod,
insertion_start_time=insertion_start_time,
insertion_end_time=insertion_end_time)
response = client.list_alerts_request(query=query,
alert_type=alert_type,
acked=acked,
timeperiod=timeperiod,
start_time=start_time,
end_time=end_time,
insertion_start_time=insertion_start_time,
insertion_end_time=insertion_end_time,
limit=limit,
skip=skip,
unsorted=unsorted)
outputs = deepcopy(response['data'])
for alert in outputs:
alert['alert_id'] = alert['_id']
alert['timestamp'] = timestamp_to_datestring(alert['timestamp'] * 1000)
readable_output = tableToMarkdown(
get_pagination_readable_message('Alerts List:', page=page, limit=limit),
outputs,
removeNull=True,
headers=['alert_id', 'alert_name', 'alert_type', 'timestamp', 'action'],
headerTransform=string_to_table_header)
return CommandResults(outputs_prefix='Netskope.Alert',
outputs_key_field='alert_id',
outputs=outputs,
readable_output=readable_output,
raw_response=response)
def list_quarantined_files_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
List all quarantined files.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
start_time = arg_to_seconds_timestamp(args.get('start_time'))
end_time = arg_to_seconds_timestamp(args.get('end_time'))
page, skip, limit = get_pagination_arguments(args)
response = client.list_quarantined_files_request(start_time=start_time,
end_time=end_time,
limit=limit,
skip=skip)
outputs = dict_safe_get(response, ['data', 'quarantined'])
for output in outputs:
for file_output in output['files']:
file_output['quarantine_profile_id'] = output['quarantine_profile_id']
file_output['quarantine_profile_name'] = output['quarantine_profile_name']
outputs = sum((output['files'] for output in outputs), [])
readable_header = get_pagination_readable_message('Quarantined Files List:',
page=page,
limit=limit)
readable_output = tableToMarkdown(readable_header,
outputs,
removeNull=True,
headers=[
'quarantine_profile_id', 'quarantine_profile_name',
'file_id', 'original_file_name', 'policy'
],
headerTransform=string_to_table_header)
return CommandResults(outputs_prefix='Netskope.Quarantine',
outputs_key_field='file_id',
outputs=outputs,
readable_output=readable_output,
raw_response=response)
def get_quarantined_file_command(client: Client, args: Dict[str, str]) -> Dict[str, Any]:
"""
Download a quarantined file.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
Dict[str, Any]: A war room file entry containing the downloaded quarantined file.
"""
quarantine_profile_id = args['quarantine_profile_id']
file_id = args['file_id']
response = client.get_quarantined_file_request(quarantine_profile_id=quarantine_profile_id,
file_id=file_id)
return fileResult(filename=f'{file_id}.zip', data=response, file_type=EntryType.FILE)
def update_quarantined_file_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Take an action on a quarantined file.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
quarantine_profile_id = args['quarantine_profile_id']
file_id = args['file_id']
action = args['action']
client.update_quarantined_file_request(quarantine_profile_id=quarantine_profile_id,
file_id=file_id,
action=action)
readable_output = f'## The file {file_id} was successfully {action}ed!'
return CommandResults(readable_output=readable_output)
def update_url_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Update the URL List with the values provided.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
name = args['name']
urls = argToList(args['urls'])
client.update_url_list_request(name=name, urls=urls)
outputs = {'name': name, 'URL': urls}
readable_output = f'URL List {name}:\n{", ".join(urls)}'
return CommandResults(outputs_prefix='Netskope.URLList',
outputs_key_field='name',
outputs=outputs,
readable_output=readable_output)
def update_file_hash_list_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Update file hash list with the values provided.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
name = args.get('name')
hashes = argToList(args.get('hash'))
client.update_file_hash_list_request(name=name, hashes=hashes)
outputs = {'name': name, 'hash': hashes}
readable_output = f'Hash List {name}:\n{", ".join(hashes)}'
return CommandResults(outputs_prefix='Netskope.FileHashList',
outputs_key_field='name',
outputs=outputs,
readable_output=readable_output)
def list_clients_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
Get information about the Netskope clients.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
query = args.get('query')
page, skip, limit = get_pagination_arguments(args)
response = client.list_clients_request(query=query, limit=limit, skip=skip)
outputs = [client['attributes'] for client in response['data']]
for output in outputs:
output['client_id'] = output['_id']
readable_header = get_pagination_readable_message('Clients List:', page=page, limit=limit)
readable_output = tableToMarkdown(
readable_header,
outputs,
removeNull=True,
headers=['client_id', 'client_version', 'device_id', 'user_added_time'],
headerTransform=string_to_table_header)
return CommandResults(outputs_prefix='Netskope.Client',
outputs_key_field='client_id',
outputs=outputs,
readable_output=readable_output,
raw_response=response)
def list_host_associated_user_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
List all users of a certain host by its hostname.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
hostname = args['hostname']
page, skip, limit = get_pagination_arguments(args)
response = client.list_clients_request(query=f'host_info.hostname eq {hostname}',
limit=limit,
skip=skip)
outputs = sum((client['attributes'].get('users') for client in response['data']), [])
for output in outputs:
output['user_id'] = output['_id']
readable_header = get_pagination_readable_message(f'Users Associated With {hostname}:',
page=page,
limit=limit)
readable_output = tableToMarkdown(readable_header,
outputs,
removeNull=True,
headers=['user_id', 'username', 'user_source'],
headerTransform=string_to_table_header)
return CommandResults(outputs_prefix='Netskope.User',
outputs_key_field='user_id',
outputs=outputs,
readable_output=readable_output,
raw_response=response)
def list_user_associated_host_command(client: Client, args: Dict[str, str]) -> CommandResults:
"""
List all hosts related to a certain username.
Args:
client (client): The Netskope client.
args (Dict[str, Any]): Command arguments from XSOAR.
Returns:
CommandResults: Command results with raw response, outputs and readable outputs.
"""
username = args['username']
page, skip, limit = get_pagination_arguments(args)
response = client.list_clients_request(query=f'username eq {username}', limit=limit, skip=skip)
outputs = []
for client_data in response['data']:
attributes = client_data['attributes']
agent_status = dict_safe_get(attributes, ['last_event', 'status'])
outputs.append({'agent_status': agent_status, **attributes['host_info']})
readable_header = get_pagination_readable_message(f'Hosts Associated With {username}:',
page=page,
limit=limit)
readable_output = tableToMarkdown(readable_header,
outputs,
removeNull=True,
headers=['hostname', 'os_version', 'agent_status'],
headerTransform=string_to_table_header)
return CommandResults(outputs_prefix='Netskope.Host',
outputs_key_field='nsdeviceuid',
outputs=outputs,
readable_output=readable_output,
raw_response=response)
def test_module(client: Client, max_fetch: int, first_fetch: str, fetch_events: bool,
max_events_fetch: int, event_types: List[str]) -> str:
"""
Validates all integration parameters and tests the connection to the Netskope instance.
"""
validate_fetch_params(max_fetch, max_events_fetch, fetch_events, first_fetch, event_types)
client.list_alerts_request(limit=1,
skip=0,
start_time=date_to_seconds_timestamp(datetime.now()),
end_time=date_to_seconds_timestamp(datetime.now()))
return 'ok'
def fetch_multiple_type_events(client: Client, max_fetch: int, start_time: int,
event_types: List[str],
query: Optional[str]) -> List[Dict[str, Any]]:
"""
Fetches events from multiple types.
The function makes an API call for each type, since the API requires
specifying the event type.
Args:
client (Client): The Netskope client.
max_fetch (int): The maximum amount of events to fetch for each type.
start_time (int): The time to fetch the events from.
event_types (List[str]): The event types to fetch as incidents.
query (Optional[str]): Query for filtering the events.
Returns:
List[Dict[str, Any]]: The fetched events.
"""
events = []
if event_types:
max_fetch = max_fetch // len(event_types)
for event_type in event_types:
new_events = client.list_events_request(start_time=start_time,
end_time=date_to_seconds_timestamp(datetime.now()),
limit=max_fetch,
unsorted=False,
event_type=event_type,
query=query)['data']
for event in new_events:
event['event_id'] = event['_id']
event['incident_type'] = event_type
events.extend(new_events)
return events
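# Illustrative note: with max_fetch=100 and two event types (e.g. 'application' and 'page'),
# each list_events_request call above is capped at 100 // 2 = 50 events, so the total number
# of events fetched per cycle stays close to the original max_fetch.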
def fetch_incidents(client: Client, max_fetch: int, first_fetch: str, fetch_events: bool,
max_events_fetch: int, event_types: List[str], alerts_query: Optional[str],
events_query: Optional[str]) -> None:
"""
Fetches alerts and events as incidents.
Args:
client (Client): The Netskope client.
max_fetch (int): Maximum number of incidents to fetch.
first_fetch (str): The timestamp to fetch the incidents from.
max_events_fetch (int): Maximum number of events to fetch.
event_types (List[str]): The type of events to fetch.
alerts_query (Optional[str]): Query for filtering the fetched alerts.
events_query (Optional[str]): Query for filtering the fetched events.
"""
validate_fetch_params(max_fetch, max_events_fetch, fetch_events, first_fetch, event_types)
last_run = demisto.getLastRun() or {}
first_fetch = arg_to_seconds_timestamp(first_fetch)
last_alert_time = last_run.get('last_alert_time') or first_fetch
alerts = client.list_alerts_request(start_time=last_alert_time,
end_time=date_to_seconds_timestamp(datetime.now()),
limit=max_fetch,
query=alerts_query,
unsorted=False)['data']
last_event_time = last_run.get('last_event_time') or first_fetch
if fetch_events:
events = fetch_multiple_type_events(client,
max_fetch=max_events_fetch,
start_time=last_event_time,
event_types=event_types,
query=events_query)
else:
events = []
incidents = []
for alert in alerts:
alert['incident_type'] = alert['alert_type']
incidents.append({
'name': alert['alert_name'],
'occurred': timestamp_to_datestring(alert['timestamp'] * 1000),
'rawJSON': json.dumps(alert)
})
for event in events:
incidents.append({
'name': event['event_id'],
'occurred': timestamp_to_datestring(event['timestamp'] * 1000),
'rawJSON': json.dumps(event)
})
# The alerts and events are sorted in descending order.
# Also, the timestamp is incremented by one second to avoid duplicates.
demisto.setLastRun({
'last_alert_time': alerts[0]['timestamp'] + 1 if alerts else last_alert_time,
'last_event_time': events[0]['timestamp'] + 1 if events else last_event_time
})
demisto.incidents(incidents)
def main():
params = demisto.params()
url = params['url']
token = params['<PASSWORD>']['password']
use_ssl = not params.get('insecure', False)
use_proxy = params.get('proxy', False)
max_fetch = arg_to_number(params.get('max_fetch', DEFAULT_MAX_FETCH))
first_fetch = params.get('first_fetch', DEFAULT_FIRST_FETCH)
fetch_events = argToBoolean(params.get('fetch_events', False))
event_types = argToList(params.get('fetch_event_types', DEFAULT_EVENT_TYPE))
max_events_fetch = arg_to_number(params.get('max_events_fetch', DEFAULT_EVENTS_FETCH))
client = Client(url, token, use_ssl, use_proxy)
commands = {
'netskope-event-list': list_events_command,
'netskope-alert-list': list_alerts_command,
'netskope-quarantined-file-list': list_quarantined_files_command,
'netskope-quarantined-file-get': get_quarantined_file_command,
'netskope-quarantined-file-update': update_quarantined_file_command,
'netskope-url-list-update': update_url_list_command,
'netskope-file-hash-list-update': update_file_hash_list_command,
'netskope-client-list': list_clients_command,
'netskope-host-associated-user-list': list_host_associated_user_command,
'netskope-user-associated-host-list': list_user_associated_host_command,
}
try:
command = demisto.command()
if command == 'test-module':
return_results(
test_module(client,
max_fetch=max_fetch,
first_fetch=first_fetch,
fetch_events=fetch_events,
max_events_fetch=max_events_fetch,
event_types=event_types))
elif command == 'fetch-incidents':
fetch_incidents(client,
max_fetch=max_fetch,
first_fetch=first_fetch,
fetch_events=fetch_events,
max_events_fetch=max_events_fetch,
event_types=event_types,
alerts_query=demisto.params().get('alert_query'),
events_query=demisto.params().get('events_query'))
elif command in commands:
return_results(commands[command](client, demisto.args()))
else:
raise NotImplementedError(f'The command {command} does not exist!')
except Exception as e:
demisto.error(traceback.format_exc())
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{e}')
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Integrations/OktaEventCollector/OktaEventCollector_test.py
```python
from OktaEventCollector import ReqParams, Client, Request, GetEvents, Method
import pytest
req_params = ReqParams(since='', sortOrder='ASCENDING', limit='5')
request = Request(method=Method.GET, url='https://testurl.com', headers={}, params=req_params)
client = Client(request)
get_events = GetEvents(client)
id1 = {'uuid': 'a5b57ec5febb'}
id2 = {'uuid': 'a5b57ec5fecc'}
id3 = {'uuid': 'a12f3c5d77f3'}
id4 = {'uuid': 'a12f3c5dxxxx'}
class MockResponse:
def __init__(self, data):
self.data = data
def json(self):
return self.data
@pytest.mark.parametrize("events,ids,result", [
([id1, id2, id3], ['a12f3c5d77f3'], [id1, id2]),
([id1, id2, id3], ['a12f3c5dxxxx'], [id1, id2, id3]),
([], ['a12f3c5d77f3'], []),
([{'uuid': 0}, {'uuid': 1}, {'uuid': 2}, {'uuid': 3}, {'uuid': 4}, {'uuid': 5}, {'uuid': 6}, {'uuid': 7},
{'uuid': 8}, {'uuid': 9}], [0, 4, 7, 9],
[{'uuid': 1}, {'uuid': 2}, {'uuid': 3}, {'uuid': 5}, {'uuid': 6}, {'uuid': 8}])])
def test_remove_duplicates(events, ids, result):
assert get_events.remove_duplicates(events, ids) == result
@pytest.mark.parametrize("events,result", [
([{'published': '2022-04-17T12:31:36.667',
'uuid': '1d0844b6-3148-11ec-9027-a5b57ec5faaa'},
{'published': '2022-04-17T12:32:36.667',
'uuid': '1d0844b6-3148-11ec-9027-a5b57ec5fbbb'},
{'published': '2022-04-17T12:33:36.667',
'uuid': '1d0844b6-3148-11ec-9027-a5b57ec5fccc'}],
{'after': '2022-04-17T12:33:36.667000', 'ids': ['1d0844b6-3148-11ec-9027-a5b57ec5fccc']}),
([{'published': '2022-04-17T12:31:36.667',
'uuid': '1d0844b6-3148-11ec-9027-a5b57ec5faaa'},
{'published': '2022-04-17T12:32:36.667',
'uuid': '1d0844b6-3148-11ec-9027-a5b57ec5fbbb'},
{'published': '2022-04-17T12:32:36.667',
'uuid': '1d0844b6-3148-11ec-9027-a5b57ec5fccc'}], {'after': '2022-04-17T12:32:36.667000',
'ids': ['1d0844b6-3148-11ec-9027-a5b57ec5fccc',
'1d0844b6-3148-11ec-9027-a5b57ec5fbbb']})])
def test_get_last_run(events, result):
assert get_events.get_last_run(events) == result
@pytest.mark.parametrize("time", ['2022-04-17T12:32:36.667)'])
def test_set_since_value(time):
req_params.set_since_value(time)
assert req_params.since == time
def test_make_api_call(mocker):
mock_res = MockResponse([{1}, {1}, {1}, {1}, {1}])
mocker.patch.object(client, 'call', return_value=mock_res)
assert get_events.make_api_call() == [{1}, {1}, {1}, {1}, {1}]
mock_res.data = [{1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}]
assert get_events.make_api_call() == [{1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}, {1}]
```
#### File: Integrations/SentinelOne-V2/SentinelOne-V2_test.py
```python
import io
import json
import pytest
import demistomock as demisto
from importlib import import_module
sentinelone_v2 = import_module('SentinelOne-V2')
main = sentinelone_v2.main
def util_load_json(path):
with io.open(path, mode='r', encoding='utf-8') as f:
return json.loads(f.read())
@pytest.fixture()
def demisto_mocker_2_1(mocker):
mocker.patch.object(demisto, 'params', return_value={'token': 'token',
'url': 'https://usea1.sentinelone.net',
'api_version': '2.1',
'fetch_threat_rank': '4'})
mocker.patch.object(demisto, 'getLastRun', return_value={'time': 1558541949000})
mocker.patch.object(demisto, 'incidents')
@pytest.fixture()
def demisto_mocker_2_0(mocker):
mocker.patch.object(demisto, 'params', return_value={'token': 'token',
'url': 'https://usea1.sentinelone.net',
'api_version': '2.0',
'fetch_threat_rank': '4'})
mocker.patch.object(demisto, 'getLastRun', return_value={'time': 1558541949000})
mocker.patch.object(demisto, 'incidents')
mocker.patch.object(demisto, 'results')
def test_fetch_incidents__2_1(mocker, requests_mock, demisto_mocker_2_1):
"""
When:
fetch-incident and API version is 2.1
Returns:
All the threats received from the API as incidents, regardless of rank.
"""
raw_threat_response = util_load_json('test_data/get_threats_2_1_raw_response.json')
incidents_for_fetch = util_load_json('test_data/incidents_2_1.json')
mocker.patch.object(demisto, 'command', return_value='fetch-incidents')
requests_mock.get('https://usea1.sentinelone.net/web/api/v2.1/threats', json=raw_threat_response)
main()
assert demisto.incidents.call_count == 1
incidents = demisto.incidents.call_args[0][0]
assert len(incidents) == 4
assert incidents[0]['occurred'] == '2019-09-15T12:05:49.095889Z'
assert incidents[1]['occurred'] == '2019-09-15T12:14:42.440985Z'
assert incidents[2]['occurred'] == '2019-09-15T12:14:43.349807Z'
assert incidents[3]['occurred'] == '2019-09-15T12:14:44.069617Z'
assert incidents_for_fetch == incidents
def test_fetch_incidents__2_0(mocker, requests_mock, demisto_mocker_2_0):
"""
When:
fetch-incident and API version is 2.0
Returns:
List of incidents whose rank meets the fetch_threat_rank threshold.
"""
raw_threat_response = util_load_json('test_data/get_threats_2_0_raw_response.json')
incidents_for_fetch = util_load_json('test_data/incidents_2_0.json')
mocker.patch.object(demisto, 'command', return_value='fetch-incidents')
requests_mock.get('https://usea1.sentinelone.net/web/api/v2.0/threats', json=raw_threat_response)
main()
assert demisto.incidents.call_count == 1
incidents = demisto.incidents.call_args[0][0]
assert len(incidents) == 2
assert incidents[0]['occurred'] == '2019-09-15T12:05:49.095889Z'
assert incidents[1]['occurred'] == '2019-09-15T12:14:42.440985Z'
assert incidents_for_fetch == incidents
def test_get_threats_outputs():
"""
When:
parsing raw response from the API to XSOAR output
Returns:
List of threat outputs.
"""
raw_threat_response = util_load_json('test_data/get_threats_2_1_raw_response.json')['data']
expected = util_load_json('test_data/threats_outputs.json')
threats_output = list(sentinelone_v2.get_threats_outputs(raw_threat_response))
assert expected == threats_output
def test_get_agents_outputs():
"""
When:
parsing raw response of agents from the API to XSOAR output
Returns:
List of agents.
"""
raw_agent_response = util_load_json('test_data/agents_raw_response.json')
expected = util_load_json('test_data/agent_outputs.json')
agent_output = list(sentinelone_v2.get_agents_outputs(raw_agent_response))
assert expected == agent_output
def test_fetch_file(mocker, requests_mock):
"""
When:
a fetch-file request is submitted
Returns:
A message string indicating that the fetch-file action was successfully initiated
"""
agent_id = 1
requests_mock.post(f'https://usea1.sentinelone.net/web/api/v2.1/agents/{agent_id}/actions/fetch-files', json={})
mocker.patch.object(demisto, 'params', return_value={'token': 'token',
'url': 'https://usea1.sentinelone.net',
'api_version': '2.1',
'fetch_threat_rank': '4'})
mocker.patch.object(demisto, 'command', return_value='sentinelone-fetch-file')
mocker.patch.object(demisto, 'args', return_value={
'agent_id': agent_id,
'file_path': "/does/not/matter/for/test",
'password': "<PASSWORD>"
})
mocker.patch.object(sentinelone_v2, "return_results")
main()
sentinelone_v2.return_results.assert_called_once_with(
f"Intiated fetch-file action for /does/not/matter/for/test on Agent {agent_id}")
def test_download_fetched_file(mocker, requests_mock, capfd):
"""
When:
request sent to retrieve a downloaded file
Return:
File entry of the file downloaded
"""
agent_id = 1
with open('test_data/download_fetched_file.zip', 'rb') as f:
dffzip_contents = f.read()
requests_mock.get(f'https://usea1.sentinelone.net/web/api/v2.1/agents/{agent_id}/uploads/1', content=dffzip_contents)
mocker.patch.object(demisto, 'params', return_value={'token': 'token',
'url': 'https://usea1.sentinelone.net',
'api_version': '2.1',
'fetch_threat_rank': '4'})
mocker.patch.object(demisto, 'command', return_value='sentinelone-download-fetched-file')
mocker.patch.object(demisto, 'args', return_value={
'agent_id': agent_id,
'activity_id': "1",
'password': "password" # This matches the password of the `download_fetched_file.zip` file in test_data
})
mocker.patch.object(sentinelone_v2, "return_results")
main()
call = sentinelone_v2.return_results.call_args_list
command_results, file_result = call[0].args[0]
assert command_results.outputs['Path'] == "download_fetched_file/"
def test_get_blocklist(mocker, requests_mock):
"""
When:
Request is made to retrieve the blocklist
Return:
The blocklist
"""
raw_blockist_response = util_load_json('test_data/get_blocklist.json')
blocklist_results = util_load_json('test_data/get_blocklist_results.json')
requests_mock.get("https://usea1.sentinelone.net/web/api/v2.1/restrictions?tenant=True&groupIds=group_id&siteIds=site_id"
"&accountIds=account_id&skip=0&limit=1&sortBy=updatedAt&sortOrder=desc",
json=raw_blockist_response)
mocker.patch.object(demisto, 'params', return_value={'token': 'token',
'url': 'https://usea1.sentinelone.net',
'api_version': '2.1',
'fetch_threat_rank': '4'})
mocker.patch.object(demisto, 'command', return_value='sentinelone-get-blocklist')
mocker.patch.object(demisto, 'args', return_value={
'offset': "0",
'limit': "1",
'group_ids': ["group_id"],
'site_ids': ["site_id"],
'account_ids': ["account_id"],
'global': "true"
})
mocker.patch.object(sentinelone_v2, "return_results")
main()
call = sentinelone_v2.return_results.call_args_list
command_results = call[0].args[0]
assert command_results.outputs == blocklist_results
def test_remove_hash_from_blocklist(mocker, requests_mock):
"""
When:
A hash is removed from the blocklist
Return:
Status that it has been removed from the blocklist
"""
raw_blockist_response = util_load_json('test_data/remove_hash_from_blocklist.json')
requests_mock.get("https://usea1.sentinelone.net/web/api/v2.1/restrictions?tenant=True&skip=0&limit=4&sortBy=updatedAt&"
"sortOrder=asc&value__contains=f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2",
json=raw_blockist_response)
requests_mock.delete("https://usea1.sentinelone.net/web/api/v2.1/restrictions", json={"data": []})
mocker.patch.object(demisto, 'params', return_value={'token': 'token',
'url': 'https://usea1.sentinelone.net',
'api_version': '2.1',
'fetch_threat_rank': '4'})
mocker.patch.object(demisto, 'command', return_value='sentinelone-remove-hash-from-blocklist')
mocker.patch.object(demisto, 'args', return_value={
'sha1': 'f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2'
})
mocker.patch.object(sentinelone_v2, "return_results")
main()
call = sentinelone_v2.return_results.call_args_list
outputs = call[0].args[0].outputs
assert outputs['hash'] == 'f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2'
assert outputs['status'] == 'Removed 1 entries from blocklist'
def test_add_hash_to_blocklist(mocker, requests_mock):
"""
When:
A hash is added to the blocklist
Return:
CommandResults with outputs set to a dict that has the hash and a response message
"""
requests_mock.post("https://usea1.sentinelone.net/web/api/v2.1/restrictions", json={"data": []})
mocker.patch.object(demisto, 'params', return_value={'token': 'token',
'url': 'https://usea1.sentinelone.net',
'api_version': '2.1',
'fetch_threat_rank': '4'})
mocker.patch.object(demisto, 'command', return_value='sentinelone-add-hash-to-blocklist')
mocker.patch.object(demisto, 'args', return_value={
'sha1': 'f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2'
})
mocker.patch.object(sentinelone_v2, "return_results")
main()
call = sentinelone_v2.return_results.call_args_list
outputs = call[0].args[0].outputs
assert outputs['hash'] == 'f2ca1bb6c7e907d06dafe4687e579fce76b37e4e93b7605022da52e6ccc26fd2'
assert outputs['status'] == 'Added to blocklist'
```
#### File: Integrations/SymantecDLPV2/SymantecDLPV2.py
```python
import demistomock as demisto
from CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import
from CommonServerUserPython import * # noqa
import dateparser
import requests
import traceback
from typing import Dict, Any
# Disable insecure warnings
requests.packages.urllib3.disable_warnings() # pylint: disable=no-member
''' CONSTANTS '''
DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # ISO8601 format with UTC, default in XSOAR
MAX_PAGE_SIZE = 50
INCIDENT_TYPE_MAPPING = {
'Network': 'NETWORK',
'Discover': 'DISCOVER',
'Endpoint': 'ENDPOINT'
}
INCIDENT_SEVERITY_MAPPING = {
'Info': 4,
'Low': 3,
'Medium': 2,
'High': 1
}
UPDATE_INCIDENT_SEVERITY_MAPPING = {
'Info': 'INFO',
'Low': 'LOW',
'Medium': 'MEDIUM',
'High': 'HIGH'
}
INCIDENT_UPDATE_MAPPING = {
'incident_id': 'incidentIds',
'data_owner_email': 'dataOwnerEmail',
'data_owner_name': 'dataOwnerName',
'note': 'incidentNotes',
'incident_status_id': 'incidentStatusId',
'remediation_status_name': 'preventOrProtectStatus',
'remediation_location': 'remediationLocation',
'severity': 'severity',
'custom_attributes': 'incidentCustomAttributes'
}
INCIDENTS_LIST_BODY = [
{
"name": "incidentId"
},
{
"name": "incidentStatusId"
},
{
"name": "creationDate"
},
{
"name": "detectionDate"
},
{
"name": "severityId"
},
{
"name": "messageSource"
},
{
"name": "messageTypeId"
},
{
"name": "policyVersion"
},
{
"name": "policyId"
},
{
"name": "matchCount"
},
{
"name": "detectionServerId"
}
]
''' CLIENT CLASS '''
class Client(BaseClient):
def __init__(self, base_url, verify, proxy, headers, auth):
super().__init__(base_url=base_url, verify=verify, proxy=proxy, headers=headers, auth=auth)
def get_incidents_request(self, creation_date: str = None, status_id: List[str] = None, severity: List[int] = None,
incident_type: List[str] = None, limit: int = MAX_PAGE_SIZE, order_by: bool = None):
"""Returns incidents list
in the input (dummy).
:param creation_date: The creation date to filter. (greater than the creation date)
:param status_id: The status IDs to filter.
:param severity: The severities to filter.
:param incident_type: The incident types to filter.
:param limit: The limit of the incidents.
:param order_by: Whether to order the results by creation date.
"""
data = {"limit": limit, "select": INCIDENTS_LIST_BODY}
if order_by:
data["orderBy"] = [{"order": "ASC", "field": {"name": "creationDate"}}]
if creation_date or status_id or severity or incident_type:
data['filter'] = {"booleanOperator": "AND", "filterType": "booleanLogic", "filters": []}
if creation_date:
data['filter']['filters'].append( # type: ignore
create_filter_dict(filter_type="localDateTime", filter_by="creationDate",
filter_value=[creation_date], operator="GT"))
if status_id:
data['filter']['filters'].append( # type: ignore
create_filter_dict(filter_type="long", filter_by="incidentStatusId",
filter_value=status_id, operator="IN"))
if severity:
data['filter']['filters'].append( # type: ignore
create_filter_dict(filter_type="long", filter_by="severityId",
filter_value=severity, operator="IN"))
if incident_type:
data['filter']['filters'].append( # type: ignore
create_filter_dict(filter_type="string", filter_by="messageSource",
filter_value=incident_type, operator="IN"))
headers = self._headers
response = self._http_request(method='POST', url_suffix='/ProtectManager/webservices/v2/incidents',
json_data=data, headers=headers)
return response
def update_incident_request(self, update_body: Dict[str, Any]) -> Dict[str, str]:
"""Update incident
:param update_body: The details to update in the incident.
"""
headers = self._headers
response = self._http_request(method='PATCH', url_suffix='/ProtectManager/webservices/v2/incidents',
headers=headers, json_data=update_body)
return response
def get_incident_static_attributes_request(self, incident_id: str) -> Dict[str, str]:
"""Returns incident static attributes.
:param incident_id: The incident ID.
"""
headers = self._headers
response = self._http_request(method='GET', url_suffix=f'/ProtectManager/webservices/'
f'v2/incidents/{incident_id}/staticAttributes',
headers=headers)
return response
def get_incident_editable_attributes_request(self, incident_id: str) -> Dict[str, str]:
"""Returns incident editable attributes.
:param incident_id: The incident ID.
"""
headers = self._headers
response = self._http_request(method='GET', url_suffix=f'/ProtectManager/webservices/'
f'v2/incidents/{incident_id}/editableAttributes',
headers=headers)
return response
def get_incidents_status_request(self) -> List[dict]:
"""Returns incidents status
"""
headers = self._headers
response = self._http_request(method='GET', url_suffix='/ProtectManager/webservices/v2/incidents/statuses',
headers=headers)
return response
def get_incident_history_request(self, incident_id: Optional[int]) -> List[dict]:
"""Returns incident history
:param incident_id: The incident ID.
"""
headers = self._headers
response = self._http_request(method='GET', url_suffix=f'/ProtectManager/webservices/v2/incidents/'
f'{incident_id}/history', headers=headers)
return response
def get_list_remediation_status_request(self) -> List[dict]:
"""Returns incidents remediation status
"""
headers = self._headers
response = self._http_request(method='GET', url_suffix='/ProtectManager/webservices/v2/incidents/'
'protectOrPreventStatuses', headers=headers)
return response
''' HELPER FUNCTIONS '''
def check_status_ids_type(status_ids_list: list):
if not all(status_id.isdigit() for status_id in status_ids_list):
raise ValueError("Status IDs must be integers.")
return status_ids_list
def create_filter_dict(filter_type: str, filter_by: str, filter_value: List[Any], operator: str) -> Dict[str, Any]:
"""Creates a dictionary with the filter for the list-incidents request.
:param filter_type: The filter type.
:param filter_by: The field name to filter by.
:param filter_value: The filter value.
:param operator: The operator to use for the filter.
"""
return {"filterType": filter_type, "operandOne": {"name": filter_by},
"operandTwoValues": filter_value, "operator": operator}
def get_severity_name_by_id(severity: Optional[int]):
"""Returns the name of the severity according to the given severity ID
:param severity: The severity ID.
"""
for severity_name, severity_num in INCIDENT_SEVERITY_MAPPING.items():
if severity_num == severity:
return severity_name
def parse_creation_date(creation_date: str):
if creation_date:
creation_date = dateparser.parse(creation_date).strftime(DATE_FORMAT) # type: ignore
return creation_date
def get_readable_output_incidents_list(incidents_list: List[dict]):
readable_output = []
for incident in incidents_list:
readable_output.append(assign_params(**{
'ID': incident.get('incidentId'),
'Severity': get_severity_name_by_id(arg_to_number(incident.get('severityId'))),
'Status': incident.get('incidentStatusId'),
'Incident Type': incident.get('messageSource'),
'Creation Date': incident.get('creationDate'),
'Message Type': incident.get('messageType'),
'Policy ID': incident.get('policyId'),
'Match Count': incident.get('matchCount')
}))
return readable_output
def get_context_incidents_list(incidents_list: List[dict]):
for incident in incidents_list:
incident_id = {'ID': incident.get('incidentId')}
incident_severity = {"severity": get_severity_name_by_id(arg_to_number(incident.get('severityId')))}
incident.pop('severityId')
incident.pop('incidentId')
incident.update(incident_id)
incident.update(incident_severity)
return incidents_list
def get_readable_output_incident_details(incidents_list: List[dict]):
readable_output = []
for incident in incidents_list:
readable_output.append(assign_params(**{
'ID': incident.get('incidentId'),
'Severity': get_severity_name_by_id(incident.get('severityId')),
'Incident Type': incident.get('messageSource'),
'Creation Date': incident.get('creationDate'),
'Detection Date': incident.get('detectionDate'),
'Message Type': incident.get('messageType'),
'Message Source': incident.get('messageSource'),
'Detection Server Name': incident.get('detectionServerName'),
'Data Owner Name': incident.get('dataOwnerName'),
'Data Owner Email': incident.get('dataOwnerEmail'),
'Status': incident.get('incidentStatusId'),
'Policy Name': incident.get('policyName'),
'Policy Group Name': incident.get('policyGroupName'),
'Custom Attributes': incident.get('customAttributeGroup')
}))
return readable_output
def get_incidents_of_current_page(limit, page, page_size, incidents_list):
"""
:param limit: The limit of the incidents.
:param page: The page number
:param page_size: Maximum number of objects to retrieve per page.
:param incidents_list: The incidents list
:return: List of objects from the response according to the limit, page and page_size.
"""
if page is not None and page_size is not None:
if page <= 0:
raise Exception('Chosen page number must be greater than 0')
start = (page - 1) * page_size
end = page * page_size
return incidents_list[start:end]
limit = limit if limit else MAX_PAGE_SIZE
return incidents_list[0:limit]
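# Illustrative example (arbitrary data): with page=2, page_size=3 and a list of 10 incidents,
# the slice incidents_list[3:6] is returned; with page/page_size omitted and limit=4,
# the first 4 incidents (incidents_list[0:4]) are returned.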
def parse_custom_attribute(custom_attribute_group_list: list, args: dict) -> list:
"""
Returns a list of all custom attributes chosen by the user.
There are four options to choose from: all, none, specific attributes, custom attributes group name.
The selected option is provided in demisto.args under the custom_attributes field.
If the user has chosen "all", the function returns all possible custom attributes (from all groups).
If the user has chosen "none", the function does not return any custom attributes.
If the user has chosen "specific attributes", a list of custom attribute names must also be provided in the
demisto.args dict under the "custom_data" field. If it is not provided, an error is raised. If provided,
the function returns only the custom attributes mentioned in the custom_data list.
If the user has chosen "custom attributes group name", the handling is similar to the "specific attributes"
option, except that custom_data is interpreted as a list of group names.
:param custom_attribute_group_list: the raw list of custom attributes group (as returned from the request)
:param args: demisto.args
:return: the parsed custom attributes list
"""
custom_attributes_flag = args.get('custom_attributes')
custom_attributes_list: list = []
# all case
if custom_attributes_flag == 'all':
for group in custom_attribute_group_list:
custom_attributes_list.append(get_all_group_custom_attributes(group))
# custom attributes group name case
elif custom_attributes_flag == 'custom attribute group name':
custom_data = args.get('custom_data')
if not custom_data:
raise DemistoException('When choosing the group value for custom_attributes argument - the custom_data'
' list must be filled with group names. For example: custom_value=g1,g2,g3')
group_name_list: list = argToList(custom_data, ',')
for group in custom_attribute_group_list:
if group.get('name') in group_name_list:
custom_attributes_list.append(get_all_group_custom_attributes(group))
# specific attributes case
elif custom_attributes_flag == 'specific attributes':
custom_data = args.get('custom_data')
if not custom_data:
raise DemistoException('When choosing the custom value for custom_attributes argument - the custom_data'
' list must be filled with custom attribute names.'
' For example: custom_value=ca1,ca2,ca3')
custom_attribute_name_list: list = argToList(custom_data, ',')
for group in custom_attribute_group_list:
for raw_custom_attribute in group.get('customAttributes', []):
custom_attribute_name: str = raw_custom_attribute.get('name')
if custom_attribute_name in custom_attribute_name_list:
custom_attribute: dict = {'name': custom_attribute_name}
custom_attribute_value = raw_custom_attribute.get('value')
if custom_attribute_value:
custom_attribute['value'] = custom_attribute_value
custom_attribute['index'] = raw_custom_attribute.get('index')
custom_attributes_list.append({'name': group.get('name'), 'customAttribute': custom_attribute})
# none case - If custom_attributes_flag == 'none' than we return empty list
return custom_attributes_list
def get_all_group_custom_attributes(group: dict) -> dict:
"""
Returns a list of all the custom attributes in the group
:param group: the group
:return: the list of all custom attributes
"""
custom_attributes_dict: dict = {'name': group.get('name'), 'customAttribute': []}
for raw_custom_attribute in group.get('customAttributes', []):
custom_attribute: dict = {'name': raw_custom_attribute.get('name'), 'index': raw_custom_attribute.get('index')}
custom_attribute_value = raw_custom_attribute.get('value')
if custom_attribute_value:
custom_attribute['value'] = custom_attribute_value
custom_attributes_dict['customAttribute'].append(custom_attribute)
return custom_attributes_dict
def get_common_incident_details(static_attributes: dict, editable_attributes: dict, args) -> dict:
"""
Parses the needed incident details into context paths
:param static_attributes: The static attributes of the incident
:param editable_attributes: The editable attributes of the incident
:param args: demisto.args
:return: the parsed dict
"""
incident_info_map_editable = editable_attributes.get('infoMap', {})
incident_info_map_static = static_attributes.get('infoMap', {})
incident_custom_attribute_groups = editable_attributes.get('customAttributeGroups', [])
incident_details: dict = assign_params(**{
'ID': static_attributes.get('incidentId'),
'severity': get_severity_name_by_id(arg_to_number(incident_info_map_editable.get('severityId'))),
'customAttributeGroup': parse_custom_attribute(incident_custom_attribute_groups, args),
})
static_attributes.pop('incidentId')
editable_attributes.pop('incidentId')
incident_info_map_editable.pop('severityId')
editable_attributes.pop('customAttributeGroups')
incident_details.update(incident_info_map_static)
incident_details.update(incident_info_map_editable)
return assign_params(**incident_details)
def get_details_unauthorized_incident(incident_data):
incident_details: dict = assign_params(**{
'ID': incident_data.get('incidentId'),
'creationDate': incident_data.get('creationDate'),
'policyId': incident_data.get('policyId'),
'severity': get_severity_name_by_id(arg_to_number(incident_data.get('severityId'))),
'incidentStatusId': incident_data.get('incidentStatusId'),
'detectionDate': incident_data.get('detectionDate'),
'policyVersion': incident_data.get('policyVersion'),
'messageSource': incident_data.get('messageSource'),
'messageType': incident_data.get('messageType'),
'matchCount': incident_data.get('matchCount'),
'errorMessage': "Notice: Incident contains partial data only"
})
return {key: val for key, val in incident_details.items() if val}
def get_hr_context_incidents_status(status_list: List[dict]):
status_readable_output = []
for status in status_list:
status_readable_output.append(assign_params(**{
'id': status.get('id'),
'name': status.get('name'),
}))
return status_readable_output
def get_readable_output_incident_history(incident_history_list: List[dict]):
history_readable_output = []
for incident_history in incident_history_list:
history_readable_output.append(assign_params(**{
'History Date': incident_history.get('incidentHistoryDate'),
'Incident History Action': incident_history.get('incidentHistoryAction'),
'DLP User Name': incident_history.get('dlpUserName')
}))
return history_readable_output
def get_context_incident_history(incident_history_list: List[dict]):
history_context = []
incident_id = arg_to_number(incident_history_list[0].get('incidentId'))
for incident_history in incident_history_list:
incident_history.pop('incidentId')
incident_history.pop('incidentHistoryActionI18nKey')
incident_history.pop('internationalized')
history_context.append({"ID": incident_id, "incidentHistory": incident_history_list})
return history_context
def create_update_body(incident_ids: list, data_owner_email: str = None, data_owner_name: str = None,
note: str = None,
incident_status_id: str = None, remediation_status_name: str = None,
remediation_location: str = None,
severity: str = None, custom_attributes: List[str] = None):
data: Dict[str, Any] = assign_params(**{"incidentIds": incident_ids, 'dataOwnerEmail': data_owner_email,
'dataOwnerName': data_owner_name,
'incidentStatusId': incident_status_id,
'preventOrProtectStatus': remediation_status_name,
'remediationLocation': remediation_location, 'severity': severity})
custom_attributes_list = build_custom_attributes_update(custom_attributes) # type: ignore
if custom_attributes_list:
data['incidentCustomAttributes'] = custom_attributes_list
if note:
data['incidentNotes'] = [{'note': note}]
return data
def build_custom_attributes_update(custom_attributes: List[str]):
"""
Builds the custom_attributes_list that the user wants to update. The input should be {columnIndex}:{newValue}.
:param custom_attributes: The custom attributes the user wants to update
:return: A list of custom attributes
"""
custom_attributes_list = []
for attribute in custom_attributes:
splitted_att = attribute.split(':')
if len(splitted_att) != 2:
raise DemistoException('Error: custom_attributes argument format is {columnIndex}:{newValue}. E.g: 1:test')
attribute_index = splitted_att[0]
if not attribute_index.isdigit():
raise DemistoException('Error: The attribute index must be an integer.')
attribute_value = splitted_att[1]
custom_attributes_list.append({"columnIndex": int(attribute_index), "value": attribute_value})
return custom_attributes_list
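# Illustrative example (arbitrary values):
# build_custom_attributes_update(['1:john', '3:finance'])
# -> [{"columnIndex": 1, "value": "john"}, {"columnIndex": 3, "value": "finance"}]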
def get_incident_details_fetch(client, incident):
"""
Create incident details dict for each incident pulled from the fetch
In case of getting 401 error we will return missing data on the incident.
"""
incident_details = {}
try:
incident_id = incident.get('incidentId')
static_attributes = client.get_incident_static_attributes_request(incident_id)
editable_attributes = client.get_incident_editable_attributes_request(incident_id)
incident_details = get_common_incident_details(static_attributes, editable_attributes,
args={"custom_attributes": "all"})
# In case of getting 401 (Unauthorized incident) - will get missing data
except DemistoException as e:
if '401' in str(e):
incident_details = get_details_unauthorized_incident(incident)
else:
raise e
return incident_details
''' COMMAND FUNCTIONS '''
def test_module(client: Client, params, fetch_time, fetch_limit, incident_type, incident_status_id,
incident_severity) -> str:
message: str = ''
try:
if params.get('isFetch'):
fetch_incidents(client, fetch_time=fetch_time, fetch_limit=fetch_limit, last_run={},
incident_types=incident_type, incident_status_id=incident_status_id,
incident_severities=incident_severity, is_test=True)
else:
client.get_incidents_request()
message = 'ok'
except DemistoException as e:
if 'Forbidden' in str(e) or 'Unauthorized' in str(e):
message = 'Authorization Error: make sure username and password are correctly set'
else:
raise e
return message
def list_incidents_command(client: Client, args: Dict[str, Any]) -> CommandResults:
creation_date = parse_creation_date(args.get('creation_date', ''))
status_ids = argToList(args.get('status_id', ''))
severities = argToList(args.get('severity', ''))
severities_dlp = [INCIDENT_SEVERITY_MAPPING[severity] for severity in severities]
incident_types = argToList(args.get('incident_type', ''))
incident_types_dlp = [INCIDENT_TYPE_MAPPING[incident_type] for incident_type in incident_types]
limit = arg_to_number(args.get('limit', 50))
page = arg_to_number(args.get('page', 1))
page_size = arg_to_number(args.get('page_size'))
incidents_result = client.get_incidents_request(creation_date, status_ids, severities_dlp, incident_types_dlp,
limit * page) # type: ignore
incidents_result = get_incidents_of_current_page(limit, page, page_size,
incidents_list=incidents_result['incidents'])
list_incidents_hr = get_readable_output_incidents_list(incidents_result)
context_incidents_list = get_context_incidents_list(incidents_result)
return CommandResults(
readable_output=tableToMarkdown(
"Symantec DLP incidents results",
list_incidents_hr,
removeNull=True,
headers=['ID', 'Severity', 'Status', 'Creation Date', 'Incident Type', 'Message Type', 'Policy ID',
'Match Count']
),
outputs_prefix='SymantecDLP.Incident',
outputs_key_field='ID',
outputs=context_incidents_list,
)
def update_incident_command(client: Client, args: Dict[str, Any]) -> CommandResults:
incident_ids = argToList(args.get('incident_ids'))
if not all(incident_id.isdigit() for incident_id in incident_ids):
raise ValueError("Incident IDs must be integers.")
data_owner_email = args.get('data_owner_email', '')
data_owner_name = args.get('data_owner_name', '')
note = args.get('note', '')
incident_status_id = args.get('incident_status_id', '')
remediation_status_name = args.get('remediation_status_name', '')
remediation_location = args.get('remediation_location', '')
severity = args.get('severity', '')
if severity:
severity = UPDATE_INCIDENT_SEVERITY_MAPPING[severity]
custom_attributes = argToList(args.get('custom_attributes', ''))
update_body = create_update_body(incident_ids=incident_ids, data_owner_email=data_owner_email,
data_owner_name=data_owner_name, note=note, incident_status_id=incident_status_id,
remediation_status_name=remediation_status_name,
remediation_location=remediation_location, severity=severity,
custom_attributes=custom_attributes)
client.update_incident_request(update_body)
return CommandResults(
readable_output=f"Symantec DLP incidents: {incident_ids} were updated"
)
def get_incident_details_command(client: Client, args: Dict[str, Any]):
"""
static attributes API docs - https://techdocs.broadcom.com/us/en/symantec-security-software/information-security/
data-loss-prevention/15-8/DLP-Enforce-REST-APIs-overview/definitions/staticincidentinfomap.html
editable attributes API docs - https://techdocs.broadcom.com/us/en/symantec-security-software/information-security/
data-loss-prevention/15-8/DLP-Enforce-REST-APIs-overview/definitions/editableincidentinfomap.html
"""
try:
incident_id = args.get('incident_id', '')
custom_attributes = args.get('custom_attributes', '')
custom_data = args.get('custom_data', '')
if custom_attributes in ['specific_attributes', 'custom_attribute_group_name'] and not custom_data:
raise DemistoException('Error: custom_data argument must be provided if you chose specific_attributes or'
' custom_attribute_group_name.')
static_attributes = client.get_incident_static_attributes_request(incident_id)
editable_attributes = client.get_incident_editable_attributes_request(incident_id)
incident_details = get_common_incident_details(static_attributes, editable_attributes, args=args)
incident_details_hr = get_readable_output_incident_details([incident_details])
return CommandResults(
readable_output=tableToMarkdown(
f"Symantec DLP incident {incident_id} details",
incident_details_hr,
removeNull=True,
json_transform_mapping={
'Custom Attributes': JsonTransformer(keys=('GroupName', 'name', 'value'), is_nested=True)},
headers=['ID', 'Severity', 'Status', 'Creation Date', 'Detection Date', 'Incident Type', 'Policy Name',
'Policy Group Name', 'Detection Server Name', 'Message Type', 'Message Source',
'Data Owner Name',
'Data Owner Email', 'Custom Attributes']
),
outputs_prefix='SymantecDLP.Incident',
outputs_key_field='ID',
outputs=incident_details,
)
except DemistoException as e:
if '401' in str(e):
raise DemistoException(f"Error 401: Incident access not authorized or the incident does not exist. {e.res}")
else:
raise DemistoException(f"Error {e.res}")
def list_incident_status_command(client: Client) -> CommandResults:
incidents_status_result = client.get_incidents_status_request()
return CommandResults(
readable_output=tableToMarkdown(
"Symantec DLP incidents status",
camelize(incidents_status_result),
removeNull=True,
),
outputs_prefix='SymantecDLP.IncidentStatus',
outputs_key_field='id',
outputs=incidents_status_result,
)
def get_incident_history_command(client: Client, args: Dict[str, Any]) -> CommandResults:
incident_id = arg_to_number(args.get('incident_id'))
limit = arg_to_number(args.get('limit', 50))
incident_history_result = client.get_incident_history_request(incident_id)
incident_history_result = incident_history_result[:limit]
incidents_history_hr = get_readable_output_incident_history(incident_history_result)
incidents_history_context = get_context_incident_history(incident_history_result)
return CommandResults(
readable_output=tableToMarkdown(
f"Symantec DLP Incident {incident_id} history results",
incidents_history_hr,
removeNull=True
),
outputs_prefix='SymantecDLP.IncidentHistory',
outputs_key_field='incidentId',
outputs=remove_empty_elements(incidents_history_context),
)
def get_list_remediation_status(client: Client) -> CommandResults:
remediation_status_result = client.get_list_remediation_status_request()
remediation_status_output = get_hr_context_incidents_status(remediation_status_result)
return CommandResults(
readable_output=tableToMarkdown(
"Incidents remediation status results",
camelize(remediation_status_output),
removeNull=True
),
outputs_prefix='SymantecDLP.IncidentRemediationStatus',
outputs_key_field='id',
outputs=remediation_status_output,
)
def is_incident_already_fetched_in_previous_fetch(last_update_time, incident_creation_date):
"""
Checks if the incident was already fetched
:param last_update_time: last_update_time from last_run
:param incident_creation_date: The current incident creation date
"""
return last_update_time and last_update_time >= incident_creation_date
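# Illustrative note (assuming both values are ISO-8601 strings, as in fetch_incidents below):
# the comparison is lexicographic, so an incident whose creationDate is less than or equal to the
# stored last_incident_creation_time (e.g. '2022-01-01T10:00:00Z') is treated as already fetched and skipped.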
def fetch_incidents(client: Client, fetch_time: str, fetch_limit: int, last_run: dict, incident_types: List[str] = None,
incident_status_id: List[str] = None, incident_severities: List[str] = None, is_test=False):
"""
Performs the fetch-incidents functionality: on every run it fetches incidents
from Symantec DLP and uploads them to the Cortex XSOAR server.
Multiple incidents can be created at the same time, so the last update time and the incident ID
are checked to make sure an incident that was already fetched is not fetched again.
:param client: Cortex XSOAR Client
:param fetch_time: For the first time the integration is enabled with the fetch incidents functionality, the fetch
time indicates from what time to start fetching existing incidents in Symantec DLP system.
:param fetch_limit: Indicates how many incidents to fetch every minute
:param last_run: Cortex XSOAR last run object
:param incident_types: The incident type to filter.
:param incident_status_id: The incident status ID to filter.
:param incident_severities: The incident severities to filter.
:param is_test: If we test the fetch for the test module
:return: A list of Cortex XSOAR incidents
"""
incidents = []
if incident_severities:
incident_severities = [INCIDENT_SEVERITY_MAPPING[severity] for severity in incident_severities] # type: ignore
if incident_types:
incident_types = [INCIDENT_TYPE_MAPPING[incident_type] for incident_type in incident_types]
if last_run:
last_update_time = last_run.get('last_incident_creation_time')
else:
# In first run
last_update_time = parse_creation_date(fetch_time)
incidents_data_res = client.get_incidents_request(status_id=incident_status_id,
severity=incident_severities, # type: ignore
incident_type=incident_types, limit=fetch_limit,
creation_date=last_update_time, order_by=True)
incidents_data_list = incidents_data_res.get('incidents', [])
for incident_data in incidents_data_list:
incident_id = incident_data.get('incidentId')
incident_creation_time = incident_data.get('creationDate')
if is_incident_already_fetched_in_previous_fetch(last_update_time, incident_creation_time):
# Skipping last incident from last cycle if fetched again
continue
incident_details = get_incident_details_fetch(client, incident_data)
incident: dict = {
'rawJSON': json.dumps(incident_details),
'name': f'Symantec DLP Incident ID {incident_id}',
'occurred': parse_creation_date(incident_creation_time)
}
incidents.append(incident)
if incident_creation_time == incidents_data_list[-1].get('creationDate'):
last_update_time = incident_creation_time
if is_test:
return None
demisto.setLastRun(
{
'last_incident_creation_time': last_update_time
}
)
# Sort the incidents list because the incident's ID and creation date are not synchronized
sorted_incidents = sorted(incidents, key=lambda d: d['name'])
return sorted_incidents
''' MAIN FUNCTION '''
def main() -> None:
"""main function, parses params and runs command functions
:return:
:rtype:
"""
try:
params = demisto.params()
server = params.get('server', '')
credentials = params.get('credentials', {})
username = credentials.get('identifier', '')
password = credentials.get('password', '')
incident_type = argToList(params.get('fetchIncidentType', 'Network,Discover,Endpoint'))
incident_status_id = check_status_ids_type(argToList(params.get('incidentStatusId', '')))
incident_severity = argToList(params.get('incidentSeverity', 'Medium,High'))
verify_certificate = not params.get('insecure', False)
proxy = params.get('proxy', False)
fetch_time = params.get('first_fetch', '3 days').strip()
try:
fetch_limit: int = int(params.get('max_fetch', 10))
fetch_limit = MAX_PAGE_SIZE if fetch_limit > MAX_PAGE_SIZE else fetch_limit
except ValueError:
raise DemistoException('Value for fetch limit must be an integer.')
client = Client(
base_url=server,
verify=verify_certificate,
headers={"Content-type": "application/json"},
proxy=proxy,
auth=(username, password)
)
args = demisto.args()
demisto.debug(f'Command being called is {demisto.command()}')
if demisto.command() == 'test-module':
result = test_module(client, params, fetch_time, fetch_limit, incident_type, incident_status_id,
incident_severity)
return_results(result)
elif demisto.command() == 'fetch-incidents':
last_run = demisto.getLastRun()
incidents = fetch_incidents(client, fetch_time, fetch_limit, last_run, incident_type, incident_status_id,
incident_severity)
demisto.incidents(incidents)
elif demisto.command() == 'symantec-dlp-list-incidents':
return_results(list_incidents_command(client, args))
elif demisto.command() == 'symantec-dlp-get-incident-details':
return_results(get_incident_details_command(client, args))
elif demisto.command() == 'symantec-dlp-update-incident':
return_results(update_incident_command(client, args))
elif demisto.command() == 'symantec-dlp-list-incident-status':
return_results(list_incident_status_command(client))
elif demisto.command() == 'symantec-dlp-get-incident-history':
return_results(get_incident_history_command(client, args))
elif demisto.command() == 'symantec-dlp-list-remediation-status':
return_results(get_list_remediation_status(client))
# Log exceptions and return errors
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
```
#### File: Tests/Marketplace/search_and_uninstall_pack.py
```python
import ast
import json
import argparse
import os
import sys
import demisto_client
from Tests.scripts.utils import logging_wrapper as logging
from Tests.scripts.utils.log_util import install_logging
from Tests.Marketplace.search_and_install_packs import install_packs
from time import sleep
def get_all_installed_packs(client: demisto_client):
"""
Args:
client (demisto_client): The client to connect to.
Returns:
        A list of the ids of the installed packs.
"""
try:
logging.info("Attempting to fetch all installed packs.")
response_data, status_code, _ = demisto_client.generic_request_func(client,
path='/contentpacks/metadata/installed',
method='GET',
accept='application/json',
_request_timeout=None)
if 200 <= status_code < 300:
installed_packs = ast.literal_eval(response_data)
installed_packs_ids = [pack.get('id') for pack in installed_packs]
logging.success('Successfully fetched all installed packs.')
installed_packs_ids_str = ', '.join(installed_packs_ids)
logging.debug(
f'The following packs are currently installed from a previous build run:\n{installed_packs_ids_str}')
if 'Base' in installed_packs_ids:
installed_packs_ids.remove('Base')
return installed_packs_ids
else:
result_object = ast.literal_eval(response_data)
message = result_object.get('message', '')
raise Exception(f'Failed to fetch installed packs - with status code {status_code}\n{message}')
except Exception as e:
logging.exception(f'The request to fetch installed packs has failed. Additional info: {str(e)}')
return None
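# Hedged example (values are made up): the response body above is parsed with
# ast.literal_eval into a list of pack dicts before the ids are extracted:
#   response_data = "[{'id': 'HelloWorld', 'currentVersion': '1.0.0'}]"
#   ast.literal_eval(response_data)  # -> [{'id': 'HelloWorld', 'currentVersion': '1.0.0'}]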
def uninstall_packs(client: demisto_client, pack_ids: list):
"""
Args:
client (demisto_client): The client to connect to.
pack_ids: packs ids to uninstall
Returns:
        True if uninstalling succeeded, False otherwise.
"""
body = {"IDs": pack_ids}
try:
logging.info("Attempting to uninstall all installed packs.")
response_data, status_code, _ = demisto_client.generic_request_func(client,
path='/contentpacks/installed/delete',
method='POST',
body=body,
accept='application/json',
_request_timeout=None)
except Exception as e:
logging.exception(f'The request to uninstall packs has failed. Additional info: {str(e)}')
return False
return True
def uninstall_all_packs(client: demisto_client, hostname):
    """ Lists all installed packs and uninstalls them.
Args:
client (demisto_client): The client to connect to.
hostname (str): xsiam hostname
    Returns (bool):
        A flag that indicates if the operation succeeded or not.
"""
logging.info(f'Starting to search and uninstall packs in server: {hostname}')
packs_to_uninstall: list = get_all_installed_packs(client)
if packs_to_uninstall:
return uninstall_packs(client, packs_to_uninstall)
logging.debug('Skipping packs uninstallation - nothing to uninstall')
return True
def reset_base_pack_version(client: demisto_client):
"""
Resets base pack version to prod version.
Args:
client (demisto_client): The client to connect to.
"""
host = client.api_client.configuration.host.replace('https://api-', 'https://') # disable-secrets-detection
try:
# make the search request
response_data, status_code, _ = demisto_client.generic_request_func(client,
path='/contentpacks/marketplace/Base',
method='GET',
accept='application/json',
_request_timeout=None)
if 200 <= status_code < 300:
result_object = ast.literal_eval(response_data)
if result_object and result_object.get('currentVersion'):
logging.debug('Found Base pack in bucket!')
pack_data = {
'id': result_object.get('id'),
'version': result_object.get('currentVersion')
}
# install latest version of Base pack
logging.info(f'updating base pack to version {result_object.get("currentVersion")}')
return install_packs(client, host, [pack_data], False)
else:
raise Exception('Did not find Base pack')
else:
result_object = ast.literal_eval(response_data)
msg = result_object.get('message', '')
            err_msg = f'Search request for base pack failed with status code ' \
                      f'{status_code}\n{msg}'
raise Exception(err_msg)
except Exception:
logging.exception('Search request Base pack has failed.')
return False
def wait_for_uninstallation_to_complete(client: demisto_client, retries: int = 30):
"""
Query if there are still installed packs, as it might take time to complete.
Args:
client (demisto_client): The client to connect to.
        retries: Max number of sleep periods.
Returns: True if all packs were uninstalled successfully
"""
retry = 0
try:
installed_packs = get_all_installed_packs(client)
while len(installed_packs) > 1:
if retry > retries:
raise Exception('Waiting time for packs to be uninstalled has passed, there are still installed '
'packs. Aborting.')
logging.info(f'The process of uninstalling all packs is not over! There are still {len(installed_packs)} '
f'packs installed. Sleeping for 10 seconds.')
sleep(10)
installed_packs = get_all_installed_packs(client)
retry = retry + 1
except Exception as e:
logging.exception(f'Exception while waiting for the packs to be uninstalled. The error is {e}')
return False
return True
def options_handler():
"""
Returns: options parsed from input arguments.
"""
parser = argparse.ArgumentParser(description='Utility for instantiating and testing integration instances')
parser.add_argument('--xsiam_machine', help='XSIAM machine to use, if it is XSIAM build.')
parser.add_argument('--xsiam_servers_path', help='Path to secret xsiam server metadata file.')
options = parser.parse_args()
return options
def get_json_file(path):
"""
Args:
path: path to retrieve file from.
Returns: json object loaded from the path.
"""
with open(path, 'r') as json_file:
return json.loads(json_file.read())
def get_xsiam_configuration(xsiam_machine, xsiam_servers):
"""
Parses conf params from servers list.
"""
conf = xsiam_servers.get(xsiam_machine)
return conf.get('api_key'), conf.get('base_url'), conf.get('x-xdr-auth-id')
def main():
install_logging('cleanup_xsiam_instance.log', logger=logging)
    # in XSIAM we don't use the demisto username
os.environ.pop('DEMISTO_USERNAME', None)
options = options_handler()
host = options.xsiam_machine
xsiam_servers = get_json_file(options.xsiam_servers_path)
api_key, base_url, xdr_auth_id = get_xsiam_configuration(options.xsiam_machine, xsiam_servers)
logging.info(f'Starting cleanup for XSIAM server {host}')
client = demisto_client.configure(base_url=base_url,
verify_ssl=False,
api_key=api_key,
auth_id=xdr_auth_id)
    success = (reset_base_pack_version(client)
               and uninstall_all_packs(client, host)
               and wait_for_uninstallation_to_complete(client))
if not success:
sys.exit(2)
logging.info('Uninstalling packs done.')
if __name__ == '__main__':
main()
``` |
{
"source": "jraval/Twitoff_PT10",
"score": 3
} |
#### File: Twitoff_PT10/twitoff/heroku_user.py
```python
import requests
import ast
from data_model import DB, User, Tweet
def get_user_and_tweets(username):
heroku_url = 'https://lambda-ds-twit-assist.herokuapp.com/user/'
user = ast.literal_eval(requests.get(heroku_url + username).text)
try:
if User.query.get(user['twitter_handle']['id']):
db_user = User.query.get(user['twitter_handle']['id'])
else:
db_user = User(id=user['twitter_handle']['id'],
name=user['twitter_handle']['username'])
DB.session.add(db_user)
for tweet in user['tweets']:
db_tweet = Tweet(id=tweet['id'], text=tweet['full_text'])
db_user.tweets.append(db_tweet)
DB.session.add(db_tweet)
except Exception as e:
raise e
else:
DB.session.commit()
``` |
{
"source": "jravenhorst909/matlab-api-example",
"score": 2
} |
#### File: matlab-api-example/API/api.py
```python
import base64
import json
import logging
import re
import struct
import crcmod
from os import path
try:
import enum
except ImportError:
print('Please install enum34 package')
raise
import requests
import six
from crcmod.predefined import mkPredefinedCrcFun
from mypy_extensions import TypedDict
logger = logging.getLogger('moveshelf-api')
class TimecodeFramerate(enum.Enum):
FPS_24 = '24'
FPS_25 = '25'
FPS_29_97 = '29.97'
FPS_30 = '30'
FPS_50 = '50'
FPS_59_94 = '59.94'
FPS_60 = '60'
FPS_1000 = '1000'
Timecode = TypedDict('Timecode', {
'timecode': str,
'framerate': TimecodeFramerate
})
Metadata = TypedDict('Metadata', {
'title': str,
'description': str,
'previewImageUri': str,
'allowDownload': bool,
'allowUnlistedAccess': bool,
'startTimecode': Timecode
},
total=False)
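# Minimal usage sketch (added for illustration; the values are placeholders, not
# from the original file): the TypedDicts above are plain dicts at runtime, e.g.
#   example_metadata = Metadata(
#       title='Session 01',
#       description='Uploaded via the API example',
#       startTimecode=Timecode(timecode='00:00:00:00',
#                              framerate=TimecodeFramerate.FPS_25))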
class MoveshelfApi(object):
def __init__(self, api_key_file, api_url = 'https://api.moveshelf.com/graphql'):
#def __init__(self, api_key_file='mvshlf-api-key.json', api_url = 'https://api.moveshelf.com/graphql'):
self._crc32c = mkPredefinedCrcFun('crc32c')
self.api_url = api_url
        if not path.isfile(api_key_file):
raise ValueError("No valid API key. Please check instructions on https://github.com/moveshelf/python-api-example")
with open(api_key_file, 'r') as key_file:
data = json.load(key_file)
self._auth_token = BearerTokenAuth(data['secretKey'])
def getProjectDatasets(self, project_id):
data = self._dispatch_graphql(
'''
query getProjectDatasets($projectId: ID!) {
node(id: $projectId) {
... on Project {
id,
name,
datasets {
name,
downloadUri
}
}
}
}
''',
projectId = project_id
)
return [ d for d in data['node']['datasets']]
def getUserProjects(self):
data = self._dispatch_graphql(
'''
query {
viewer {
projects {
id
name
}
}
}
''',
)
return data['viewer']['projects']
def uploadFile(self, file_path, project, metadata=Metadata()):
logger.info('Uploading %s', file_path)
        # metadata title goes wrong
metadata['title'] = metadata.get('title', path.basename(file_path))
metadata['allowDownload'] = metadata.get('allowDownload', False)
metadata['allowUnlistedAccess'] = metadata.get('allowUnlistedAccess', False)
if metadata.get('startTimecode'):
self._validateAndUpdateTimecode(metadata['startTimecode'])
creation_response = self._createClip(project, {
'clientId': file_path,
'crc32c': self._calculateCrc32c(file_path),
'filename': path.basename(file_path),
'metadata': metadata
})
logging.info('Created clip ID: %s', creation_response['mocapClip']['id'])
with open(file_path, 'rb') as fp:
requests.put(creation_response['uploadUrl'], data=fp)
return creation_response['mocapClip']['id']
def uploadAdditionalData(self, file_path, clipId, dataType, filename):
logger.info('Uploading %s', file_path)
creation_response = self._createAdditionalData(clipId, {
'clientId': file_path,
'crc32c': self._calculateCrc32c(file_path),
'filename': filename,
'dataType': dataType
})
        logging.info('Created additional data ID: %s', creation_response['data']['id'])
with open(file_path, 'rb') as fp:
requests.put(creation_response['uploadUrl'], data=fp)
return creation_response['data']['id']
def updateClipMetadata(self, clip_id, metadata):
logger.info('Updating metadata for clip: %s', clip_id)
if metadata.get('startTimecode'):
self._validateAndUpdateTimecode(metadata['startTimecode'])
res = self._dispatch_graphql(
'''
mutation updateClip($input: UpdateClipInput!) {
updateClip(clipData: $input) {
clip {
id,
title
}
}
}
''',
input = {
'id': clip_id,
'metadata': metadata
}
)
logging.info('Updated clip ID: %s', res['updateClip']['clip']['id'])
def getProjectClips(self, project_id, limit):
data = self._dispatch_graphql(
'''
query getAdditionalDataInfo($projectId: ID!, $limit: Int) {
node(id: $projectId) {
... on Project {
id,
name,
clips(first: $limit) {
edges {
node {
id,
title,
projectPath
}
}
}
}
}
}
''',
projectId = project_id,
limit = limit
)
return [c['node'] for c in data['node']['clips']['edges']]
def getAdditionalData(self, clip_id):
data = self._dispatch_graphql(
'''
query getAdditionalDataInfo($clipId: ID!) {
node(id: $clipId) {
... on MocapClip {
id,
additionalData {
id
dataType
originalFileName
previewDataUri
originalDataDownloadUri
}
}
}
}
''',
clipId = clip_id
)
return data['node']['additionalData']
def getProjectAndClips(self):
data = self._dispatch_graphql(
'''
query {
viewer {
projects {
id
name
clips(first: 20) {
edges {
node {
id,
title
}
}
}
}
}
}
'''
)
return [p for p in data['viewer']['projects']]
def _validateAndUpdateTimecode(self, tc):
assert tc.get('timecode')
assert tc.get('framerate')
assert isinstance(tc['framerate'], TimecodeFramerate)
        assert re.match(r'\d{2}:\d{2}:\d{2}[:;]\d{2,3}', tc['timecode'])
tc['framerate'] = tc['framerate'].name
def _createClip(self, project, clip_creation_data):
data = self._dispatch_graphql(
'''
mutation createClip($input: ClipCreationInput!) {
createClips(input: $input) {
response {
clientId,
uploadUrl,
mocapClip {
id
}
}
}
}
''',
input = {
'project': project,
'clips': [clip_creation_data]
}
)
return data['createClips']['response'][0]
def _deleteClip(self, c_id):
data = self._dispatch_graphql(
'''
mutation deleteClip($clipId: String) {
deleteClip(clipId: $clipId) {
ok
}
}
''',
clipId = c_id
)
return data['deleteClip']['ok']
def _calculateCrc32c(self, file_path):
with open(file_path, 'rb') as fp:
crc = self._crc32c(fp.read())
b64_crc = base64.b64encode(struct.pack('>I', crc))
return b64_crc if six.PY2 else b64_crc.decode('utf8')
def _createAdditionalData(self, clipId, metadata):
data = self._dispatch_graphql(
'''
mutation createAdditionalData($input: CreateAdditionalDataInput) {
createAdditionalData(input: $input) {
uploadUrl
data {
id
dataType
originalFileName
uploadStatus
}
}
}
''',
input = {
'clipId': clipId,
'dataType': metadata['dataType'],
'crc32c': metadata['crc32c'],
'filename': metadata['filename'],
'clientId': metadata['clientId']
})
return data['createAdditionalData']
def _dispatch_graphql(self, query, **kwargs):
payload = {
'query': query,
'variables': kwargs
}
response = requests.post(self.api_url, json=payload, auth=self._auth_token)
response.raise_for_status()
json_data = response.json()
if 'errors' in json_data:
raise GraphQlException(json_data['errors'])
return json_data['data']
class BearerTokenAuth(requests.auth.AuthBase):
def __init__(self, token):
self._auth = 'Bearer {}'.format(token)
def __call__(self, request):
request.headers['Authorization'] = self._auth
return request
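# Hedged usage sketch (not part of the original module; the token and query are
# placeholders): BearerTokenAuth is a standard requests auth object, so it can be
# passed directly to requests calls:
#   auth = BearerTokenAuth('my-secret-key')
#   requests.post('https://api.moveshelf.com/graphql',
#                 json={'query': 'query { viewer { projects { id } } }'}, auth=auth)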
class GraphQlException(Exception):
def __init__(self, error_info):
self.error_info = error_info
``` |
{
"source": "jravesloot/sift_app",
"score": 3
} |
#### File: management/commands/refresh_heroku.py
```python
import os, sys
from datetime import datetime
from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from concerts.utils import FIXTURE_DIRS
class Command(BaseCommand):
help = "Flushes and re-seeds DB from fixtures, runs make_matches to find upcoming concerts."
def handle(self, *args, **options):
try:
artists_fixtures = os.listdir(FIXTURE_DIRS['artists'])
artists_fixtures.sort()
latest_artists_fixture = os.path.join(
FIXTURE_DIRS['artists'],
artists_fixtures[-1]
)
venues_fixtures = os.listdir(FIXTURE_DIRS['venues'])
venues_fixtures.sort()
latest_venues_fixture = os.path.join(
FIXTURE_DIRS['venues'],
venues_fixtures[-1]
)
concerts_fixtures = os.listdir(FIXTURE_DIRS['concerts'])
concerts_fixtures.sort()
latest_concerts_fixture = os.path.join(
FIXTURE_DIRS['concerts'],
concerts_fixtures[-1]
)
except IndexError as e:
sys.exit("Problem accessing a fixture: {}".format(e.args))
self.stdout.write("Flushing DB...")
call_command('flush', '--noinput')
self.stdout.write("Loading artists from fixture...")
call_command('loaddata', latest_artists_fixture)
self.stdout.write("Loading venues from fixture...")
call_command('loaddata', latest_venues_fixture)
self.stdout.write("Loading concerts from fixture...")
call_command('loaddata', latest_concerts_fixture)
self.stdout.write("Making concert matches...")
call_command('make_matches')
self.stdout.write("Done!")
```
#### File: concerts/scrapers/bottomlounge.py
```python
from .venue import Venue
import calendar, datetime, iso8601, os, pytz, sys, time
from collections import namedtuple
import requests
from bs4 import BeautifulSoup as bs
TODAY = datetime.datetime.today()
class BottomLounge(Venue):
"""
Scraper object for Bottom Lounge.
1375 W Lake St.
Chicago, IL, 60607
http://bottomlounge.com
"""
def __init__(self):
super().__init__()
self.venue_name = 'Bottom Lounge'
self.url = 'http://bottomlounge.com/events'
def get_summaries(self, html):
"""
See Venue.get_summaries.
html dump > '.schedule-item-content'
"""
show_summaries = bs(html, 'html.parser').select('.schedule-item-content')
return show_summaries
def get_artist_billing(self, summary):
"""
See Venue.get_artist_billing.
'.schedule-item-content' > '.schedule-title' (All one element)
"""
# artist billing as one string; could break down on ' * '
artists = summary.select('.schedule-title')[0].text
return artists
def get_venue_info(self, summary):
"""
See Venue.get_venue_info.
Bottom Lounge appears currently to promote in-house shows only.
"""
# BL seems to promote in-house shows only
venue_name = self.venue_name
# TODO remove venue_id logic
venue_id = 99
return (venue_name, venue_id)
def get_show_date(self, summary):
"""
See Venue.get_show_date.
'.schedule-item-content' > '.schedule-date' >
span[0]: '09/08/2016'
span[1]: ' Doors 6:00 PM Show 6:30 PM'
Use show time.
"""
dt_spans = summary.select('.schedule-date')[0].find_all('span')
show_month, show_date, show_year = tuple(dt_spans[0].text.split('/'))
# use show time
html_time = ' '.join(dt_spans[1].text.split()[-2:])
        # Rarely, a show on the site doesn't have AM/PM, e.g. 'Show 6:00'.
# In such cases, assume it's PM.
if html_time.split()[0].isalpha():
html_time = '{} PM'.format(html_time.split()[1])
# convert str to 24hr
try:
t = time.strptime(html_time, '%I:%M %p')
except ValueError:
# XXX actually handle this
t = time.strptime('12:12 PM', '%I:%M %p')
utc_datetime = Venue.make_utc_datetime(
show_year=int(show_year),
show_month=int(show_month),
show_day=int(show_date),
show_hour=t.tm_hour,
show_minute=t.tm_min
)
return utc_datetime
def get_show_price(self, summary):
"""
See Venue.get_show_price.
BL prices only available through TicketWeb
"""
price = '(See ticketing site for price)'
return price
def get_show_url(self, summary):
"""
See Venue.get_show_url.
'.schedule-item-content' > first link
"""
show_url = summary.find('a', href=True)['href']
return show_url
```
#### File: concerts/scrapers/houseofblues.py
```python
import calendar, datetime, iso8601, os, pytz, sys, time
from collections import namedtuple
import requests
from bs4 import BeautifulSoup as bs
from .venue import Venue
TODAY = datetime.datetime.today()
class HouseOfBlues(Venue):
"""
Scraper object for House of Blues (Chicago).
329 N Dearborn St.
Chicago, IL 60654
http://houseofblues.com/chicago/
"""
def __init__(self):
super().__init__()
self.venue_name = 'House of Blues'
self.url = 'http://houseofblues.com/chicago'
def load_live_shows(self):
"""
Scrapes the venue's concerts schedule page and populates
self.shows list with ShowTuples, if not self.shows.
"""
if not self.shows:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
driver = webdriver.PhantomJS()
driver.set_window_size(1221, 686)
driver.delete_all_cookies()
driver.get(self.url)
# set to list view
list_xpath = '//*[@id="EventCalendar122"]/div[1]/div/section/div/div[3]/a[2]'
list_button = driver.find_element_by_xpath(list_xpath)
next_month_xpath = '//*[@id="content"]/div[2]/div/section/header[1]/h2/a[2]/i'
next_month_button = driver.find_element_by_xpath(next_month_xpath)
venue_html_list = []
# http://docs.seleniumhq.org/docs/04_webdriver_advanced.jsp#explicit-and-implicit-waits
def _wait_function():
time.sleep(3)
# Getting duplicates with the selenium wait function..
# WebDriverWait(driver, 10).until(
# EC.presence_of_element_located((By.CLASS_NAME, "c-calendar-list__item")))
try:
# grab current month plus next two
for _ in range(3):
list_button.click()
_wait_function()
venue_html_list.append(driver.page_source)
next_month_button.click()
_wait_function()
finally:
driver.quit()
self.make_shows('\n'.join(venue_html_list))
else:
print("Shows list already populated")
def get_summaries(self, html):
"""
See Venue.get_summaries.
html dump > '.c-calendar-list__item'
And some page manipulation to get a couple months' worth of concerts.
"""
all_summaries = bs(html, 'html.parser').select('.c-calendar-list__item')
# "main" concerts at HoB will have 'Find Tickets Now' and
# 'Event Details' buttons; filter everything else
button_words = ['Find', 'Tickets', 'Now', 'Event', 'Details']
concert_filter = \
lambda summary: summary.select('.c-calendar-list__venue')[0].text.split() == button_words
show_summaries = list(filter(concert_filter, all_summaries))
return show_summaries
def get_artist_billing(self, summary):
"""
See Venue.get_artist_billing.
summary > '.c-calendar-list__title'
"""
return summary.select('.c-calendar-list__title')[0].text
def get_venue_info(self, summary):
"""
See Venue.get_venue_info.
HoB concerts seem in-house only.
"""
venue_name = self.venue_name
venue_id = self.venue_id
return (venue_name, venue_id)
def get_show_date(self, summary):
"""
See Venue.get_show_date.
Date: summary > '.c-calendar-list__date-date'
Time: summary > '.c-calendar-list__date-time'
"""
# Dates on the site use abbreviations, conform to calendar.month_abbr.
# Get month as number
month_map = {k:v for v, k in enumerate(calendar.month_abbr)}
concert_month_date = summary.select('.c-calendar-list__date-date')
show_month, show_date = concert_month_date[0].text.strip(' ,\n').split()
show_month = month_map[show_month.title()]
if show_month >= TODAY.month:
show_year = TODAY.year
else:
show_year = TODAY.year + 1
show_time = summary.select('.c-calendar-list__date-time')[0].text.strip()
show_time = time.strptime(show_time, '%I:%M%p')
utc_datetime = Venue.make_utc_datetime(
show_year=int(show_year),
show_month=int(show_month),
show_day=int(show_date),
show_hour=show_time.tm_hour,
show_minute=show_time.tm_min)
return utc_datetime
def get_show_price(self, summary):
"""
See Venue.get_show_price.
Ticket prices not directly on HoB site.
"""
return 'Check ticket site for price.'
def get_show_url(self, summary):
"""
See Venue.get_show_url.
'.btn-parent' > link from second item
"""
details_button = summary.select('.btn-parent')[1]
event_page = details_button.find('a', href=True)['href']
return event_page
```
#### File: sift/concerts/utils.py
```python
import logging, os
from .scrapers import (
emptybottle,
bottomlounge,
subterranean,
doubledoor,
houseofblues,
thaliahall,
metro,
lincolnhall,
schubas,
)
logger = logging.getLogger('concerts')
# key is Venue objects's pk; used by scrape_shows mgmt command
SCRAPERS = {
1: emptybottle.EmptyBottle,
2: bottomlounge.BottomLounge,
3: subterranean.Subterranean,
4: doubledoor.DoubleDoor,
5: houseofblues.HouseOfBlues,
6: thaliahall.ThaliaHall,
7: metro.Metro,
8: lincolnhall.LincolnHall,
9: schubas.SchubasTavern,
}
FIXTURES_BASE_DIR = os.path.join(
os.environ.get('SIFT_HOME'),
'concerts',
'fixtures'
)
FIXTURE_DIRS = {
'artists': os.path.join(FIXTURES_BASE_DIR, 'artists'),
'concerts': os.path.join(FIXTURES_BASE_DIR, 'concerts'),
'venues': os.path.join(FIXTURES_BASE_DIR, 'venues'),
}
def get_spotify_id(artist_name):
"""Returns Spotify artist ID, or empty string if none found."""
import requests
from django.utils.http import urlencode
SPOTIFY_QUERY_BASE = 'https://api.spotify.com/v1/search?'
spotify_token = os.environ.get('SPOTIFY_TOKEN')
if spotify_token is None:
import sys
sys.exit("No SPOTIFY_TOKEN env var set. Sad!")
# searching quoted artist name seems to give more accurate
quoted_name = '\"' + artist_name + '\"'
query_str = urlencode([('q', quoted_name), ('type', 'artist')])
response = requests.get(
SPOTIFY_QUERY_BASE + query_str,
headers={"Authorization": "Bearer {}".format(spotify_token)}
)
response.raise_for_status()
artist_id = ''
try:
artist_id = response.json()['artists']['items'][0]['id']
except IndexError as e:
logger.debug("Failed to get Spotify artist ID for {}".format(artist_name))
return artist_id
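# Worked example (artist name chosen arbitrarily): the quoted name is URL-encoded
# into the search query string, e.g.
#   urlencode([('q', '"Radiohead"'), ('type', 'artist')])  # -> 'q=%22Radiohead%22&type=artist'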
def make_artist_regex(artist_name):
"""Returns regex string of artist name for storage in DB."""
from string import punctuation
import re
PUNC_RE = re.compile(
'[{}]'.format(punctuation),
flags=re.I|re.M|re.DOTALL
)
no_punc = re.sub(PUNC_RE, r'.?', artist_name)
no_end = re.sub(r'$', '\\\\b', no_punc)
re_string = re.sub(r'^', '\\\\b', no_end)
return re_string
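# Worked example (band name chosen arbitrarily): make_artist_regex('AC/DC') returns
# the string \bAC.?DC\b - punctuation is relaxed to an optional single character
# ('.?') and the name is wrapped in word-boundary anchors, so 'AC/DC', 'AC-DC',
# 'AC DC' and 'ACDC' would all match when the stored regex is applied.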
if __name__ == '__main__':
pass
``` |
{
"source": "jravey7/Joe2Music",
"score": 3
} |
#### File: mopidy_touchscreen/graphic_utils/background_manager.py
```python
import pygame
change_speed = 2
class DynamicBackground:
def __init__(self, size):
self.image_loaded = False
self.size = size
self.surface = pygame.Surface(self.size).convert()
self.surface.fill((38, 38, 38))
self.surface_image = pygame.Surface(self.size).convert()
self.update = True
def draw_background(self):
if self.image_loaded:
return self.surface_image.copy()
else:
return self.surface.copy()
def should_update(self):
if self.update:
self.update = False
return True
else:
return False
def set_background_image(self, image):
if image is not None:
image_size = get_aspect_scale_size(image, self.size)
target = pygame.transform.smoothscale(image, image_size)
target.set_alpha(150)
self.image_loaded = True
self.surface_image.fill((0, 0, 0))
pos = ((self.size[0] - image_size[0])/2,
(self.size[1] - image_size[1])/2)
self.surface_image.blit(target, pos)
else:
self.image_loaded = False
self.update = True
def get_aspect_scale_size(img, new_size):
size = img.get_size()
aspect_x = new_size[0] / float(size[0])
aspect_y = new_size[1] / float(size[1])
if aspect_x > aspect_y:
aspect = aspect_x
else:
aspect = aspect_y
new_size = (int(aspect*size[0]), int(aspect*size[1]))
return new_size
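# Worked example (numbers are illustrative): covering a 300x300 screen with a
# 200x100 image gives aspect_x = 1.5 and aspect_y = 3.0; the larger factor wins,
# so the image is scaled to (600, 300) and set_background_image() above centres
# it, letting the horizontal overflow fall outside the screen.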
```
#### File: mopidy_touchscreen/screens/tracklist.py
```python
from base_screen import BaseScreen
from .main_screen import MainScreen
from ..graphic_utils import ListView, ScreenObjectsManager, TextItem, TouchAndTextItem
from ..input import InputManager
class Tracklist(BaseScreen):
def __init__(self, size, base_size, manager, fonts):
BaseScreen.__init__(self, size, base_size, manager, fonts)
self.size = size
self.base_size = base_size
self.manager = manager
self.list_view = ListView((0, 0), (
self.size[0], self.size[1] -
self.base_size), self.base_size, self.fonts['base'], 1)
self.tracks = []
self.tracks_strings = []
self.update_list()
self.track_started(
self.manager.core.playback.current_tl_track.get())
self.screen_objects = ScreenObjectsManager()
# javey: shuffle button
button = TouchAndTextItem(self.fonts['icon'], u"\ue629 ",
(self.size[0] - 120, self.size[1] - (self.base_size*2)), None)
self.screen_objects.set_touch_object("shuffle", button)
def should_update(self):
return self.list_view.should_update()
def update(self, screen, update_type, rects):
update_all = (update_type == BaseScreen.update_all)
self.list_view.render(screen, update_all, rects)
#javey: update shuffle button location
        self.screen_objects.get_touch_object("shuffle").pos = (self.size[0] - 120, self.size[1] - (self.base_size*2))
self.screen_objects.render(screen)
def tracklist_changed(self):
self.update_list()
def update_list(self):
self.tracks = self.manager.core.tracklist.tl_tracks.get()
self.tracks_strings = []
for tl_track in self.tracks:
self.tracks_strings.append(
MainScreen.get_track_name(tl_track.track))
self.list_view.set_list(self.tracks_strings)
def touch_event(self, touch_event):
if touch_event.type == InputManager.click:
clicked = self.list_view.touch_event(touch_event)
if clicked is not None:
                pass
                # self.manager.core.playback.play(self.tracks[clicked])
else:
# javey: shuffle functionality
clicked = self.screen_objects.get_touch_objects_in_pos(
touch_event.down_pos)
if len(clicked) > 0:
clicked = clicked[0]
if clicked == "shuffle":
                        self.manager.core.tracklist.shuffle()
def track_started(self, track):
self.list_view.set_active(
[self.manager.core.tracklist.index(track).get()])
``` |
{
"source": "jrawbits/nmtk-1",
"score": 2
} |
#### File: NMTK_apps/NMTK_apps/context_processors.py
```python
from django.conf import settings # import the settings file
def registration_open(request):
# return the value you want as a dictionary. you may add multiple values
# in there.
return {'REGISTRATION_OPEN': settings.REGISTRATION_OPEN}
```
#### File: NMTK_server/data_loaders/BaseDataLoader.py
```python
import logging
import collections
import os
from .BaseDataLoader import *
logger = logging.getLogger(__name__)
class BaseDataLoader(object):
'''
Base class for data loader. Loaders should all support at the
very least the iterable protocol (returning a dictionary of key/value pairs)
for each data set found.
'''
# A list of files that should be unpacked from the archive, it's
# important to note that the first one is the supported file type,
# the others are supporting files.
unpack_list = None
def __init__(self, filelist):
'''
A basic constructor to create a new data loader. In this case
the filelist is a list of prospective files to load - it generally
comes from a zip (or some other) file type.
'''
self.filelist = filelist
def is_supported(self):
raise Exception('This method must be implemented')
def __del__(self):
if hasattr(self, 'temp_file'):
os.unlink(self.temp_file)
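# Minimal subclass sketch (illustrative only, not part of the NMTK code base): a
# concrete loader is expected to decide from self.filelist whether it can handle
# the data, and to support the iterable protocol described in the class docstring.
#   class CSVDataLoader(BaseDataLoader):
#       def is_supported(self):
#           return any(f.lower().endswith('.csv') for f in self.filelist)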
```
#### File: management/commands/query_settings.py
```python
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from django.conf import settings
from django.core.urlresolvers import reverse
class Command(BaseCommand):
help = 'List the database type.'
option_list = BaseCommand.option_list + (
make_option('-t', '--type',
action='store_true',
dest='type',
default=False,
help='Print the database type.'),
make_option('-d', '--database',
action='store_true',
dest='database',
default=False,
help='Provide the name of the database.'),
make_option('-u', '--username',
action='store_true',
dest='username',
default=False,
help='Provide the name of the database user.'),
make_option('--self-signed-cert',
action='store_true',
dest='self_signed_cert',
default=False,
help='Return a 1 if the server is using a self signed certificate, 0 otherwise..'),
make_option('--nmtk-server-status',
action='store_true',
dest='nmtk_server_status',
default=False,
help='Return a 1 if the NMTK Server is enabled, 0 otherwise..'),
make_option('--tool-server-status',
action='store_true',
dest='tool_server_status',
default=False,
help='Return a 1 if the Tool Server is enabled, 0 otherwise..'),
make_option('--production',
action='store_true',
dest='production',
default=False,
help='Return a 1 if the NMTK server is in production (minified UI).'),
make_option('--tool-server-url',
action='store_true',
dest='tool_server_url',
default=False,
help='Return the URL for the Tool server.'),
)
def handle(self, *args, **options):
if options['type']:
print getattr(settings, 'DATABASE_TYPE')
elif options['database']:
print settings.DATABASES['default']['NAME']
elif options['username']:
print settings.DATABASES['default']['USER']
elif options['self_signed_cert']:
if settings.SELF_SIGNED_SSL_CERT:
print 1
else:
print 0
elif options['nmtk_server_status']:
if settings.NMTK_SERVER:
print 1
else:
print 0
elif options['tool_server_status']:
if settings.TOOL_SERVER:
print 1
else:
print 0
elif options['production']:
if settings.PRODUCTION:
print 1
else:
print 0
elif options['tool_server_url']:
if settings.SSL:
ssl = 's'
else:
ssl = ''
print 'http{0}://{1}{2}{3}'.format(ssl,
settings.SITE_DOMAIN,
settings.PORT,
reverse('tool_index'))
```
#### File: NMTK_apps/NMTK_server/tasks.py
```python
from celery.task import task
import json
import decimal
import requests
import urlparse
import hmac
import hashlib
import uuid
from django.utils import timezone
from django.conf import settings
from django.core.management.color import no_style
from django.db import connections, transaction
import logging
import shutil
import os
import time
import mimetypes
import cStringIO as StringIO
from django.core.exceptions import ObjectDoesNotExist
from NMTK_apps.helpers.data_output import getQuerySet, json_custom_serializer
from NMTK_server.data_loaders.loaders import NMTKDataLoader
from django.core.files import File
from django.contrib.gis import geos
from django.contrib.auth import get_user_model
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from django.core.management.commands import inspectdb
from django.template.loader import render_to_string
from django.core.mail import send_mail, EmailMessage
from django.core.files.base import ContentFile
from django.shortcuts import render
from django.db.models import fields as django_model_fields
from django.db.models import Max, Min, Count
from osgeo import ogr
import imp
import datetime
from django.contrib.gis.geos import GEOSGeometry
import tempfile
#from django.core.serializers.json import DjangoJSONEncoder
logger = logging.getLogger(__name__)
geomodel_mappings = {
ogr.wkbPoint: ('models.PointField',
geos.Point, 'point'),
ogr.wkbGeometryCollection: ('models.GeometryField',
geos.GEOSGeometry, 'point'),
ogr.wkbLineString: ('models.LineStringField',
geos.LineString, 'line'),
ogr.wkbMultiPoint: ('models.MultiPointField',
geos.MultiPoint, 'point'),
ogr.wkbMultiPolygon: ('models.MultiPolygonField',
geos.MultiPolygon, 'polygon'),
ogr.wkbPolygon: ('models.PolygonField',
geos.Polygon, 'polygon'),
ogr.wkbMultiLineString: ('models.MultiLineStringField',
geos.MultiLineString, 'line'),
99: (None, None, 'raster'),
}
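# Example lookup (illustration only): an OGR point layer maps to a Django
# PointField, the GEOS Point constructor and the 'point' mapfile style:
#   geomodel_mappings[ogr.wkbPoint]  # -> ('models.PointField', geos.Point, 'point')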
# This actually does not get done as a task - it is inline with the
# response from the tool server.
def generate_datamodel(datafile, loader):
def propertymap(data):
output = {}
used = []
c = inspectdb.Command()
for k in data:
att_name, params, notes = inspectdb.Command.normalize_col_name(
c, k, used, False)
# logger.debug('Field %s, %s', att_name, notes)
used.append(att_name)
output[k] = att_name
# logger.debug('Mappings are %s', output)
return output
try:
if loader.is_spatial:
spatial = True
geom_type = loader.info.type
model_type, geos_func, mapfile_type = geomodel_mappings[geom_type]
else:
spatial = False
db_created = False
this_model = None
colors = []
model_content = ['from django.contrib.gis.db import models']
# feature_id=1
for (row, geometry) in loader:
if not db_created:
db_created = True
if datafile.result_field:
min_result = max_result = float(row[datafile.result_field])
# Create the model for this data
model_content.append(
'class Results_{0}(models.Model):'.format(
datafile.pk))
# Add an auto-increment field for it (the PK)
model_content.append(
'{0}nmtk_id=models.AutoField(primary_key=True, )'.format(
' ' * 4))
# model_content.append('{0}nmtk_feature_id=models.IntegerField()'.format(' '*4))
                # Add an entry for each of the fields, using the loader's
                # field/type information rather than inferring it from row keys
fields_types = loader.info.fields_types
field_map = propertymap(
(field_name for field_name, type in fields_types))
type_mapping = {str: ('models.TextField', ''),
unicode: ('models.TextField', ''),
# We support up to a 32 digit integer.
int: ('models.DecimalField', 'max_digits=32, decimal_places=0, '),
float: ('models.FloatField', ''),
datetime.date: ('models.DateField', ''),
datetime.time: ('models.TimeField', ''),
bool: ('models.BooleanField', ''),
datetime.datetime: ('models.DateTimeField', ''), }
for field_name, field_type in fields_types:
if field_type not in type_mapping:
logger.info(
'No type mapping exists for type %s (using TextField)!',
field_type)
field_type = str
model_content.append(
"""{0}{1}={2}({3} null=True, db_column='''{4}''')""". format(
' ' * 4,
field_map[field_name],
type_mapping[field_type][0],
type_mapping[field_type][1],
field_name))
if spatial:
model_content.append(
'''{0}nmtk_geometry={1}(null=True, srid=4326, dim={2})'''. format(
' ' * 4, model_type, loader.info.dimensions))
model_content.append(
'''{0}objects=models.GeoManager()'''.format(
' ' * 4,))
model_content.append('''{0}class Meta:'''.format(' ' * 4,))
model_content.append(
'''{0}db_table='userdata_results_{1}' '''.format(
' ' * 8, datafile.pk))
logger.error('working on saving the model datafile!')
datafile.model.save(
'model.py', ContentFile(
'\n'.join(model_content)), save=False)
# logger.debug('\n'.join(model_content))
user_models = imp.load_source(
'%s.models' %
(datafile.pk,), datafile.model.path)
Results_model = getattr(
user_models, 'Results_{0}'.format(
datafile.pk))
database = 'default'
# If using PostgreSQL, then just create the model and go...
dbtype = 'postgis'
connection = connections[database]
cursor = connection.cursor()
for statement in connection.creation.sql_create_model(
Results_model, no_style())[0]:
cursor.execute(statement)
for statement in connection.creation.sql_indexes_for_model(
Results_model, no_style()):
cursor.execute(statement)
this_row = dict((field_map[k], v) for k, v in row.iteritems())
if spatial:
this_row['nmtk_geometry'] = geometry
if datafile.result_field:
try:
logger.debug('Row is %s', this_row)
min_result = min(
float(
this_row[
datafile.result_field.lower()]),
min_result)
max_result = max(
float(
this_row[
datafile.result_field.lower()]),
max_result)
except Exception as e:
logger.exception(
'Result field (%s) is not a float (ignoring)',
datafile.result_field)
else:
min_result = max_result = 1
m = Results_model(**this_row)
try:
m.save(using=database)
except Exception as e:
logger.exception(
'Failed to save record from data file (%s)', this_row)
logger.error(
'The type of data in question was %s (%s)', m, this_row)
raise e
logger.debug(
'Completing transferring results to %s database %s',
dbtype,
datafile.pk,
)
except Exception as e:
logger.exception('Failed to create spatialite results table')
return datafile
logger.error('About to return job back to caller - %s', datafile.pk)
return datafile
@task(ignore_result=True)
def email_user_job_done(job):
context = {'job': job,
'user': job.user,
'tool': job.tool,
'site': Site.objects.get_current()}
logger.debug('Job complete (%s), sending email to %s',
job.tool.name, job.user.email)
subject = render_to_string('NMTK_server/job_finished_subject.txt',
context).strip().replace('\n', ' ')
message = render_to_string('NMTK_server/job_finished_message.txt',
context)
logger.debug(
'Sending job complete notification email to %s',
job.user.email)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL,
[job.user.email, ])
@task(ignore_result=False)
def verify_celery():
'''
A simple task that just returns true - used to verify if celery is actually
working - since we submit a job and wait for its result to come back.
'''
return True
@task(ignore_result=False)
def add_toolserver(name, url, username, remote_ip=None, contact=None, skip_email=False,
verify_ssl=True):
from NMTK_server import models
try:
User = get_user_model()
user = User.objects.get(username=username)
except Exception as e:
raise CommandError('Username specified (%s) not found!' %
username)
m = models.ToolServer(name=name,
server_url=url,
remote_ip=remote_ip,
verify_ssl=verify_ssl,
contact=contact,
created_by=user,
skip_email=skip_email)
m.save()
return m
@task(ignore_result=True)
def email_tool_server_admin(toolserver):
'''
Email the tool server administrator with the credentials to use/add for
the tool server.
'''
context = {'toolserver': toolserver,
'site': Site.objects.get_current()}
subject = render_to_string('NMTK_server/tool_server_added_notification_subject.txt',
context).strip().replace('\n', ' ')
message = render_to_string('NMTK_server/tool_server_added_notification.txt',
context)
logger.debug(
'Sending tool server added notification email to %s',
toolserver.contact)
message = EmailMessage(subject=subject,
body=message,
from_email=settings.DEFAULT_FROM_EMAIL,
to=[toolserver.contact, ],
attachments=[('nmtk_server.config', toolserver.json_config(), 'application/json',), ])
message.send()
@task(ignore_result=False)
def discover_tools(toolserver):
from NMTK_server import models
if not toolserver.server_url.endswith('/'):
append_slash = '/'
else:
append_slash = ''
# index returns a json list of tools.
try:
try:
tool_list = requests.get(
toolserver.server_url, verify=toolserver.verify_ssl).json()
except:
url = "{0}{1}index".format(toolserver.server_url, append_slash)
tool_list = requests.get(url, verify=toolserver.verify_ssl).json()
logger.debug('Retrieved tool list of: %s', tool_list)
except Exception, e:
logger.exception(
'Failed to reach tool server to retrieve tools: %s', str(e))
tool_list = []
for tool in tool_list:
try:
t = models.Tool.objects.get(tool_server=toolserver,
tool_path=tool)
# Clean up any sample files, we will reload them now.
if hasattr(t, 'toolsampleconfig'):
t.toolsampleconfig.delete()
# Need to iterate and delete so that the files get deleted
# also, since the set delete method won't call the individual
# delete method(s).
for item in t.toolsamplefile_set.all():
item.delete()
# t.toolsamplefile_set.all().delete()
except ObjectDoesNotExist:
t = models.Tool(tool_server=toolserver,
name=tool)
t.active = True
t.tool_path = tool
t.name = tool
t.save()
# Locate all the tools that aren't there anymore and disable them.
for row in models.Tool.objects.exclude(
tool_path__in=tool_list).filter(
active=True,
tool_server=toolserver):
logger.debug('Disabling tool %s', row.name)
row.active = False
row.save()
@task(ignore_result=False)
def cancelJob(job_id, tool_id):
'''
Whenever a job that is active is cancelled, we need to notify the tool
server to cancel the job as well.
The tool doesn't support this (yet), since each tool might be different
'''
from NMTK_server import models
logger = cancelJob.get_logger()
tool = models.Tool.objects.get(pk=tool_id)
logger.debug('Cancelling job %s to tool %s for processing', job_id,
tool)
config_data = job_id
digest_maker = hmac.new(str(tool.tool_server.auth_token),
config_data,
hashlib.sha1)
digest = digest_maker.hexdigest()
files = {'cancel': ('cancel', job_id)}
r = requests.delete(tool.analyze_url, files=files,
headers={'Authorization': digest},
                        verify=tool.tool_server.verify_ssl)
logger.debug(
"Submitted cancellation request for job to %s tool, response was %s (%s)",
tool,
r.text,
r.status_code)
@task(ignore_result=False)
def submitJob(job_id):
'''
Whenever a job status is set to active in the database, the
signal attached to the model causes the job to be submitted.
This causes this task (a celery task) to run, and submit
the job to the tool.
'''
from NMTK_server import models
job = models.Job.objects.get(pk=job_id)
# Get a logger to log status for this task.
logger = submitJob.get_logger()
logger.debug('Submitting job %s to tool %s for processing', job.pk,
job.tool)
configuration = {'analysis settings': job.config}
configuration['job'] = {
'tool_server_id': "%s" % (job.tool.tool_server.tool_server_id,
),
'job_id': str(
job.job_id),
'timestamp': timezone.now().isoformat()}
# cls=DjangoJSONEncoder)
config_data = json.dumps(configuration, default=json_custom_serializer)
digest_maker = hmac.new(str(job.tool.tool_server.auth_token),
config_data,
hashlib.sha1)
digest = digest_maker.hexdigest()
files = {'config': ('config', config_data)}
for jobfile in job.jobfile_set.all():
if jobfile.datafile.processed_file:
files[
jobfile.namespace] = (
jobfile.datafile.processed_file.name,
jobfile.datafile.processed_file)
else:
files[
jobfile.namespace] = (
jobfile.datafile.file.name,
jobfile.datafile.file)
logger.debug('Files for job are %s', files)
r = requests.post(job.tool.analyze_url, files=files,
headers={'Authorization': digest,
'Referer': job.tool.analyze_url},
verify=job.tool.tool_server.verify_ssl)
logger.debug("Submitted job to %s tool, response was %s (%s)",
job.tool, r.text, r.status_code)
if r.status_code != 200:
job.status = job.TOOL_FAILED
js = models.JobStatus(
job=job,
message='Tool failed to accept job (return code %s)' % (
r.status_code,),
category=models.JobStatus.CATEGORY_SYSTEM)
js.save()
job.save()
else:
status_m = models.JobStatus(
message='Submitted job to {0} tool, response was {1} ({2})'.format(
job.tool, r.text, r.status_code),
timestamp=timezone.now(),
job=job,
category=models.JobStatus.CATEGORY_SYSTEM)
status_m.save()
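# Hedged sketch (not part of this module): the receiving tool server is expected
# to recompute the same HMAC-SHA1 over the raw 'config' body with the shared auth
# token and compare it to the Authorization header sent above, roughly:
#   expected = hmac.new(str(shared_auth_token), raw_config_body, hashlib.sha1).hexdigest()
#   authorized = hmac.compare_digest(expected, received_authorization_header)
# where shared_auth_token, raw_config_body and received_authorization_header are
# hypothetical names for the values available on the tool server side.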
@task(ignore_result=False)
def updateToolConfig(tool):
from NMTK_server import models
json_config = requests.get(
tool.config_url, verify=tool.tool_server.verify_ssl)
try:
config = tool.toolconfig
except:
config = models.ToolConfig(tool=tool)
config_data = json_config.json()
config.json_config = config_data
config.save()
if hasattr(tool, 'toolsampleconfig') and tool.toolsampleconfig.pk:
tool.toolsampleconfig.delete()
tool.toolsamplefile_set.all().delete()
try:
logger.debug('Trying to load sample config for %s', tool.name)
logger.debug('Config is %s', config_data)
if (isinstance(config_data.get('sample', None), (dict,)) and
config_data['sample'].get('config')):
objects_to_save = []
objects_to_delete = []
tsc = models.ToolSampleConfig(
sample_config=config_data['sample']['config'], tool=tool)
objects_to_save.append(tsc)
reqd_fields = ['namespace', 'checksum']
for fconfig in config_data['sample'].get('files', []):
logger.debug('Working with %s', fconfig)
sample_config_fields = {'tool': tool}
for f in reqd_fields:
if f in fconfig:
sample_config_fields[f] = fconfig.get(f)
else:
raise Exception('Missing required field: %s' % (f,))
m = models.ToolSampleFile(**sample_config_fields)
if 'uri' in fconfig:
parsed = urlparse.urlparse(fconfig.get('uri'))
if not parsed.scheme:
if parsed.path[0] == '/':
p = urlparse.urlparse(tool.tool_server.server_url)
fconfig['uri'] = urlparse.urlunparse(
[p.scheme, p.netloc, fconfig['uri'], '', '', ''])
else:
raise Exception(
'Only absolute URLs or fully-qualified URLs allowed')
logger.debug('Attempting to download %s', fconfig['uri'])
data = requests.get(fconfig['uri'], stream=True,
verify=tool.tool_server.verify_ssl)
checksum = hashlib.sha1()
if data.status_code != 200:
raise Exception('Failed to download data file %s',
fconfig['uri'])
logger.debug('Download succeeded!')
with tempfile.TemporaryFile() as f:
for chunk in data.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
checksum.update(chunk)
f.flush()
if checksum.hexdigest() == fconfig['checksum']:
f.seek(0)
# Get the file name
name = os.path.basename(
urlparse.urlparse(fconfig['uri']).path)
if 'content-type' in fconfig:
m.content_type = fconfig['content-type']
elif 'content-type' in data.headers:
m.content_type = data.headers[
'content-type'].partition(';')[0]
else:
t = mimetypes.guess_type(fconfig['uri'])[0]
if t:
m.content_type = t
m.file.save(name, File(f))
objects_to_delete.append(m)
else:
logger.error(
'Checksum MISMATCH (Expected: %s, Computed: %s)',
fconfig['checksum'],
checksum.hexdigest())
for m in objects_to_save:
m2 = models.ToolSampleConfig.objects.filter(tool=m.tool)
if len(m2) == 1:
m.pk = m2[0].pk
[m.save() for m in objects_to_save]
except:
logger.exception('Failed to load tool sample config.')
# If we fail, we need to delete any downloaded files we saved.
[m.delete() for m in objects_to_delete]
# Note: We use update here instead of save, since we want to ensure that
# we don't call the post_save handler, which would result in
# a recursion loop.
logger.debug('Setting tool name to %s', config_data['info']['name'])
# This doesn't call the save method, so we're okay here in preventing a
# loop.
models.Tool.objects.filter(
pk=config.tool.pk).update(
name=config_data['info']['name'])
@task(ignore_result=False)
def importDataFile(datafile, job_id=None):
from NMTK_server import models
datafile.status_message = None
try:
loader = NMTKDataLoader(datafile.file.path,
srid=datafile.srid)
destination = None
for import_file in loader.extract_files():
# Figure out where these files need to go.
if not destination:
destination = os.path.dirname(datafile.file.path)
# the first file we get (when destination is null,it's our first
# loop) is the one that needs to be in the model, handle that
# here...
if datafile.file.path != import_file:
f = open(import_file)
datafile.file.save(os.path.basename(import_file), File(f))
else:
shutil.copyfile(import_file,
os.path.join(destination,
os.path.basename(import_file)))
logger.debug('Created a new file for %s', import_file)
if loader.is_spatial:
datafile.srid = loader.info.srid
datafile.srs = loader.info.srs
datafile.geom_type = loader.info.type
logger.debug('Loader extent is %s', loader.info.extent)
extent = geos.Polygon.from_bbox(loader.info.extent)
logger.debug("Extent is 'srid=%s;%s'::geometry", loader.info.srid,
extent,)
if datafile.srid:
extent.srid = int(loader.info.srid)
extent.transform(4326)
logger.debug("Extent is 'srid=%s;%s'::geometry", 4326,
extent,)
datafile.extent = extent
datafile.feature_count = loader.info.feature_count
if not datafile.description:
datafile.description = loader.info.format
if loader.is_spatial and not datafile.srid:
datafile.status = datafile.IMPORT_FAILED
datafile.status_message = 'Please specify SRID for this file (unable to auto-identify SRID)'
elif not job_id:
datafile.status = datafile.IMPORTED
else:
datafile.status = datafile.IMPORT_RESULTS_COMPLETE
datafile.fields = loader.info.fields
# Create an empty file using ContentFile, then we can overwrite it
# with the desired GeoJSON data.
if loader.is_spatial:
suffix = 'geojson'
else:
suffix = 'json'
if datafile.status in (
datafile.IMPORTED,
datafile.IMPORT_RESULTS_COMPLETE):
if datafile.geom_type == 99:
field_attributes = {}
# This is a raster...
for pos, band in enumerate(loader.dl_instance.bands()):
field_attributes[pos + 1] = {
'type': band.type,
'field_name': 'pixel',
'min': band.min,
'max': band.max}
datafile.field_attributes = field_attributes
elif datafile.feature_count:
logger.error('Working on saving the model!')
datafile.processed_file.save('{0}.{1}'.format(datafile.pk, suffix),
ContentFile(''))
loader.export_json(datafile.processed_file.path)
generate_datamodel(datafile, loader)
# Here we load the spatialite data using the model that was created
# by generate_datamodel. We need to use this to get the range
# and type information for each field...
try:
field_attributes = {}
qs = getQuerySet(datafile)
field_mappings = [(django_model_fields.IntegerField, 'integer', int),
# Required because nmtk_id is an
# autofield..
(django_model_fields.AutoField,
'integer', int,),
(django_model_fields.BooleanField,
'boolean', bool),
# Special case holding FIPS
(django_model_fields.DecimalField,
'float', float),
(django_model_fields.TextField,
'text', None),
(django_model_fields.FloatField,
'float', float),
(django_model_fields.DateField,
'date', None,),
(django_model_fields.TimeField,
'time', None,),
(django_model_fields.DateTimeField,
'datetime', None)]
if qs.count() > 0:
# Get a single row so that we can try to work with the
# fields.
sample_row = qs[0]
for field in sample_row._meta.fields:
field_name = field.name
db_column = field.db_column or field.name
# convert the django field type to a text string.
for ftype, field_type, caster in field_mappings:
if isinstance(field, (ftype,)):
break
else:
logger.info(
'Unable to map field of type %s (this is expected for GIS fields)', type(
field, ))
continue
values_aggregates = qs.aggregate(
Count(field_name, distinct=True))
field_attributes[db_column] = {
'type': field_type,
'field_name': field_name,
'distinct': values_aggregates[
'{0}__count'.format(field_name)]}
if field_attributes[db_column]['distinct'] < 10:
distinct_values = list(
qs.order_by().values_list(
field_name, flat=True).distinct())
if not caster:
field_attributes[db_column][
'values'] = distinct_values
else:
field_attributes[db_column][
'values'] = map(caster, distinct_values)
else:
logger.debug(
'There are more than 10 values for %s (%s), enumerating..',
db_column,
field_attributes[db_column]['distinct'])
# formerly the aggregates happened above - with the count. However, Django doesn't
# allow those aggregates with boolean fields - so here we split it up to only do the
# aggregates in the cases where we have to (i.e.,
# the distinct values is above the threshold.)
values_aggregates = qs.aggregate(
Max(field_name), Min(field_name), )
field_attributes[db_column]['min'] = values_aggregates[
'{0}__min'.format(field_name)]
field_attributes[db_column]['max'] = values_aggregates[
'{0}__max'.format(field_name)]
if caster:
field_attributes[db_column]['min'] = caster(
field_attributes[db_column]['min'])
field_attributes[db_column]['max'] = caster(
field_attributes[db_column]['max'])
datafile.field_attributes = field_attributes
except Exception as e:
logger.exception('Failed to get range for model %s',
datafile.pk)
if job_id:
try:
job = models.Job.objects.get(pk=job_id)
# There might be multiple results files from this job, so we will only
# mark the job as complete if all the results files are
# processed.
if job.status != job.COMPLETE:
results_left = job.job_files.filter(
status=models.DataFile.PROCESSING_RESULTS).count()
if results_left == 0:
job.status = job.COMPLETE
models.JobStatus(message='Job Completed',
timestamp=timezone.now(),
job=job,
category=models.JobStatus.CATEGORY_SYSTEM).save()
elif results_left == 1:
# Handle the potential race condition here - do we really need this?
# sort of. Since it's possible that two files finish post-processing
# at the same time. In such cases, a second should be more than enough
# time to get both committed as complete.
time.sleep(1)
job = models.Job.objects.get(pk=job_id)
if job.status != job.COMPLETE:
results_left = job.job_files.filter(
status=models.DataFile.PROCESSING_RESULTS).count()
if results_left == 0:
job.status = job.COMPLETE
models.JobStatus(message='Job Completed',
timestamp=timezone.now(),
job=job,
category=models.JobStatus.CATEGORY_SYSTEM).save()
except:
logger.exception('Failed to update job status to complete?!!')
except Exception as e:
logger.exception('Failed import process!')
datafile.processed_file = None
if not job_id:
datafile.status = datafile.IMPORT_FAILED
else:
datafile.status = datafile.IMPORT_RESULTS_FAILED
datafile.status_message = "%s" % (e,)
if job_id:
try:
job = models.Job.objects.get(pk=job_id)
job.status = job.POST_PROCESSING_FAILED
except:
logger.exception('Failed to update job status to failed?!!')
if job_id:
job.save()
# Now we need to create the spatialite version of this thing.
datafile.save()
``` |
{
"source": "jrawsthorne/sdkd-cxx",
"score": 2
} |
#### File: pylib/sdkd_cpp/lcbpkg.py
```python
import sys
import os.path
import os
import imp
import shutil
import platform
import urllib.parse
import json
import warnings
import multiprocessing
from subprocess import Popen, PIPE
COMMON_LIB = os.path.dirname(__file__)
COMMON_CACHE = os.path.join(COMMON_LIB, "..", "pkgcache")
COMMON_CACHE = os.path.abspath(COMMON_CACHE)
SRC_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__),
"..", "..", "src"))
MAKECMD = "make -s -j " + str(multiprocessing.cpu_count()) + " "
_versions_json = os.path.join(COMMON_LIB, 'versions.json')
_versions_json = open(_versions_json, "r")
VERSION_INFO = json.load(_versions_json)
def get_version_option_strings(self):
strs = []
for k,v in list(VERSION_INFO['debs'].items()):
strs.append(v)
return strs
def download_if_empty(dst, src):
do_download = False
if not os.path.exists(dst):
do_download = True
else:
if os.stat(dst)[6] == 0:
os.unlink(dst)
do_download = True
if not do_download:
return True
rv = run_command("wget --progress=dot "
"-O {0} {1}".format(dst, src),
assert_ok = True)
return True
_CMD_VERBOSE = os.environ.get('SDKD_BUILD_VERBOSE', False)
def run_command(cmd, assert_ok = False):
if _CMD_VERBOSE:
print("== RUN (START): " + cmd)
rv = os.system(cmd)
if _CMD_VERBOSE:
print("== RUN (EC={0})".format(rv))
print("")
if assert_ok and rv != 0:
assert rv == 0, "Command failed to execute"
return rv
class Common(object):
git_base = 'git://github.com/mrtazz/json-cpp.git'
@classmethod
def get_jsoncpp_libdir(self):
ret = os.path.join(COMMON_CACHE,
"json-cpp-lib-{0}".format(self.get_host_md5()))
return ret
@classmethod
def get_jsoncpp_incdir(self):
return os.path.join(COMMON_CACHE, "json-cpp", "include")
def extract(self):
jp = os.path.join(COMMON_CACHE, "json-cpp")
if not os.path.exists(jp):
run_command("git clone {0} {1}".format(self.git_base, jp))
jplib = self.get_jsoncpp_libdir()
if not os.path.exists(jplib):
oldpwd = os.getcwd()
os.mkdir(jplib)
os.chdir(jp)
mkcmd = MAKECMD + " -f makefiles/gcc/Makefile"
run_command(mkcmd + " clean")
run_command(mkcmd + " staticlibrary")
src = os.path.join(jp, "lib/libjson-cpp.a")
target = os.path.join(jplib, "libjson-cpp.a")
shutil.copy(src, target)
os.chdir(oldpwd)
def get_lcb_libdir(self):
raise NotImplementedError()
def get_lcb_incdir(self):
raise NotImplementedError()
def get_sdkd_dest(self):
raise NotImplementedError()
def get_lcb_dso(self):
raise NotImplementedError()
def build_sdkd(self):
# TODO, make this smarter about re-building targets
sdkd_path = self.get_sdkd_dest()
# For now, the following block is commented out as we want to build
# for multiple versions, and with possible changes to the sdkd itself
# we'd like to rebuild the sdkd itself.
#if os.path.exists(sdkd_path):
# return
run_command(MAKECMD + " -C {0} clean".format(SRC_PATH))
mklines = [
'JSONCPP_LFLAGS=' + self.get_jsoncpp_libdir() + "/libjson-cpp.a",
'JSONCPP_CPPFLAGS=-I' + self.get_jsoncpp_incdir(),
'LCB_CPPFLAGS=-I' + self.get_lcb_incdir(),
]
lcb_lflags = 'LCB_LFLAGS="-L{0} '.format(self.get_lcb_libdir())
lcb_lflags += ' -Wl,-rpath={0} '.format(self.get_lcb_libdir())
lcb_lflags += '-lcouchbase"'
mklines.append(lcb_lflags)
cmd = MAKECMD + ' -C {0} '.format(SRC_PATH)
cmd += " ".join(mklines)
run_command(cmd)
shutil.copy(os.path.join(SRC_PATH, "sdkd_lcb"), self.get_sdkd_dest())
libutil = os.path.join(SRC_PATH, "libcbsdkd.so")
if os.path.exists(libutil):
shutil.copy(libutil, os.path.join(os.path.dirname(self.get_sdkd_dest()),
"libcbsdkd.so"))
@staticmethod
def get_host_md5():
po = Popen("gcc -v 2>&1 | md5sum -", stdout = PIPE, shell = True)
stdout, stderr = po.communicate()
        # communicate() returns bytes here; decode so the hash can be embedded
        # in directory names built with str.format below.
        md5, _ = stdout.decode().split()
        return md5
@staticmethod
def make_base_dir(tag):
pkgcache = os.path.dirname(__file__)
pkgcache = os.path.join(pkgcache, '..', 'pkgcache')
pkgcache = os.path.abspath(pkgcache)
tagstr = "{0}-{1}".format(tag, Common.get_host_md5())
basedir = os.path.join(pkgcache, tagstr)
if not os.path.exists(basedir):
run_command("mkdir -p " + basedir)
return basedir
class PkgCollectionAbstract(object):
class Package(object):
def __init__(self, s):
self.name = s
self.filename = None
self.url = None
def __init__(self, urlbase, strlist):
self.urlbase = urlbase
l = []
arch = platform.uname()[4]
if arch == 'x86_64':
arch = self.ARCHSTR_64
else:
arch = self.ARCHSTR_32
for pkg in strlist:
pkg = pkg.format(arch = arch)
l.append(pkg)
self._l = l
self.arch = arch
def __iter__(self):
return iter(self._l)
def to_url(self, pkgname, **fmtextra):
s = pkgname
if not s.startswith('http'):
s = self.urlbase + pkgname
s = s.format(arch = self.arch, **fmtextra)
return s
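    # For example (hypothetical package name): with urlbase
    # "http://example.org/pkgs/" and arch "amd64", a pkgname of
    # "libcouchbase2_{arch}.deb" would resolve to
    # "http://example.org/pkgs/libcouchbase2_amd64.deb".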
def download(self, pkg, basedir):
"""
Tries to download the package pkg into the directory basedir.
Returns the local path of the package
"""
pkgpath = os.path.join(basedir, os.path.basename(pkg))
download_if_empty(pkgpath, self.to_url(pkg))
return pkgpath
class Deb(Common):
class DebPkgCollection(PkgCollectionAbstract):
ARCHSTR_64 = 'amd64'
ARCHSTR_32 = 'i386'
versions = {}
for k, v in list(VERSION_INFO['debian'].items()):
versions[k] = DebPkgCollection(v['urlbase'], v['debs'])
def __init__(self, version):
self.basedir = self.make_base_dir(version + "-deb")
self.versions[version]
self.curvers = version
def extract(self):
super(Deb, self).extract()
pkgs = self.versions[self.curvers]
inst_dir = os.path.join(self.basedir, "inst")
if not os.path.exists(inst_dir):
run_command("mkdir " + inst_dir)
for pkg in pkgs:
pkgpath = pkgs.download(pkg, self.basedir)
run_command("dpkg -x {0} {1}".format(pkgpath, inst_dir))
return self
def get_lcb_libdir(self):
return os.path.join(self.basedir, 'inst', 'usr', 'lib')
def get_lcb_incdir(self):
return os.path.join(self.basedir, 'inst', 'usr', 'include')
def get_sdkd_dest(self):
return os.path.join(self.basedir, 'sdkd')
def build(self):
self.build_sdkd()
return self
class RPM(Common):
class RpmPkgCollection(PkgCollectionAbstract):
ARCHSTR_64 = 'x86_64'
ARCHSTR_32 = 'i686'
elversion = None
@classmethod
def detect_el_version(cls):
"""
Call this to initialize the class
"""
if cls.elversion:
return
el = platform.dist()[2]
if el.startswith('5'):
cls.elversion = '5.5'
elif el.startswith('6'):
cls.elversion = '6.2'
else:
warnings.warn("Can't detect EL version. Fallback to 5.5")
cls.elversion = '5.5'
def to_url(self, pkg):
pkg = super(self.__class__, self).to_url(pkg, el = self.elversion)
return pkg
versions = {}
for k, v in list(VERSION_INFO['redhat'].items()):
versions[k] = RpmPkgCollection(v['urlbase'], v['rpms'])
def __init__(self, version):
self.RpmPkgCollection.detect_el_version()
self.basedir = self.make_base_dir(
"{0}-rpm{1}".format(version, self.RpmPkgCollection.elversion))
self.versions[version]
self.curvers = version
@property
def _pkg_collection(self):
return self.versions[self.curvers]
def extract(self):
super(RPM, self).extract()
pkgs = self._pkg_collection
inst_dir = os.path.join(self.basedir, "inst")
if not os.path.exists(inst_dir):
os.mkdir(inst_dir)
for pkg in pkgs:
pkgpath = pkgs.download(pkg, self.basedir)
oldpwd = os.getcwd()
# Now for the funky command string..
cmd = "rpm2cpio < " + pkgpath
cmd += "| cpio --extract --make-directories --unconditional --quiet"
cmd += " --no-preserve-owner"
os.chdir(inst_dir)
run_command(cmd)
os.chdir(oldpwd)
return self
@property
def _usrpath(self):
return os.path.join(self.basedir, 'inst', 'usr')
def get_lcb_libdir(self):
if self._pkg_collection.arch == self._pkg_collection.ARCHSTR_32:
return os.path.join(self._usrpath, 'lib')
else:
return os.path.join(self._usrpath, 'lib64')
def get_lcb_incdir(self):
return os.path.join(self._usrpath, 'include')
def get_sdkd_dest(self):
return os.path.join(self.basedir, 'sdkd')
def build(self):
return self.build_sdkd()
def _is_archive(name):
suffixes = ['.gz', '.bz2', '.tar']
for suffix in suffixes:
if name.endswith(suffix):
return True
return False
class Source(Common):
    def __init__(self, srcpath, configure_args=None, force_rebuild=False):
        self.srcpath = srcpath
        # Copy the list to avoid mutating a shared default across instances.
        self.configure_args = list(configure_args) if configure_args else []
        self.configure_args.append("--enable-debug")
def _handle_non_dir(self):
tarball = None
if os.path.basename(self.srcpath) == self.srcpath:
# Version string, old-style linking
self.srcpath = "{0}libcouchbase-{1}.tar.gz".format(
VERSION_INFO['tarball']['urlbase'], self.srcpath)
if (self.srcpath.lower().startswith("http")):
url = urllib.parse.urlparse(self.srcpath)
path = url[2]
tarball = os.path.basename(path)
tarball = os.path.join(COMMON_CACHE, tarball)
download_if_empty(tarball, self.srcpath)
elif _is_archive(self.srcpath):
tarball = self.srcpath
if not tarball:
return
# Extract the tarball:
basepath = os.path.splitext(tarball)[0]
basepath = os.path.basename(basepath)
basepath = "{0}-tarball-{1}".format(basepath, self.get_host_md5())
basepath = os.path.join(COMMON_CACHE, basepath)
if not os.path.exists(basepath):
os.mkdir(basepath)
cmd = "tar xf {0} --strip-components=1 -C {1}".format(
tarball, basepath)
print(cmd)
run_command(cmd)
self.srcpath = basepath
def extract(self):
super(Source, self).extract()
self._handle_non_dir()
self._inst = os.path.join(self.srcpath, 'INST')
return self
def get_sdkd_dest(self):
return os.path.join(self.srcpath, 'sdkd')
def get_lcb_libdir(self):
return os.path.join(self._inst, 'lib')
def get_lcb_incdir(self):
return os.path.join(self._inst, 'include')
def build(self):
if os.path.exists(self._inst):
return self.build_sdkd()
oldpwd = os.getcwd()
os.chdir(self.srcpath)
cmd = " ".join( ["./configure "] + self.configure_args)
cmd += " --prefix={0}".format(self._inst)
rv = run_command(cmd)
if rv != 0:
raise Exception("Configure failed")
rv = run_command(MAKECMD + " install")
if rv != 0:
raise Exception("Build failed")
os.chdir(oldpwd)
return self.build_sdkd()
DEFAULT_CLASS = None
DEFAULT_VERSION = '2.0.0-beta2'
dist = platform.dist()
if dist[0] in ('debian', 'ubuntu'):
DEFAULT_CLASS = 'Deb'
elif dist[0] == 'redhat':
DEFAULT_CLASS = 'RPM'
else:
DEFAULT_CLASS = 'Source'
DEFAULT_VERSION = '2.0.0-beta2'
if __name__ == "__main__":
# Test the classes, if possible..
if len(sys.argv) > 1:
cls = sys.argv[1]
version = sys.argv[2]
globals()[cls](version).extract().build()
sys.exit(0)
Deb('1.0.4').extract().build()
Deb('2.0.0-beta2').extract().build()
RPM('1.0.4').extract().build()
RPM('2.0.0-beta2').extract().build()
Source('1.0.4').extract().build()
Source('1.0.5').extract().build()
Source('1.0.6').extract().build()
#Source('1.0.7').extract().build()
Source('2.0.0beta2').extract().build()
``` |
{
"source": "jraximus/codingame-solutions",
"score": 4
} |
#### File: codingame-solutions/easy/chuck-norris.py
```python
import sys
import math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
message = raw_input()
# Write an action using print
# To debug: print >> sys.stderr, "Debug messages..."
def make_7_bit(base_2_ascii):
result = base_2_ascii
for x in range(0, 7 - len(base_2_ascii)):
result = "0" + result
return result
result = ""
base = -1
count = ""
previous = -1
result_space_appender = ""
base_2_ascii = ""
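# Chuck Norris unary encoding: the 7-bit message is emitted as pairs of
# space-separated blocks. For each run of identical bits, the first block is
# "0" for a run of 1s or "00" for a run of 0s, and the second block contains
# one "0" per bit in the run.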
for c in message:
base_10_ascii = ord(c)
base_2_ascii += make_7_bit(bin(base_10_ascii)[2:])
index = 0
while index < len(base_2_ascii):
bit = base_2_ascii[index]
if base == -1:
if bit == "1":
base = "0"
else:
base = "00"
index += 1
previous = bit
count = "0"
else:
if bit == previous:
count += "0"
index += 1
else:
result += result_space_appender + base + " " + count
result_space_appender = " "
base = -1
if base != -1 and index == len(base_2_ascii):
result += result_space_appender + base + " " + count
print >> sys.stderr, message
print >> sys.stderr, base_10_ascii
print >> sys.stderr, base_2_ascii
print result
``` |
{
"source": "jraycok/phd",
"score": 3
} |
#### File: jraycok/phd/app.py
```python
import urllib
import json
import os
import requests
import math
from flask import Flask
from flask import request
from flask import make_response
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def makeWebhookResult(req):
if (req.get("result").get("action") == "pilihsnacknya"):
a=req.get("result").get("resolvedQuery")
a=int(a)
if(a==1):
b="anda membeli kripik singkong (4.000)"
bb=" kripik singkong"
elif(a==2):
b="anda membeli makroni pedas (4.000)"
bb=" makroni pedas"
elif(a==3):
b="anda membeli makroni asin (4.000)"
bb=" makroni asin"
return {
"speech": b+"\n"+"masukkan jumlah yang ingin anda pesan"+"\n"+"masukkan dengan menuliskan .0 dibagian akhir"+"\n"+"contoh memesan 2 : 2.0" ,
"displayText": b+"\n"+"masukkan jumlah yang ingin anda pesan"+"\n"+"masukkan dengan menuliskan .0 dibagian akhir"+"\n"+"contoh memesan 2 : 2.0",
#"data": {},
#"contextOut": [],
"source": b+"\n"+"masukkan jumlah yang ingin anda pesan"+"\n"+"masukkan dengan menuliskan .0 dibagian akhir"+"\n"+"contoh memesan 2 : 2.0"
}
if (req.get("result").get("action") == "jumlah"):
a=req.get("result").get("resolvedQuery")
aa=a.split(".")
aaa=int(aa[0])
total=aaa*4000
return {
"speech": "anda telah memesan"+str(a)+"bungkus dengan total harga "+str(total),
"displayText": "anda telah memesan"+str(a)+"bungkus dengan total harga "+str(total),
#"data": {},
#"contextOut": [],
"source": "anda telah memesan"+str(a)+"bungkus dengan total harga "+str(total),
}
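# Sketch of the minimal (Dialogflow v1 style) request body the handler above
# reads; only the fields it accesses are shown and the values are illustrative:
#
#   {"result": {"action": "pilihsnacknya", "resolvedQuery": "1"}}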
if __name__ == '__main__':
port = int(os.getenv('PORT', 4040))
print ("Starting app on port %d" %(port))
app.run(debug=False, port=port, host='0.0.0.0')
``` |
{
"source": "jraygauthier/nixos_secure_factory",
"score": 2
} |
#### File: src/nsft_cache_utils/dir.py
```python
import os
import time
import shutil
import hashlib
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Optional, Callable, List, Set, TypeVar
from nsft_system_utils.file import write_text_file_content, touch_file
try:
from _pytest.fixtures import FixtureRequest as _FixtureRequestT
_with_pytest = True
except ModuleNotFoundError:
_FixtureRequestT = Any
_with_pytest = False
PyTestFixtureRequestT = _FixtureRequestT
OptPyTestFixtureRequestT = Optional[_FixtureRequestT]
OptCopyIgnoreFnT = Optional[Callable[[str, List[str]], Set[str]]]
@dataclass
class CacheDirState:
path: Optional[Path]
valid: bool
class ICacheDirProvider(ABC):
@abstractmethod
def mk_cache_dir(
self, module_filename: Path, cache_id: str
) -> CacheDirState:
pass
OptICacheDirProvider = Optional[ICacheDirProvider]
def _mk_unique_cache_str_for(module_filename: Path, cache_id: str) -> str:
    # Some programs such as gpg do not work well with long file names.
# Using a short hash of what would have been the dir name fixes
# those cases.
composed_str = f"nsft-{module_filename}-{cache_id}"
hashed_str = \
hashlib.sha256(composed_str.encode()).hexdigest()[0:12]
return hashed_str
class DefaultCacheDirProvider(ICacheDirProvider):
def mk_cache_dir(
self, module_filename: Path, cache_id: str) -> CacheDirState:
module_dir = Path(module_filename).parent
unique_hashed_str = _mk_unique_cache_str_for(module_filename, cache_id)
cache_dir = module_dir.joinpath(
"__pycache__", "nsft", unique_hashed_str)
cache_dir_exists = cache_dir.exists()
return CacheDirState(path=cache_dir, valid=cache_dir_exists)
class DisabledCacheDirProvider(ICacheDirProvider):
def mk_cache_dir(
self, module_filename: Path, cache_id: str) -> CacheDirState:
return CacheDirState(path=None, valid=False)
def _is_dir_caching_disabled() -> bool:
no_dir_cache_env_var = os.environ.get("NSF_TEST_LIB_NO_DIR_CACHE", "0")
if "1" == no_dir_cache_env_var:
return True
return False
def obtain_cache_dir(
module_filename: Path,
cache_id: str,
stale_after_s: Optional[float] = None,
cache_dir_provider: OptICacheDirProvider = None
) -> CacheDirState:
if stale_after_s is None:
# Defaults to 30 minutes.
stale_after_s = 60 * 30
if _is_dir_caching_disabled():
cache_dir_provider = DisabledCacheDirProvider()
elif cache_dir_provider is None:
cache_dir_provider = DefaultCacheDirProvider()
prov_dir_state = cache_dir_provider.mk_cache_dir(module_filename, cache_id)
if prov_dir_state.path is None:
assert not prov_dir_state.valid
# No possible cache for unknown reason. Caching might be disabled or
        # the file system used by the cache provider might be read-only.
return CacheDirState(path=None, valid=False)
cache_dir = prov_dir_state.path
cache_dir_exists = prov_dir_state.valid
cache_last_accessed_token = cache_dir.joinpath(".nsft-cache-last-accessed-token")
try:
cache_dir.mkdir(parents=True, exist_ok=True)
touch_file(cache_last_accessed_token)
except OSError as e:
if 30 != e.errno:
raise # re-raise.
# Read-only file system. No possible cache.
return CacheDirState(path=None, valid=False)
if not cache_dir_exists:
cache_last_accessed_token.unlink()
return CacheDirState(path=cache_dir, valid=False)
cache_mtime_s = os.stat(cache_dir).st_mtime
cache_stale_s = cache_mtime_s + stale_after_s
current_time_s = time.time()
assert cache_mtime_s <= current_time_s
if current_time_s < cache_stale_s:
return CacheDirState(path=cache_dir, valid=True)
# Stale cache. Recreate.
shutil.rmtree(cache_dir)
cache_dir.mkdir()
return CacheDirState(path=cache_dir, valid=False)
def copy_ignore_gpg_home_dir(src, names):
logging.warning(f"src: {src}, names: {names}")
return names
_LoadDirContentRetT = TypeVar('_LoadDirContentRetT')
def create_dir_content_cached(
module_filename: Path,
dir: Path,
generate_dir_content_fn: Callable[[Path], _LoadDirContentRetT],
stale_after_s: Optional[float] = None,
cache_dir_provider: OptICacheDirProvider = None,
copy_ignore_fn: OptCopyIgnoreFnT = None,
load_dir_content_fn: Optional[Callable[[Path], _LoadDirContentRetT]] = None,
) -> _LoadDirContentRetT:
def default_load_dir_content(in_path: Path) -> _LoadDirContentRetT:
pass
if load_dir_content_fn is None:
load_dir_content_fn = default_load_dir_content
cache_id = generate_dir_content_fn.__name__
cache_state = obtain_cache_dir(
module_filename,
generate_dir_content_fn.__name__,
stale_after_s=stale_after_s,
cache_dir_provider=cache_dir_provider
)
if cache_state.valid:
assert cache_state.path is not None
shutil.rmtree(dir)
shutil.copytree(cache_state.path, dir, ignore=copy_ignore_fn)
return load_dir_content_fn(dir)
if cache_state.path is None:
return generate_dir_content_fn(dir)
generate_dir_content_fn(cache_state.path)
# Write some info about what module / function gave rise to this cache.
cache_info = cache_state.path.joinpath(".nsft-cache-info")
write_text_file_content(
cache_info, [
f"{module_filename}::{cache_id}"]
)
shutil.rmtree(dir)
shutil.copytree(cache_state.path, dir, ignore=copy_ignore_fn)
return load_dir_content_fn(dir)
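# Illustrative usage sketch (hypothetical caller names): given a
# `generate_fixture_dir(d: Path)` function that populates `d` and an existing
# `target_dir`, a test module would typically call:
#
#   content = create_dir_content_cached(
#       Path(__file__), target_dir, generate_fixture_dir)
#
# On the first call the generator runs into the cache directory and its output
# is copied into `target_dir`; later calls within the stale window copy the
# cached tree back instead of regenerating it.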
# There is not much I can do to fix complexity here as indent
# is artificial.
if _with_pytest: # noqa C901
class PyTestCacheDirProvider(ICacheDirProvider):
def __init__(self, request: _FixtureRequestT) -> None:
self._request = request
def _mk_pytest_cache_dir(
self, cache_key: str, hashed_dir_name: str) -> Optional[Path]:
try:
                # Some programs such as gpg do not work well with long file names.
# Using a short hash of what would have been the dir name fixes
# those cases.
cache_dir_str = str(self._request.config.cache.makedir(
hashed_dir_name))
except OSError as e:
if 30 != e.errno:
raise # re-raise
# Read-only file-system.
return None
cache_dir = Path(cache_dir_str)
assert cache_dir.exists()
self._request.config.cache.set(cache_key, str(cache_dir))
return cache_dir
def mk_cache_dir(
self, module_filename: Path, cache_id: str) -> CacheDirState:
module_name = module_filename.stem
unique_hashed_str = \
_mk_unique_cache_str_for(module_filename, cache_id)
cache_key = f"nsft-cache/{module_name}/{cache_id}/{unique_hashed_str}"
existing_cache_dir_str = self._request.config.cache.get(cache_key, None)
cache_dir = None
if existing_cache_dir_str is not None:
cache_dir = Path(existing_cache_dir_str)
if cache_dir.exists():
return CacheDirState(path=cache_dir, valid=True)
hashed_dir_name = f"nsft-{unique_hashed_str}"
cache_dir = self._mk_pytest_cache_dir(cache_key, hashed_dir_name)
return CacheDirState(path=cache_dir, valid=False)
def create_dir_content_cached_from_pytest(
module_filename: Path,
dir: Path,
generate_dir_content_fn: Callable[[Path], _LoadDirContentRetT],
request: Optional[_FixtureRequestT],
stale_after_s: Optional[float] = None,
copy_ignore_fn: OptCopyIgnoreFnT = None,
load_dir_content_fn: Optional[Callable[[Path], _LoadDirContentRetT]] = None,
) -> _LoadDirContentRetT:
if request is None:
cache_dir_provider = None
else:
cache_dir_provider = PyTestCacheDirProvider(request)
return create_dir_content_cached(
module_filename,
dir,
generate_dir_content_fn,
stale_after_s,
cache_dir_provider,
copy_ignore_fn,
load_dir_content_fn)
```
#### File: src/nsft_pgp_utils/_colon_listing_impl.py
```python
from typing import Iterator, Dict, List, Union, Optional, Any
from .ctx_auth_types import OptGpgAuthContext
from .ctx_proc_types import OptGpgProcContextSoftT
from .key_types import GpgKeyWExtInfo, GpgKeyExtInfo
from .process import gpg_stdout_it
from .trust_types import mk_gpg_calc_trust_from_colon_sep_field_value
GpgKeyWithColonSubKeyEntryDictT = Dict[str, str]
GpgKeyWithColonSubKeyEntriesListT = List[GpgKeyWithColonSubKeyEntryDictT]
GpgKeyWithColonEntryDictT = Dict[str, Union[str, GpgKeyWithColonSubKeyEntriesListT]]
class GpgKeyWithColonParsingError(Exception):
pass
def _list_gpg_keys_with_colon_lines_it(
secret: bool = False,
auth: OptGpgAuthContext = None,
proc: OptGpgProcContextSoftT = None
) -> Iterator[str]:
args = [
"--list-options", "show-only-fpr-mbox",
]
if secret:
args.append("--list-secret-keys")
else:
args.append("--list-keys")
args.append("--with-colons")
yield from gpg_stdout_it(
args, proc=proc, auth=auth)
def _list_gpg_keys_with_colon_records_it( # noqa C901
secret: bool = False,
auth: OptGpgAuthContext = None,
proc: OptGpgProcContextSoftT = None
) -> Iterator[GpgKeyWithColonEntryDictT]:
out_rec: Optional[GpgKeyWithColonEntryDictT] = None
line_it = _list_gpg_keys_with_colon_lines_it(
secret, auth, proc)
record_type = "sec" if secret else "pub"
sub_record_type = "ssb" if secret else "sub"
subs_record_key = "ssbs" if secret else "subs"
for l in line_it:
ft = l[0:3] # Field type.
if "tru" == ft:
continue
if record_type == ft:
if out_rec is not None:
yield out_rec
out_rec = {
ft: l
}
continue
if out_rec is None:
continue
if sub_record_type == ft:
subs = out_rec.setdefault(subs_record_key, list())
assert isinstance(subs, list)
subs.append({
ft: l
})
continue
tgt_rec: Dict[str, Any] = out_rec
try:
subs = out_rec[subs_record_key]
assert isinstance(subs, list)
tgt_rec = subs[-1]
except KeyError:
pass
if ft in tgt_rec:
raise GpgKeyWithColonParsingError(f"Unexpected duplicate field type: {ft}.")
tgt_rec.setdefault(ft, l)
if out_rec is not None:
yield out_rec
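# Note on the colon-separated format consumed below: on "fpr" lines, field
# index 9 (the 10th field) carries the fingerprint, while on "uid" lines the
# same index carries the "Name <email>" user id; field index 1 of the
# "pub"/"sec" record carries the calculated trust/validity value.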
def _list_gpg_keys_w_ext_info_it(
secret: bool = False,
auth: OptGpgAuthContext = None,
proc: OptGpgProcContextSoftT = None
) -> Iterator[GpgKeyWExtInfo]:
record_type = "sec" if secret else "pub"
for d in _list_gpg_keys_with_colon_records_it(secret, auth, proc):
fpr_l = d["fpr"]
assert isinstance(fpr_l, str)
fpr_field = fpr_l.split(':')[9]
fpr_field = fpr_field.strip()
uid_l = d["uid"]
assert isinstance(uid_l, str)
fn_eml_field = uid_l.split(':')[9]
fn, eml = fn_eml_field.split("<")
fn = fn.strip()
eml = eml.strip().rstrip(">")
entry_l = d[record_type]
assert isinstance(entry_l, str)
trust_field = entry_l.split(":")[1].strip()
trust = mk_gpg_calc_trust_from_colon_sep_field_value(trust_field)
yield GpgKeyWExtInfo(
fpr=fpr_field,
info=GpgKeyExtInfo(
email=eml,
user_name=fn,
trust=trust
)
)
```
#### File: src/nsft_pgp_utils/errors.py
```python
import shlex
from typing import Optional, List, Union
from subprocess import CalledProcessError
class GpgError(Exception):
pass
_CmdT = Union[str, List[str]]
def _format_cmd(cmd: _CmdT) -> str:
if isinstance(cmd, str):
return cmd
if not isinstance(cmd, list):
return f"{cmd}"
return " ".join(map(lambda x: shlex.quote(x), cmd))
class GpgProcessError(GpgError):
@classmethod
def mk_from(cls, other: CalledProcessError) -> 'GpgProcessError':
return GpgProcessError(**other.__dict__)
def __init__(
self,
returncode: int,
cmd: _CmdT,
output: Optional[str] = None,
stderr: Optional[str] = None
) -> None:
cmd_str = _format_cmd(cmd)
self._impl = CalledProcessError(returncode, cmd_str, output, stderr)
self._cmd = cmd
def __str__(self) -> str:
return self._impl.__str__()
@property
def returncode(self) -> int:
return self._impl.returncode
@property
def cmd(self) -> _CmdT:
return self._cmd
@property
def output(self) -> Optional[str]:
return self._impl.output
@property
def stderr(self) -> Optional[str]:
return self._impl.stderr
@property
def stdout(self) -> Optional[str]:
return self._impl.output
```
#### File: src/nsft_pgp_utils/fixture_initial.py
```python
import shutil
from dataclasses import dataclass
from pathlib import Path
from typing import List, Tuple, Iterable
from .ctx_gen_types import GpgKeyGenInfo
from .ctx_types import (
GpgContext,
GpgContextWExtInfo,
mk_empty_gpg_ctx_w_ext_info,
mk_gpg_ctx_for_user_home_dir,
)
from .home_dir import (
create_and_assign_proper_permissions_to_gpg_home_dir,
create_and_assign_proper_permissions_to_user_home_dir,
)
from .query import query_gpg_context_w_ext_info
from .secret_id import create_gpg_secret_identity
from ._fixture_gen_tools import import_pub_key_for_all_sids_in_ctxs
@dataclass
class _GpgInitialCtxs:
i_ie: GpgContext
i_m: GpgContext
i_f: GpgContext
i_s: GpgContext
i_t: GpgContext
i_z: GpgContext
# i_u: GpgContext
def _mk_gpg_intial_ctxs(
homes_root_dir: Path) -> _GpgInitialCtxs:
def mk_home_dir(user_name: str) -> Path:
return homes_root_dir.joinpath(user_name)
user_names = [
"initial-ie",
"initial-m",
"initial-f",
"initial-s",
"initial-t",
"initial-z"
# "initial_u",
]
ctxs = map(
lambda u: mk_gpg_ctx_for_user_home_dir(mk_home_dir(u)),
user_names)
return _GpgInitialCtxs(
*ctxs
)
@dataclass
class GpgInitialFixture:
    # An encrypter who knows about all of the below recipients, if any.
i_ie: GpgContextWExtInfo
#
# Not part of any external trust network nor do they know each other.
#
    # This is an initial, setup or beginner situation.
    #
    # Initial contexts.
i_m: GpgContextWExtInfo # Minimal directories exist, no secret id.
i_f: GpgContextWExtInfo # Family: two different secret ids.
i_s: GpgContextWExtInfo # Single: 1 secret id (i.e: no known ids) ultimate trust.
i_t: GpgContextWExtInfo # Twins: Twice the same info, different secret id.
i_z: GpgContextWExtInfo # Zero directories. Nothing exists.
# i_u: GpgContextWExtInfo # Untrusted secret id only (i.e: no other id known).
def _load_fix_ctx(ctx: GpgContext) -> GpgContextWExtInfo:
if ctx.proc.home_dir.exists():
return query_gpg_context_w_ext_info(**ctx.as_proc_auth_dict())
    # In this case, we avoid calling any gpg commands as those will
    # oftentimes create files in the gpg directory, which we want
    # to avoid in order to preserve this *empty dir* state.
return mk_empty_gpg_ctx_w_ext_info(**ctx.__dict__)
def load_gpg_initial_fixture(
homes_root_dir: Path) -> GpgInitialFixture:
ctxs = _mk_gpg_intial_ctxs(homes_root_dir)
return GpgInitialFixture(
*[_load_fix_ctx(ctx)
for ctx in ctxs.__dict__.values()]
)
ignore_copy_for_gpg_home_dir = shutil.ignore_patterns(
"S.gpg-agent", "S.gpg-agent.*", "S.scdaemon")
def copy_gpg_initial_fixture(
homes_root_dir: Path,
src: GpgInitialFixture) -> GpgInitialFixture:
homes_root_dir.mkdir(exist_ok=True)
for k, v in src.__dict__.items():
src_hd = v.proc.home_dir
src_hd_parent_name = src_hd.parent.name
src_hd_name = src_hd.name
tgt_hdp = homes_root_dir.joinpath(src_hd_parent_name)
tgt_hdp.mkdir()
tgt_hd = tgt_hdp.joinpath(src_hd_name)
if src_hd.exists():
shutil.copytree(src_hd, tgt_hd, ignore=ignore_copy_for_gpg_home_dir)
return load_gpg_initial_fixture(homes_root_dir)
def generate_gpg_initial_fixture(
homes_root_dir: Path) -> GpgInitialFixture:
ctxs = _mk_gpg_intial_ctxs(homes_root_dir)
@dataclass
class _GenInst:
secret_ids: Iterable[GpgKeyGenInfo]
w_min_dirs: bool
def mk_gen_inst(
sids: Iterable[Tuple[str, str]],
w_min_dirs: bool) -> _GenInst:
skgs = (
GpgKeyGenInfo(
user_name=f"{<NAME>",
email=f"{<EMAIL>"
) for first_name, email_local_part in sids)
return _GenInst(skgs, w_min_dirs)
cases: List[Tuple[List[Tuple[str, str]], bool]] = [
([("InitialEncrypterE", "initial-encrypter-e")], False),
([], True),
([("InitialManF", "initial-man-f"), ("InitialWifeF", "initial-wife-f")], False),
([("InitialSingleS", "initial-single-s")], False),
([("InitialTwinT", "initial-twin-t")] * 2, False),
([], False),
]
ginsts = (mk_gen_inst(*x) for x in cases)
def gen_ctx(g_inst: _GenInst, gpg_ctx: GpgContext) -> GpgContextWExtInfo:
if g_inst.w_min_dirs:
create_and_assign_proper_permissions_to_gpg_home_dir(
**gpg_ctx.as_proc_dict())
else:
create_and_assign_proper_permissions_to_user_home_dir(
gpg_ctx.proc.home_dir.parent)
for sid in g_inst.secret_ids:
create_gpg_secret_identity(
sid.email, sid.user_name, gpg_ctx.auth, proc=gpg_ctx.proc)
return _load_fix_ctx(gpg_ctx)
fix = GpgInitialFixture(
*[gen_ctx(gi, ps) for (gi, ps) in zip(
ginsts, ctxs.__dict__.values())]
)
ctx = fix.i_ie
in_ctxs = [c for c in fix.__dict__.values() if c is not ctx]
import_pub_key_for_all_sids_in_ctxs(ctx, in_ctxs)
return load_gpg_initial_fixture(homes_root_dir)
```
#### File: src/nsft_pgp_utils/home_dir.py
```python
import os
import subprocess
from pathlib import Path
from nsft_system_utils.permissions_simple import get_file_mode_simple
from .process import OptGpgProcContextSoftT, ensure_gpg_proc_ctx, run_gpg
def _create_and_assign_proper_permissions_to_dir(
target_dir: Path,
mode: int
) -> None:
if not target_dir.exists():
target_dir.mkdir(parents=True, exist_ok=True)
if mode != get_file_mode_simple(target_dir):
target_dir.chmod(mode)
def create_and_assign_proper_permissions_to_user_home_dir(
home_dir: Path
) -> None:
_create_and_assign_proper_permissions_to_dir(home_dir, 0o700)
def create_and_assign_proper_permissions_to_gpg_home_dir(
proc: OptGpgProcContextSoftT = None
) -> None:
proc = ensure_gpg_proc_ctx(proc)
gpg_home_dir_already_exists = os.path.exists(proc.home_dir)
_create_and_assign_proper_permissions_to_dir(proc.home_dir, 0o700)
pkeys_subdir = proc.home_dir.joinpath("private-keys-v1.d")
_create_and_assign_proper_permissions_to_dir(pkeys_subdir, 0o700)
if gpg_home_dir_already_exists:
return
# Force automated creation of missing files.
args = [
"--list-keys",
]
run_gpg(
args, check=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
proc=proc)
```
#### File: src/nsft_pgp_utils/trust.py
```python
from .ctx_proc_types import OptGpgProcContextSoftT
from .process import run_gpg
from .ctx_auth_types import OptGpgAuthContext
from .io_import import import_gpg_ui_otrust
from .key_types import GpgKeyWUIOwnerTrust
from .trust_types import GpgOwnerTrust
def trust_gpg_key(
fpr: str,
trust: GpgOwnerTrust,
auth: OptGpgAuthContext = None,
proc: OptGpgProcContextSoftT = None
) -> None:
ui_otrust = [
GpgKeyWUIOwnerTrust(fpr, trust),
]
import_gpg_ui_otrust(
ui_otrust,
# auth=auth,
proc=proc)
def sign_gpg_key(
fpr: str,
auth: OptGpgAuthContext = None,
proc: OptGpgProcContextSoftT = None
) -> None:
args = [
"--batch",
"--yes",
"--quick-sign-key", f"{fpr}"
]
run_gpg(
args, text=True, check=True, auth=auth, proc=proc)
def sign_and_trust_gpg_key(
fpr: str,
trust: GpgOwnerTrust,
auth: OptGpgAuthContext = None,
proc: OptGpgProcContextSoftT = None
) -> None:
sign_gpg_key(fpr, auth, proc)
trust_gpg_key(fpr, trust, auth, proc)
```
#### File: src/nsft_system_utils/permissions_simple.py
```python
import grp
import os
import pwd
from pathlib import Path
from typing import Optional
def get_file_mode(filename: Path) -> int:
return filename.stat().st_mode
def get_file_mode_str(filename: Path) -> str:
return oct(get_file_mode(filename))
def get_file_mode_simple(filename: Path) -> int:
return get_file_mode(filename) & 0o000777
def get_file_mode_simple_str(filename: Path) -> str:
return oct(get_file_mode_simple(filename))
def get_file_uid(file: Path) -> int:
return file.stat().st_uid
def get_file_gid(file: Path) -> int:
return file.stat().st_gid
def get_file_owner(file: Path) -> str:
return pwd.getpwuid(get_file_uid(file)).pw_name
def get_file_group(file: Path) -> str:
return grp.getgrgid(get_file_gid(file)).gr_name
def change_file_mode_uid_gid(
filename: Path,
mode: Optional[int] = None,
uid: Optional[int] = None,
gid: Optional[int] = None
) -> None:
if mode is not None:
os.chmod(filename, mode)
need_chown = uid is not None or gid is not None
if need_chown:
        # Use explicit `is None` checks so that a uid/gid of 0 (root) is
        # not silently replaced by the file's current owner/group.
        uid = uid if uid is not None else get_file_uid(filename)
        gid = gid if gid is not None else get_file_gid(filename)
os.chown(filename, uid, gid)
```
#### File: nsf-test-lib/tests/conftest.py
```python
from pathlib import Path
from typing import Callable
import pytest
from _pytest.tmpdir import TempPathFactory
from nsft_cache_utils.dir import (OptPyTestFixtureRequestT,
PyTestFixtureRequestT)
from nsft_system_utils.permissions import call_chmod
from test_lib.gpg_ctx_checks import (check_minimal_gpg_home_dir_empty,
check_minimal_gpg_home_dir_w_secret_id)
from test_lib.gpg_ctx_fixture_gen import (
GpgContextWGenInfo, generate_gpg_ctx_empty_minimal_dirs_cached,
GpgEncryptDecryptBasicFixture,
generate_gpg_ctx_empty_no_dirs_cached,
generate_gpg_ctx_w_2_distinct_secret_ids_cached,
generate_gpg_ctx_w_2_same_user_secret_ids_cached,
generate_gpg_ctx_w_secret_id_cached,
generate_gpg_encrypt_decrypt_basic_fixture_cached,
generate_gpg_initial_fixture_cached,
GpgInitialFixture)
_GpgCtxGenFnT = Callable[[Path, OptPyTestFixtureRequestT], GpgContextWGenInfo]
@pytest.fixture
def tmp_root_homes_dir(tmp_path_factory: TempPathFactory) -> Path:
return tmp_path_factory.mktemp("root_homes")
@pytest.fixture
def tmp_user_home_dir(tmp_path_factory: TempPathFactory) -> Path:
return tmp_path_factory.mktemp("home_user")
@pytest.fixture
def tmp_export_dir(tmp_path_factory: TempPathFactory) -> Path:
return tmp_path_factory.mktemp("export_dir")
def _mk_gpg_ctx_w_info_fixture_no_checks(
gen_fn: _GpgCtxGenFnT,
tmp_factory: TempPathFactory,
request: OptPyTestFixtureRequestT
) -> GpgContextWGenInfo:
home_dir = tmp_factory.mktemp("home_user")
gpg_ctx = gen_fn(home_dir, request)
return gpg_ctx
def _mk_gpg_ctx_w_info_fixture(
gen_fn: _GpgCtxGenFnT,
tmp_factory: TempPathFactory,
request: OptPyTestFixtureRequestT
) -> GpgContextWGenInfo:
gpg_ctx = _mk_gpg_ctx_w_info_fixture_no_checks(
gen_fn, tmp_factory, request)
check_minimal_gpg_home_dir_w_secret_id(gpg_ctx.proc)
return gpg_ctx
def _mk_ro_gpg_ctx_w_info_fixture(
gen_fn: _GpgCtxGenFnT,
tmp_factory: TempPathFactory,
request: OptPyTestFixtureRequestT
) -> GpgContextWGenInfo:
gpg_ctx = _mk_gpg_ctx_w_info_fixture(gen_fn, tmp_factory, request)
call_chmod(gpg_ctx.proc.home_dir, "a-w", recursive=True)
return gpg_ctx
@pytest.fixture
def gpg_ctx_empty_no_dirs(
request: PyTestFixtureRequestT,
tmp_path_factory: TempPathFactory) -> GpgContextWGenInfo:
gpg_ctx = _mk_gpg_ctx_w_info_fixture_no_checks(
generate_gpg_ctx_empty_no_dirs_cached,
tmp_path_factory, request)
return gpg_ctx
@pytest.fixture
def gpg_ctx_empty_minimal_dirs(
request: PyTestFixtureRequestT,
tmp_path_factory: TempPathFactory) -> GpgContextWGenInfo:
gpg_ctx = _mk_gpg_ctx_w_info_fixture_no_checks(
generate_gpg_ctx_empty_minimal_dirs_cached,
tmp_path_factory, request)
check_minimal_gpg_home_dir_empty(gpg_ctx.proc)
return gpg_ctx
@pytest.fixture(scope="session")
def gpg_ctx_w_secret_id_ro(
request: PyTestFixtureRequestT,
tmp_path_factory: TempPathFactory) -> GpgContextWGenInfo:
return _mk_ro_gpg_ctx_w_info_fixture(
generate_gpg_ctx_w_secret_id_cached,
tmp_path_factory, request)
@pytest.fixture
def gpg_ctx_w_secret_id(
request: PyTestFixtureRequestT,
tmp_path_factory: TempPathFactory) -> GpgContextWGenInfo:
return _mk_gpg_ctx_w_info_fixture(
generate_gpg_ctx_w_secret_id_cached,
tmp_path_factory, request)
@pytest.fixture
def gpg_ctx_w_2_distinct_secret_ids(
request: PyTestFixtureRequestT,
tmp_path_factory: TempPathFactory) -> GpgContextWGenInfo:
return _mk_gpg_ctx_w_info_fixture(
generate_gpg_ctx_w_2_distinct_secret_ids_cached,
tmp_path_factory, request)
@pytest.fixture
def gpg_ctx_w_2_same_user_secret_ids(
request: PyTestFixtureRequestT,
tmp_path_factory: TempPathFactory) -> GpgContextWGenInfo:
return _mk_gpg_ctx_w_info_fixture(
generate_gpg_ctx_w_2_same_user_secret_ids_cached,
tmp_path_factory, request)
@pytest.fixture
def gpg_encrypt_decrypt_basic(
request: PyTestFixtureRequestT,
tmp_root_homes_dir: Path
) -> GpgEncryptDecryptBasicFixture:
return generate_gpg_encrypt_decrypt_basic_fixture_cached(
tmp_root_homes_dir, request)
@pytest.fixture
def gpg_initial(
request: PyTestFixtureRequestT,
tmp_root_homes_dir: Path
) -> GpgInitialFixture:
return generate_gpg_initial_fixture_cached(
tmp_root_homes_dir, request)
```
#### File: nsf-test-lib/tests/test_0900_pgp_utils_initial_ctx.py
```python
import logging
from pathlib import Path
from nsft_pgp_utils.home_dir import create_and_assign_proper_permissions_to_gpg_home_dir
from nsft_pgp_utils.ctx_proc_types import mk_gpg_proc_ctx_for_user_home_dir
from test_lib.gpg_ctx_checks import (
check_minimal_gpg_home_dir_empty,
check_minimal_gpg_home_dir_w_secret_id,
)
from test_lib.gpg_ctx_fixture_gen import (
generate_gpg_ctx_w_2_distinct_secret_ids_cached,
generate_gpg_ctx_w_2_same_user_secret_ids_cached,
generate_gpg_ctx_w_secret_id_cached,
)
LOGGER = logging.getLogger(__name__)
def test_create_and_assign_proper_permissions_to_gpg_home_dir(
tmp_user_home_dir: Path) -> None:
LOGGER.info("tmp_user_home_dir: %s", type(tmp_user_home_dir))
proc = mk_gpg_proc_ctx_for_user_home_dir(tmp_user_home_dir)
create_and_assign_proper_permissions_to_gpg_home_dir(proc=proc)
check_minimal_gpg_home_dir_empty(proc)
def test_create_gpg_secret_identity(
request, tmp_user_home_dir: Path) -> None:
gpg_ctx = generate_gpg_ctx_w_secret_id_cached(tmp_user_home_dir, request)
check_minimal_gpg_home_dir_w_secret_id(gpg_ctx.proc)
def test_create_gpg_secret_identity_twice(
request, tmp_user_home_dir: Path) -> None:
gpg_ctx = generate_gpg_ctx_w_2_distinct_secret_ids_cached(
tmp_user_home_dir, request)
check_minimal_gpg_home_dir_w_secret_id(gpg_ctx.proc)
def test_create_gpg_secret_identity_twice_same_user(
request, tmp_user_home_dir: Path) -> None:
gpg_ctx = generate_gpg_ctx_w_2_same_user_secret_ids_cached(
tmp_user_home_dir, request)
check_minimal_gpg_home_dir_w_secret_id(gpg_ctx.proc)
```
#### File: nsf-secrets-deploy-tools/tests/conftest.py
```python
import os
from pathlib import Path
import pytest
from _pytest.tmpdir import TempPathFactory
from nsft_cache_utils.dir import PyTestFixtureRequestT
from nsft_system_utils.file import write_text_file_content
from nsft_pgp_utils.fixture_initial import copy_gpg_initial_fixture
from test_lib.gpg_ctx_fixture_gen import (
GpgEncryptDecryptBasicFixture,
GpgInitialFixture,
WhoIToCtxMapping,
generate_gpg_encrypt_decrypt_basic_fixture_cached,
generate_gpg_initial_fixture_cached,
get_i_fix_ctx_for,
generate_gpg_initial_fixture_encrypted_exports,
generate_gpg_encrypted_files_basic
)
@pytest.fixture(scope="module")
def tmp_root_homes_dir_enc_dec_ro(tmp_path_factory: TempPathFactory) -> Path:
return tmp_path_factory.mktemp("root-homes-enc-dec-ro")
@pytest.fixture(scope="module")
def gpg_encrypt_decrypt_basic_ro(
request: PyTestFixtureRequestT,
tmp_root_homes_dir_enc_dec_ro: Path
) -> GpgEncryptDecryptBasicFixture:
# TODO: It is however not possible to mark gpg home dir as ro.
# We will assume that uses of this fixture **do not** mutate
# the gpg home dir.
return generate_gpg_encrypt_decrypt_basic_fixture_cached(
tmp_root_homes_dir_enc_dec_ro, request)
@pytest.fixture(scope="module")
def tmp_root_homes_dir_init_ro(tmp_path_factory: TempPathFactory) -> Path:
return tmp_path_factory.mktemp("root-homes-init-ro")
@pytest.fixture(scope="module")
def gpg_initial_ro(
request: PyTestFixtureRequestT,
tmp_root_homes_dir_init_ro: Path
) -> GpgInitialFixture:
return generate_gpg_initial_fixture_cached(
tmp_root_homes_dir_init_ro, request)
@pytest.fixture
def tmp_root_homes_dir_init(tmp_path_factory: TempPathFactory) -> Path:
return tmp_path_factory.mktemp("root-homes-init")
@pytest.fixture
def gpg_initial(
gpg_initial_ro: GpgInitialFixture,
tmp_root_homes_dir_init: Path
) -> GpgInitialFixture:
return copy_gpg_initial_fixture(
tmp_root_homes_dir_init, gpg_initial_ro)
@pytest.fixture(scope="module")
def src_pgp_tmp_dir(tmp_path_factory: TempPathFactory) -> Path:
return tmp_path_factory.mktemp("src-pgp")
@pytest.fixture(scope="module")
def src_pgp_decrypt_dir(
src_pgp_tmp_dir: Path,
gpg_encrypt_decrypt_basic_ro: GpgEncryptDecryptBasicFixture
) -> Path:
return generate_gpg_encrypted_files_basic(
src_pgp_tmp_dir, gpg_encrypt_decrypt_basic_ro)
@pytest.fixture(scope="function")
def tgt_pgp_tmp_dir(tmp_path_factory: TempPathFactory) -> Path:
return tmp_path_factory.mktemp("tgt_pgp")
@pytest.fixture(scope="function")
def tgt_pgp_decrypt_dir(tgt_pgp_tmp_dir: Path) -> Path:
tmp_dir = tgt_pgp_tmp_dir
def write_dummy_files_to(d: Path):
fn = d.joinpath("dummy.txt")
        dummy_content = [
            "Dummy line1.",
            "Dummy line2"
        ]
write_text_file_content(fn, dummy_content)
fn_ro = d.joinpath("dummy-ro.txt")
write_text_file_content(fn_ro, dummy_content)
os.chmod(fn_ro, mode=0o444)
dir = tmp_dir.joinpath("dummy-dir")
os.mkdir(dir)
write_dummy_files_to(dir)
dir_ro = tmp_dir.joinpath("dummy-dir-ro")
os.mkdir(dir_ro)
write_dummy_files_to(dir_ro)
os.chmod(dir_ro, mode=0o555)
return tmp_dir
@pytest.fixture(scope="module")
def src_gnupg_keyring_deploy_dir(
src_pgp_tmp_dir: Path,
gpg_initial_ro: GpgInitialFixture
) -> Path:
return generate_gpg_initial_fixture_encrypted_exports(
src_pgp_tmp_dir, gpg_initial_ro)
@pytest.fixture
def tgt_gnupg_keyring_deploy_who_to_ctx_map(
gpg_initial: GpgInitialFixture
) -> WhoIToCtxMapping:
return lambda who: get_i_fix_ctx_for(gpg_initial, who)
```
#### File: nsf_factory_common_install/cli/_ctx_device_list.py
```python
from abc import abstractmethod
from typing import Iterable
import click
from ..types_device import DeviceInstanceWIdWTypeWStateWStateFile
from ._ctx import CliCtxDbBase, get_cli_ctx_db_base
CliCtxDbDeviceInstance = DeviceInstanceWIdWTypeWStateWStateFile
class CliCtxDbWDeviceList(CliCtxDbBase):
@abstractmethod
def list_device_instances(self) -> Iterable[CliCtxDbDeviceInstance]:
pass
def get_cli_ctx_db_w_device_list(ctx: click.Context) -> CliCtxDbWDeviceList:
out = get_cli_ctx_db_base(ctx)
assert isinstance(out, CliCtxDbWDeviceList)
return out
```
#### File: cli/device_common_ssh_auth_dir/cli.py
```python
from typing import Optional
import click
from nsf_factory_common_install.cli.options import (
cli_default_user_option,
ensure_user_id_or_user_factory_user_id,
)
from nsf_factory_common_install.click.sh_comp import (
is_click_requesting_shell_completion,
)
from nsf_factory_common_install.repo_project import mk_project_repo
from nsf_ssh_auth_dir.cli.nsf_ssh_auth_dir import CliCtxDbInterface
from nsf_ssh_auth_dir.cli.nsf_ssh_auth_dir import cli as cli_base
from nsf_ssh_auth_dir.cli.nsf_ssh_auth_dir import init_cli_ctx, mk_cli_context_settings
class CliCtxDb(CliCtxDbInterface):
def __init__(self, ctx: click.Context) -> None:
pass
@click.group(
cls=click.CommandCollection,
sources=[cli_base],
context_settings=mk_cli_context_settings(
mk_db=CliCtxDb
)
)
@cli_default_user_option()
@click.pass_context
def cli(ctx: click.Context, user_id: Optional[str]) -> None:
"""Ssh authorization tool for nixos-secure-factory projects.
Operates on the *cfg*'s **common** *auth-dir*.
That is, the set of authorizations shared by all devices.
You can get more information about the target authorization
directory using the `info` sub-command.
    Note that it remains **your responsibility** to add / commit /
    push your changes to *version control*. We however provide
some `git` helpers under `[cmd] git`.
"""
if is_click_requesting_shell_completion():
return
project = mk_project_repo()
user_id = ensure_user_id_or_user_factory_user_id(user_id, project)
init_cli_ctx(
ctx,
repo=project.device_cfg.ssh_auth.dir,
user_id=user_id
)
def run_cli() -> None:
cli()
```
#### File: cli/device_state/cli.py
```python
from typing import Iterable, Optional
import click
from nsf_factory_common_install.repo_project import mk_project_repo
from ..options import (
cli_default_device_option,
ensure_device_cfg_repo_device_by_id_or_current,
)
from ._ctx import (
CliCtx,
CliCtxDbDeviceInstance,
CliCtxDbInterface,
init_cli_ctx,
mk_cli_context_settings,
pass_cli_ctx,
)
from .checkout import checkout
from .create import create
from .field import field
from .file import file_
class CliCtxDb(CliCtxDbInterface):
def __init__(self, ctx: click.Context) -> None:
self.project = mk_project_repo()
def get_current_device(self) -> Optional[CliCtxDbDeviceInstance]:
return self.project.current_device.get_instance_from_default_repo_opt()
def list_device_instances(self) -> Iterable[CliCtxDbDeviceInstance]:
return self.project.device_cfg.iter_instances()
def get_device_instance(
self, device_id: str) -> CliCtxDbDeviceInstance:
return self.project.device_cfg.get_instance_for(device_id)
def list_device_states(self) -> Iterable[str]:
# TODO: request the nsf-ssh-auth lib for this information instead.
try:
yield from (
d.stem for d in self.project.device_cfg.ssh_auth.dir.joinpath(
"authorized-on").iterdir())
except FileNotFoundError:
pass
@click.group(
context_settings=mk_cli_context_settings(
mk_db=CliCtxDb
)
)
@cli_default_device_option()
@click.pass_context
def cli(ctx: click.Context, device_id: Optional[str]) -> None:
    """Operations on the device state file part of the
    device configuration repository.
    Defaults to operating on the *current device* / currently
    checked-out device when no device is explicitly specified.
    Note however that the print and field r/w operations target only the
    device state directory in the device cfg (e.g.:
    `[my-device-cfg-repo]/device/[my-device-id]/device-info.json`).
    The checkout brings the in-repo state to
    `.current-device.yaml` for further customizations.
    See also `device-current-state`, targeting only the workspace
    version of the device state (aka `.current-device.yaml`).
"""
project = mk_project_repo()
device = ensure_device_cfg_repo_device_by_id_or_current(device_id, project)
cfg_state_file = device.state_file
ws_state_file = project.workspace.current_device.state_file
init_cli_ctx(
ctx,
device=device,
rw_target_file=cfg_state_file,
explicit_device_id=device_id,
checkout_device_repo=project.device_cfg,
checkout_target_file=ws_state_file
)
@cli.command(name="info")
@pass_cli_ctx
def _info(ctx: CliCtx) -> None:
click.echo(
f"target-device-id: {str(ctx.device.id)}")
click.echo(
"target-filename: "
f"'{str(ctx.rw_target_file.filename)}'")
if ctx.checkout_device_repo is not None:
click.echo(
"checkout-device-repo: "
f"'{str(ctx.checkout_device_repo.dir)}'")
if ctx.checkout_target_file is not None:
click.echo(
"checkout-target-filename: "
f"'{str(ctx.checkout_target_file.filename)}'")
cli.add_command(file_)
cli.add_command(field)
cli.add_command(checkout)
cli.add_command(create)
def run_cli() -> None:
cli()
```
#### File: src/nsf_factory_common_install/file_device_state.py
```python
from pathlib import Path
from typing import List, Optional
from .types_device_state import DeviceIdWType, DeviceState, DeviceStatePlainT
from ._state_persistance_tools import (
dump_plain_state_to_file,
format_plain_state_as_yaml_str,
load_state_from_file_plain,
StateFileError
)
class DeviceStateFileError(Exception):
pass
class DeviceStateFileAccessError(DeviceStateFileError):
pass
class DeviceStateFileFormatError(DeviceStateFileError):
pass
def parse_device_state_field_id(in_d: DeviceStatePlainT) -> str:
out = in_d['identifier']
if not isinstance(out, str):
        raise DeviceStateFileFormatError(
            f"'identifier' field should be a string, was instead: {type(out)}")
return out
def parse_device_state_field_type(in_d: DeviceStatePlainT) -> str:
out = in_d['type']
if not isinstance(out, str):
        raise DeviceStateFileFormatError(
            f"'type' field should be a string, was instead: {type(out)}")
return out
def parse_device_state_fields_id_w_type(
in_d: DeviceStatePlainT) -> DeviceIdWType:
return DeviceIdWType(
parse_device_state_field_id(in_d),
parse_device_state_field_type(in_d)
)
def parse_device_state_field_factory_installed_by(
in_d: DeviceStatePlainT) -> Optional[List[str]]:
    out = in_d.get('factory-installed-by', None)
    if out is None:
        return None
    for u in out:
        if not isinstance(u, str):
            raise DeviceStateFileFormatError(
                "'factory-installed-by' entries should be "
                f"strings, found instead: {type(u)}")
    return out
def parse_device_state(in_d: DeviceStatePlainT) -> DeviceState:
try:
return DeviceState(
id=parse_device_state_field_id(in_d),
type=parse_device_state_field_type(in_d),
hostname=in_d['hostname'],
ssh_port=in_d['ssh-port'],
gpg_id=in_d.get('gpg-id', None),
factory_installed_by=in_d.get('factory-installed-by', None),
)
except KeyError as e:
raise DeviceStateFileFormatError(
f"Missing mandatory field: {str(e)}") from e
def load_device_state_from_file_plain(
filename: Path) -> DeviceStatePlainT:
try:
return load_state_from_file_plain(filename)
except StateFileError as e:
raise DeviceStateFileAccessError(
f"Cannot load device state file: {str(e)}")
def load_device_state_from_file(
filename: Path) -> DeviceState:
di_plain = load_device_state_from_file_plain(filename)
return parse_device_state(di_plain)
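# Sketch of the plain state layout consumed by `parse_device_state` above
# (keys taken from the fields it reads; concrete values are illustrative only):
#
#   {
#       "identifier": "my-device-0",
#       "type": "my-device-type",
#       "hostname": "my-device-0.local",
#       "ssh-port": "22",
#       "gpg-id": None,
#       "factory-installed-by": ["my-factory-user"]
#   }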
def load_device_id_from_device_state_file(
filename: Path) -> str:
di_plain = load_device_state_from_file_plain(filename)
return parse_device_state_field_id(di_plain)
def load_device_type_from_device_state_file(
filename: Path) -> str:
di_plain = load_device_state_from_file_plain(filename)
return parse_device_state_field_type(di_plain)
def load_device_id_w_type_from_device_state_file(
filename: Path) -> DeviceIdWType:
di_plain = load_device_state_from_file_plain(filename)
return parse_device_state_fields_id_w_type(di_plain)
def dump_plain_device_state_to_file(
state: DeviceStatePlainT,
out_filename: Path
) -> None:
return dump_plain_state_to_file(state, out_filename)
def format_plain_device_state_as_yaml_str(
state: DeviceStatePlainT) -> str:
return format_plain_state_as_yaml_str(state)
class DeviceStateFile:
def __init__(self, filename: Path) -> None:
self._filename = filename
@property
def filename(self) -> Path:
return self._filename
def load(self) -> DeviceState:
return load_device_state_from_file(self.filename)
def load_plain(self) -> DeviceStatePlainT:
return load_device_state_from_file_plain(self.filename)
def load_field_id(self) -> str:
return load_device_id_from_device_state_file(
self.filename)
def load_field_type(self) -> str:
return load_device_type_from_device_state_file(
self.filename)
def dump_plain(self, state: DeviceStatePlainT) -> None:
return dump_plain_device_state_to_file(
state, self.filename)
```
#### File: src/nsf_factory_common_install/file_factory_state.py
```python
from pathlib import Path
from .types_factory_state import FactoryState, FactoryStatePlainT, FactoryStateUser
from ._state_persistance_tools import (
dump_plain_state_to_file,
load_state_from_file_plain,
StateFileError
)
class FactoryStateFileError(Exception):
pass
class FactoryStateFileAccessError(FactoryStateFileError):
pass
class FactoryStateFileFormatError(FactoryStateFileError):
pass
def load_factory_state_from_file_plain(filename: Path) -> FactoryStatePlainT:
try:
return load_state_from_file_plain(filename)
except StateFileError as e:
raise FactoryStateFileAccessError(
f"Cannot load factory state file: {str(e)}")
def parse_factory_state_user(in_d: FactoryStatePlainT) -> FactoryStateUser:
try:
user_d = in_d['user']
return FactoryStateUser(
id=user_d['id'],
full_name=user_d['full-name'],
email=user_d['email']
)
except KeyError as e:
raise FactoryStateFileFormatError(
f"Missing mandatory field: {str(e)}") from e
def parse_factory_state(in_d: FactoryStatePlainT) -> FactoryState:
return FactoryState(
user=parse_factory_state_user(in_d)
)
def load_factory_state_from_file(
filename: Path) -> FactoryState:
fi_d = load_factory_state_from_file_plain(filename)
return parse_factory_state(fi_d)
def load_factory_state_user_from_file(
filename: Path) -> FactoryStateUser:
fi_d = load_factory_state_from_file_plain(filename)
return parse_factory_state_user(fi_d)
def dump_plain_factory_state_to_file(
state: FactoryStatePlainT,
out_filename: Path
) -> None:
dump_plain_state_to_file(state, out_filename)
class FactoryStateFile:
def __init__(self, filename: Path) -> None:
self._filename = filename
@property
def filename(self) -> Path:
return self._filename
def load(self) -> FactoryState:
return load_factory_state_from_file(self.filename)
def load_plain(self) -> FactoryStatePlainT:
return load_factory_state_from_file_plain(self.filename)
def load_user(self) -> FactoryStateUser:
return load_factory_state_user_from_file(
self.filename)
def load_field_user_id(self) -> str:
# TODO: As this is a common operation, make this as fast and
# robust as possible by only parsing the id field.
return self.load_user().id
def dump_plain(self, state: FactoryStatePlainT) -> None:
return dump_plain_factory_state_to_file(
state, self.filename)
```
#### File: src/nsf_factory_common_install/prompt.py
```python
from typing import Optional
def prompt_for_user_approval(
prompt_str: Optional[str] = None
) -> bool:
if prompt_str is None:
prompt_str = "Continue"
r = input("{} (y/n)? ".format(prompt_str))
print("\n")
approval_given = (r == 'Y' or r == 'y')
return approval_given
```
#### File: src/nsf_factory_common_install/repo_project.py
```python
from abc import abstractmethod
from typing import Optional
from .file_device_state import DeviceStateFileError
from .repo_device_cfg import DeviceCfgRepo, DeviceCfgRepoInstance, mk_device_cfg_repo
from .repo_workspace import (
WorkspaceRepo,
WorkspaceRepoFactory,
WorspaceRepoCurrentDevice,
mk_workspace_repo,
)
from .types_device import (
DeviceInstanceUnspecifiedError,
DeviceInstanceWIdWTypeWStateWStateFile,
DeviceState,
DeviceStateFile,
DeviceStatePlainT,
)
ProjectFactory = WorkspaceRepoFactory
class ProjectRepoDevice(DeviceInstanceWIdWTypeWStateWStateFile):
@property
@abstractmethod
def id(self) -> str:
pass
@property
@abstractmethod
def type_id(self) -> str:
pass
@property
@abstractmethod
def state_file(self) -> DeviceStateFile:
pass
@property
@abstractmethod
def state(self) -> DeviceState:
pass
@property
@abstractmethod
def state_plain(self) -> DeviceStatePlainT:
pass
@abstractmethod
def get_instance_from_default_repo(
self) -> DeviceCfgRepoInstance:
"""Return the corresponding instance from the default
device repository.
Raises:
DeviceInstanceUnspecifiedError:
When for some reason the corresponding device cannot
be determined / inferred (most likely because the current
                device state file does not exist).
"""
pass
def get_instance_from_default_repo_opt(
self) -> Optional[DeviceCfgRepoInstance]:
"""Return the corresponding instance from the default
device repository or `None` when unspecified.
See `get_instance_from_default_repo` for the variant raising
exceptions.
"""
try:
return self.get_instance_from_default_repo()
except DeviceInstanceUnspecifiedError:
return None
class ProjectRepoDefaultDevice(ProjectRepoDevice):
def __init__(
self,
device_id: str,
default_repo: DeviceCfgRepo
) -> None:
self._default_repo = default_repo
self._default_repo_instance = default_repo.get_instance_for(device_id)
@property
def id(self) -> str:
return self._default_repo_instance.id
@property
def type_id(self) -> str:
return self._default_repo_instance.type_id
@property
def state_file(self) -> DeviceStateFile:
return self._default_repo_instance.state_file
@property
def state(self) -> DeviceState:
return self._default_repo_instance.state
@property
def state_plain(self) -> DeviceStatePlainT:
return self._default_repo_instance.state_plain
def get_instance_from_default_repo(
self) -> DeviceCfgRepoInstance:
return self._default_repo_instance
# IDEA: `get_instance_from_repo("repo-id")`.
# IDEA: We could implement a repo override scheme for the fields.
class ProjectRepoCurrentDevice(ProjectRepoDevice):
"""The interface for accessing the current device of a project.
Currently identical to the project device. However, this is
a placeholder for any current device specific additions.
"""
pass
class ProjectRepoDefaultCurrentDevice(ProjectRepoCurrentDevice):
def __init__(
self,
ws_device: WorspaceRepoCurrentDevice,
default_repo: DeviceCfgRepo
) -> None:
self._ws_device = ws_device
self._default_repo = default_repo
@property
def id(self) -> str:
return self._ws_device.id
@property
def type_id(self) -> str:
return self._ws_device.type_id
@property
def state_file(self) -> DeviceStateFile:
return self._ws_device.state_file
@property
def state(self) -> DeviceState:
return self._ws_device.state
@property
def state_plain(self) -> DeviceStatePlainT:
return self._ws_device.state_plain
def get_instance_from_default_repo(
self) -> DeviceCfgRepoInstance:
try:
device_id = self._ws_device.state_file.load_field_id()
except DeviceStateFileError as e:
raise DeviceInstanceUnspecifiedError(
"Cannot determine the current device because: "
f"{str(e)}"
)
return self._default_repo.get_instance_for(device_id)
# IDEA: `get_instance_from_repo("repo-id")`.
# IDEA: We could implement a repo override scheme for the fields.
class ProjectRepo:
"""The whole project repo set seen as a whole."""
@property
@abstractmethod
def workspace(self) -> WorkspaceRepo:
"""Return the workspace repository for this project."""
pass
@property
@abstractmethod
def device_cfg(self) -> DeviceCfgRepo:
"""Return the (default) device configuration repository
for this project."""
pass
@property
@abstractmethod
def factory(self) -> ProjectFactory:
"""Return the factory associated to this project.
Mostly information about the user driving this nsf.
"""
pass
@property
@abstractmethod
def current_device(self) -> ProjectRepoCurrentDevice:
"""Return a high level interface to the currently checked out device
for this project."""
pass
@abstractmethod
def get_device_by_id(
self, device_id: str) -> ProjectRepoDevice:
"""Return a high level interface to a specific device for
this project."""
pass
class ProjectDefaultRepo(ProjectRepo):
"""The whole project repo set (default version) seen as a whole.
"""
def __init__(
self,
workspace: WorkspaceRepo,
device_cfg: DeviceCfgRepo
) -> None:
self._workspace = workspace
self._device_cfg = device_cfg
@property
def workspace(self) -> WorkspaceRepo:
return self._workspace
@property
def device_cfg(self) -> DeviceCfgRepo:
return self._device_cfg
@property
def factory(self) -> ProjectFactory:
return self._workspace.factory
@property
def current_device(self) -> ProjectRepoCurrentDevice:
return ProjectRepoDefaultCurrentDevice(
self._workspace.current_device, self.device_cfg)
def get_device_by_id(
self, device_id: str) -> ProjectRepoDevice:
return ProjectRepoDefaultDevice(
device_id, self.device_cfg)
def mk_project_repo(
workspace: Optional[WorkspaceRepo] = None,
device_cfg: Optional[DeviceCfgRepo] = None
) -> ProjectRepo:
"""Create a nsf project repository instance.
This helper factory function allow one to create a default
instance when specifying no parameters changing only what
needs to differ from the defaults.
This is usually the entry point for accessing any information
about the project.
Any cli / gui should start by creating / customizing its project
instance and from there pass the instance or any subset
of it around.
Doing this, it should be easy to have a centralize way to
configure specialized nsf projects.
"""
# IDEA: Load customizations from a `.nsf-project.yaml` file at
# the root of the repo. These would allow us not to depend on any
# environment variables. Instead, the env var would allow one to
# override specific fields.
if workspace is None:
workspace = mk_workspace_repo()
if device_cfg is None:
device_cfg = mk_device_cfg_repo()
return ProjectDefaultRepo(workspace, device_cfg)
```
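The factory above is the intended entry point, so a short usage sketch may help. It is illustrative only: it assumes an already provisioned workspace on disk, the import path is not shown in this excerpt, and `"my-device"` is a placeholder id; the function and attribute names themselves all come from this file.

```python
# Illustrative sketch only. Assumes `mk_project_repo` and friends are in
# scope (their module path is not shown in this excerpt) and that a
# workspace / device-config repo already exists on disk.
project = mk_project_repo()  # default workspace + default device cfg repo

# High level access to the currently checked out device.
current = project.current_device
instance = current.get_instance_from_default_repo_opt()
if instance is None:
    print("No current device checked out (state file missing).")
else:
    print(f"Current device: {instance.id} ({instance.type_id})")

# Any device known to the default device-config repo can also be reached
# directly by id ("my-device" is a placeholder).
device = project.get_device_by_id("my-device")
print(device.state_plain)
```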
#### File: src/nsf_factory_common_install/_state_persistance_tools.py
```python
from pathlib import Path
from typing import Dict, Any, Iterator
import json
import yaml
class StateFileError(Exception):
pass
class StateFileAccessError(StateFileError):
pass
class StateFileFormatError(StateFileError):
pass
StatePlainT = Dict[str, Any]
def _load_state_from_json_file_plain(
filename: Path) -> StatePlainT:
try:
with open(filename) as f:
# We want to preserve key order. Json already does that.
out = json.load(f)
except FileNotFoundError as e:
raise StateFileAccessError(str(e))
except json.decoder.JSONDecodeError as e:
raise StateFileFormatError(f"Not a valid json file: {str(e)}") from e
assert out is not None
return out
def _load_state_from_yaml_file_plain(
filename: Path) -> StatePlainT:
try:
with open(filename) as f:
# We want to preserve key order.
# Yaml already does that on load.
out = yaml.safe_load(f)
except FileNotFoundError as e:
raise StateFileAccessError(str(e))
assert out is not None
return out
def load_state_from_file_plain(
filename: Path) -> StatePlainT:
if ".yaml" == filename.suffix:
return _load_state_from_yaml_file_plain(filename)
assert ".json" == filename.suffix
return _load_state_from_json_file_plain(filename)
def _dump_plain_state_to_yaml_file(
state: StatePlainT,
out_filename: Path
) -> None:
with open(out_filename, 'w') as of:
# We want to preserve key order, thus the `sort_keys=False`.
yaml.safe_dump(state, of, sort_keys=False)
def _dump_state_to_json_file(
state: StatePlainT,
out_filename: Path
) -> None:
with open(out_filename, 'w') as of:
# We want to preserve key order, thus the `sort_keys=False`.
json.dump(
state,
of,
sort_keys=False,
indent=2,
separators=(',', ': ')
)
def dump_plain_state_to_file(
state: StatePlainT,
out_filename: Path
) -> None:
if ".yaml" == out_filename.suffix:
return _dump_plain_state_to_yaml_file(state, out_filename)
assert ".json" == out_filename.suffix
return _dump_state_to_json_file(state, out_filename)
def dump_plain_state_as_yaml_lines(
state: StatePlainT,
) -> Iterator[str]:
# TODO: Find a way to perform the dump iteratively / in a
# streaming fashion.
out_str = yaml.safe_dump(state, sort_keys=False)
yield from out_str.splitlines(keepends=True)
def format_plain_state_as_yaml_str(
state: StatePlainT) -> str:
if not state:
return ""
return "".join(dump_plain_state_as_yaml_lines(state))
```
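A minimal round trip through the helpers above may make the suffix-based dispatch clearer. The file path and state content below are made up for illustration, and the helpers are assumed to be in scope.

```python
# Illustrative round trip; file path and state content are placeholders.
from pathlib import Path

state = {"id": "my-device", "type": "virtualbox-vm"}

# The suffix picks the serializer: ".yaml" -> yaml, ".json" -> json.
dump_plain_state_to_file(state, Path("/tmp/device-state.yaml"))
reloaded = load_state_from_file_plain(Path("/tmp/device-state.yaml"))
assert reloaded == state

# Human readable rendering, e.g. for an `info`-style CLI command.
print(format_plain_state_as_yaml_str(reloaded), end="")
```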
#### File: tests/installed/conftest.py
```python
def pytest_runtest_setup(item):
pass
"""
if "1" != os.environ.get("PKG_NSF_FACTORY_INSTALL_IN_ENV"):
pytest.skip(
"Should be run only from build environement. "
"See `PKG_NSF_FACTORY_INSTALL_IN_ENV`.")
"""
```
#### File: tests/lib/conftest.py
```python
import os
import pytest
def pytest_runtest_setup(item):
if "1" != os.environ.get("PKG_NSF_FACTORY_INSTALL_IN_BUILD_ENV"):
pytest.skip(
"Should be run only from build environement. "
"See `PKG_NSF_FACTORY_INSTALL_IN_BUILD_ENV`.")
``` |
{
"source": "jraygauthier/nixos-sf-ssh-auth",
"score": 2
} |
#### File: src/nsf_ssh_auth_dir/cli_group.py
```python
import click
import logging
from .cli_group_member import member
@click.group()
def group() -> None:
"""Ssh groups related commands."""
pass
@group.command()
def add() -> None:
"""Add a new *ssh group*."""
logging.info("group add")
@group.command()
def rm() -> None:
"""Remove and existing *ssh group*."""
logging.info("group rm")
@group.command()
def ls() -> None:
"""List existing *ssh group*."""
logging.info("group ls")
@group.command()
def authorize() -> None:
"""Authorize a *ssh group* to *device user(s)*."""
logging.info("group authorize")
@group.command()
def deauthorize() -> None:
"""De-authorize a *ssh group* from *device user(s)*."""
logging.info("group deauthorize")
group.add_command(member)
```
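Since these commands only log for now, a quick way to exercise the group is click's bundled test runner; `CliRunner` is standard click API, while wiring `group` into the full `nsf-ssh-auth-dir` CLI happens elsewhere in the package.

```python
# Exercising the click group above with click's built-in test runner.
from click.testing import CliRunner

runner = CliRunner()

assert runner.invoke(group, ["--help"]).exit_code == 0
assert runner.invoke(group, ["ls"]).exit_code == 0
```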
#### File: cli/tests/test_cli.py
```python
from pathlib import Path
from _pytest.logging import LogCaptureFixture
from nsf_ssh_auth_dir.cli import cli, CliInitCtx
from test_lib.click import invoke_cli
def test_help(caplog: LogCaptureFixture) -> None:
result = invoke_cli(caplog, cli, ['--help'])
assert 0 == result.exit_code
def test_info(caplog: LogCaptureFixture) -> None:
result = invoke_cli(caplog, cli, ['info'])
assert 0 == result.exit_code
def test_info_w_init_ctx(caplog: LogCaptureFixture) -> None:
init_ctx = CliInitCtx(cwd=Path("/my/path"), user_id="my_user_id")
result = invoke_cli(caplog, cli, ['info'], obj=init_ctx)
assert 0 == result.exit_code
# logging.info(f"stdout:\n{result.output}")
assert "/my/path" in result.output
assert "my_user_id" in result.output
``` |
{
"source": "jraygauthier/nsf-ssh-auth",
"score": 2
} |
#### File: cli/nsf_ssh_auth_dir/_group_tools.py
```python
from typing import Iterable, List, NamedTuple, Set, Tuple
from nsf_ssh_auth_dir.click.error import echo_error, CliError
from nsf_ssh_auth_dir.repo import SshAuthDirRepo
from nsf_ssh_auth_dir.repo_groups import (
SshGroup,
SshGroupsRepoAccessError,
SshGroupsRepoKeyAccessError,
SshGroupsRepoDuplicateError,
SshGroupsRepoFileAccessError
)
class GroupInfoUI(NamedTuple):
fmt_name: str
@classmethod
def mk_from(cls, group: SshGroup) -> 'GroupInfoUI':
return cls(group.name)
def add_user_to_groups(
repo: SshAuthDirRepo,
user_id: str,
group_ids: Iterable[str],
force: bool) -> None:
errors: List[Tuple[str, Exception]] = []
for gid in group_ids:
if force:
repo.groups.ensure(gid).add_member_by_id(
user_id, force=force)
else:
try:
repo.groups[gid].add_member_by_id(user_id)
except (SshGroupsRepoAccessError, SshGroupsRepoDuplicateError) as e:
errors.append((gid, e))
for gid, error in errors:
echo_error(f"Error adding user to '{gid}':\n {str(error)}")
if errors:
raise CliError(
f"Was unable to add '{user_id}' to some of the"
"specified groups. See previous log for more details.")
def rm_user_from_all_groups(
repo: SshAuthDirRepo, user_id: str,
force: bool = False
) -> Set[GroupInfoUI]:
out = set()
try:
for g in repo.groups:
try:
# TODO: Consider some warning when not force.
g.rm_member_by_id(user_id, force=force)
out.add(GroupInfoUI.mk_from(g))
except SshGroupsRepoKeyAccessError:
pass
except SshGroupsRepoFileAccessError:
pass
return out
```
#### File: src/nsf_ssh_auth_dir/repo_groups.py
```python
from pathlib import Path
from typing import Set, Type, Optional, Iterator, Tuple, Callable
from .file_groups import (SshGroupsDumper, SshGroupsFileAccessError,
SshGroupsFileError, SshGroupsLoader, SshRawGroup,
SshRawGroups)
from .policy_repo import SshAuthDirRepoPolicy
from .types_base_errors import SshAuthDirRepoError
from .repo_users import SshUsersRepo, SshUser
class SshGroupsRepoError(SshAuthDirRepoError):
pass
class SshGroupsRepoAccessError(SshGroupsRepoError):
pass
class SshGroupsRepoFileAccessError(SshGroupsRepoAccessError):
pass
class SshGroupsRepoKeyAccessError(SshGroupsRepoAccessError, KeyError):
pass
class SshGroupsRepoGroupKeyAccessError(SshGroupsRepoKeyAccessError):
pass
class SshGroupsRepoInvalidUserError(SshGroupsRepoKeyAccessError):
pass
class SshGroupsRepoDuplicateError(SshGroupsRepoError):
pass
class SshGroupsRepoGroupAlreadyExistsError(
SshGroupsRepoDuplicateError):
pass
class SshGroupsRepoUserAlreadyGroupMemberError(
SshGroupsRepoDuplicateError):
pass
def get_groups_repo_err_cls_from_groups_file_err(
e: SshGroupsFileError) -> Type[SshGroupsRepoAccessError]:
if isinstance(e, SshGroupsFileAccessError):
return SshGroupsRepoFileAccessError
return SshGroupsRepoAccessError
class SshGroup:
def __init__(
self,
sa_root_dir: Path,
raw: SshRawGroup,
update_raw_fn: Callable[[SshRawGroup], SshRawGroup],
users: SshUsersRepo
) -> None:
self._sa_root_dir = sa_root_dir
self._raw = raw
self._update_raw_fn = update_raw_fn
self._users = users
@property
def name(self) -> str:
return self._raw.name
@property
def members_names(self) -> Set[str]:
return self._raw.members
def iter_members(
self,
skip_invalid: bool = False
) -> Iterator[SshUser]:
for m_name in self.members_names:
try:
yield self._users[m_name]
except KeyError:
if not skip_invalid:
raise SshGroupsRepoInvalidUserError(
f"'{self.name}' group member '{m_name}' does not "
"correspond to a valid user."
)
@property
def members(self) -> Iterator[SshUser]:
yield from self.iter_members()
def add_member_by_id(
self, user_id: str, force: bool = False) -> None:
if not force and user_id not in self._users:
raise SshGroupsRepoInvalidUserError(
f"Failed to add user '{user_id}' to group "
f"'{self.name}'. User does not exists."
)
if not force and user_id in self._raw.members:
raise SshGroupsRepoUserAlreadyGroupMemberError(
f"Failed to add user '{user_id}' to group "
f"'{self.name}'. Already a member of this group."
)
self._raw.members.add(user_id)
self._raw = self._update_raw_fn(self._raw)
def rm_member_by_id(
self, member_id: str, force: bool = False) -> None:
try:
self._raw.members.remove(member_id)
except KeyError as e:
if not force:
raise SshGroupsRepoKeyAccessError(
f"No such '{self.name}' group member: '{member_id}'. "
"Can't be removed."
) from e
self._raw = self._update_raw_fn(self._raw)
class SshGroupsRepo:
def __init__(
self, dir: Path, stem: str,
policy: SshAuthDirRepoPolicy,
users: SshUsersRepo
) -> None:
self._sa_root_dir = dir
self._policy = policy
self._groups_loader = SshGroupsLoader(dir, stem, policy.file_format)
self._groups_dumper = SshGroupsDumper(dir, stem, policy.file_format)
self._users = users
def _update_raw_group(self, raw_group: SshRawGroup) -> SshRawGroup:
raw = self._load_raw()
if raw_group.name not in raw.ssh_groups:
raise SshGroupsRepoKeyAccessError(
f"No such group: '{raw_group.name}'. Can't be updated.")
raw.ssh_groups[raw_group.name] = raw_group
self._dump_raw(raw)
raw = self._load_raw()
return raw.ssh_groups[raw_group.name]
def _mk_group(
self, raw: SshRawGroup
) -> SshGroup:
return SshGroup(
self._sa_root_dir,
raw,
self._update_raw_group,
self._users
)
def _load_raw(self) -> SshRawGroups:
try:
return self._groups_loader.load()
except SshGroupsFileError as e:
ECls = get_groups_repo_err_cls_from_groups_file_err(e)
raise ECls(str(e)) from e
def _dump_raw(self, raw: SshRawGroups) -> None:
try:
return self._groups_dumper.dump(raw)
except SshGroupsFileError as e:
ECls = get_groups_repo_err_cls_from_groups_file_err(e)
raise ECls(str(e)) from e
@property
def names(self) -> Set[str]:
raw_groups = self._load_raw()
return {
name for name in raw_groups.ssh_groups.keys()
}
def __iter__(self) -> Iterator[SshGroup]:
raw_groups = self._load_raw()
for group in raw_groups.ssh_groups.values():
yield self._mk_group(group)
def __contains__(self, groupname: str) -> bool:
raw_groups = self._load_raw()
return groupname in raw_groups.ssh_groups
def _get_w_raw_set(self, groupname: str) -> Tuple[SshGroup, SshRawGroups]:
raw_groups = self._load_raw()
try:
return (
self._mk_group(
raw_groups.ssh_groups[groupname]
),
raw_groups
)
except KeyError as e:
raise SshGroupsRepoKeyAccessError(
f"No such group: '{groupname}'. Can't be returned.") from e
def __getitem__(self, groupname: str) -> SshGroup:
group, _ = self._get_w_raw_set(groupname)
return group
def __delitem__(self, groupname: str) -> None:
raw_groups = self._load_raw()
try:
del raw_groups.ssh_groups[groupname]
except KeyError as e:
raise SshGroupsRepoKeyAccessError(
f"No such group: '{groupname}'. Can't be deleted.") from e
self._dump_raw(raw_groups)
def get(self, groupname: str,
default: Optional[SshGroup] = None) -> Optional[SshGroup]:
try:
return self[groupname]
except SshGroupsRepoKeyAccessError:
return default
def add(
self,
groupname: str,
exist_ok: bool = False
) -> SshGroup:
try:
raw_groups = self._load_raw()
except SshGroupsRepoFileAccessError:
if not self._policy.silent_create_file_groups:
raise # re-raise
raw_groups = SshRawGroups.mk_empty()
if groupname in raw_groups.ssh_groups:
if not exist_ok:
raise SshGroupsRepoGroupAlreadyExistsError(
f"Failed to add group '{groupname}'. Already exists.")
else:
raw_groups.ssh_groups[groupname] = SshRawGroup.mk_new(
groupname)
self._dump_raw(raw_groups)
group = self.get(groupname, None)
assert group is not None
return group
def ensure(self, groupname: str) -> SshGroup:
return self.add(groupname, exist_ok=True)
def rm(
self, groupname: str, force: bool = False
) -> None:
try:
group, raw_groups = self._get_w_raw_set(groupname)
# Should exist as we successfully retrieved the group.
del raw_groups.ssh_groups[groupname]
self._dump_raw(raw_groups)
except (SshGroupsRepoFileAccessError, SshGroupsRepoKeyAccessError):
if not force:
raise # re-raise
``` |
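A hedged usage sketch for the repository above: it assumes an already constructed `SshGroupsRepo` (its constructor needs the auth dir, file stem, policy and users repo, all built elsewhere in the package) and uses placeholder group and user names.

```python
# Illustrative only: `groups` is assumed to be a fully constructed
# SshGroupsRepo; "admins" and the user id are placeholders.
def grant_admin(groups: SshGroupsRepo, user_id: str) -> None:
    # `ensure` == `add(..., exist_ok=True)`: creates the group if missing.
    admins = groups.ensure("admins")
    try:
        admins.add_member_by_id(user_id)
    except SshGroupsRepoUserAlreadyGroupMemberError:
        pass  # Already a member: nothing to do.

    # Membership is then visible through the mapping-style accessors.
    assert user_id in groups["admins"].members_names
```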
{
"source": "jraygauthier/pytest-monitor",
"score": 2
} |
#### File: pytest-monitor/pytest_monitor/sys_utils.py
```python
import hashlib
import multiprocessing
import os
import platform
import psutil
import socket
import subprocess
import sys
def collect_ci_info():
d = dict()
# Test for jenkins
if "BUILD_NUMBER" in os.environ:
if "BRANCH_NAME" in os.environ or "JOB_NAME" in os.environ:
br = os.environ["BRANCH_NAME"] if "BRANCH_NAME" in os.environ else os.environ["JOB_NAME"]
d = dict(pipeline_branch=br, pipeline_build_no=os.environ["BUILD_NUMBER"], __ci__='jenkinsci')
# Test for CircleCI
if "CIRCLE_JOB" in os.environ and "CIRCLE_BUILD_NUM" in os.environ:
d = dict(pipeline_branch=os.environ["CIRCLE_JOB"], pipeline_build_no=os.environ["CIRCLE_BUILD_NUM"],
__ci__='circleci')
# Test for TravisCI
if "TRAVIS_BUILD_NUMBER" in os.environ and "TRAVIS_BUILD_ID" in os.environ:
d = dict(pipeline_branch=os.environ["TRAVIS_BUILD_ID"], pipeline_build_no=os.environ["TRAVIS_BUILD_NUMBER"],
__ci__='travisci')
# Test for DroneCI
if "DRONE_REPO_BRANCH" in os.environ and "DRONE_BUILD_NUMBER" in os.environ:
d = dict(pipeline_branch=os.environ["DRONE_REPO_BRANCH"], pipeline_build_no=os.environ["DRONE_BUILD_NUMBER"],
__ci__='droneci')
# Test for Gitlab CI
if "CI_JOB_NAME" in os.environ and "CI_PIPELINE_ID" in os.environ:
d = dict(pipeline_branch=os.environ["CI_JOB_NAME"], pipeline_build_no=os.environ["CI_PIPELINE_ID"],
__ci__='gitlabci')
return d
def determine_scm_revision():
for cmd in [r'git rev-parse HEAD', r'p4 changes -m1 \#have']:
p = subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
p_out, _ = p.communicate()
if p.returncode == 0:
return p_out.decode().split('\n')[0]
return ''
def _get_cpu_string():
if platform.system().lower() == "darwin":
old_path = os.environ['PATH']
os.environ['PATH'] = old_path + ':' + '/usr/sbin'
ret = subprocess.check_output('sysctl -n machdep.cpu.brand_string', shell=True).decode().strip()
os.environ['PATH'] = old_path
return ret
elif platform.system().lower() == 'linux':
with open('/proc/cpuinfo', 'r', encoding='utf-8') as f:
lines = [i for i in f if i.startswith('model name')]
if lines:
return lines[0].split(':')[1].strip()
return platform.processor()
class ExecutionContext:
def __init__(self):
self.__cpu_count = multiprocessing.cpu_count()
self.__cpu_vendor = _get_cpu_string()
self.__cpu_freq_base = psutil.cpu_freq().current
self.__proc_typ = platform.processor()
self.__tot_mem = int(psutil.virtual_memory().total / 1024**2)
self.__fqdn = socket.getfqdn()
self.__machine = platform.machine()
self.__arch = platform.architecture()[0]
self.__system = f'{platform.system()} - {platform.release()}'
self.__py_ver = sys.version
def to_dict(self):
return dict(cpu_count=self.cpu_count,
cpu_frequency=self.cpu_frequency,
cpu_type=self.cpu_type,
cpu_vendor=self.cpu_vendor,
ram_total=self.ram_total,
machine_node=self.fqdn,
machine_type=self.machine,
machine_arch=self.architecture,
system_info=self.system_info,
python_info=self.python_info,
h=self.hash())
@property
def cpu_count(self):
return self.__cpu_count
@property
def cpu_frequency(self):
return self.__cpu_freq_base
@property
def cpu_type(self):
return self.__proc_typ
@property
def cpu_vendor(self):
return self.__cpu_vendor
@property
def ram_total(self):
return self.__tot_mem
@property
def fqdn(self):
return self.__fqdn
@property
def machine(self):
return self.__machine
@property
def architecture(self):
return self.__arch
@property
def system_info(self):
return self.__system
@property
def python_info(self):
return self.__py_ver
def hash(self):
hr = hashlib.md5()
hr.update(str(self.__cpu_count).encode())
hr.update(str(self.__cpu_freq_base).encode())
hr.update(str(self.__proc_typ).encode())
hr.update(str(self.__tot_mem).encode())
hr.update(str(self.__fqdn).encode())
hr.update(str(self.__machine).encode())
hr.update(str(self.__arch).encode())
hr.update(str(self.__system).encode())
hr.update(str(self.__py_ver).encode())
return hr.hexdigest()
``` |
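A short sketch of how these helpers fit together; it only uses names defined in this module, and the printed field is one of the keys returned by `to_dict`.

```python
# Illustrative: gather the machine / CI / SCM description that
# pytest-monitor attaches to a test session.
ctx = ExecutionContext()
session_info = {
    "scm": determine_scm_revision(),   # "" when neither git nor p4 answers
    **collect_ci_info(),               # {} outside of a recognized CI
    **ctx.to_dict(),                   # hardware, OS and python description
}
print(session_info["h"])               # md5 fingerprint of the machine profile
```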
{
"source": "jraylan/aiotdlib",
"score": 2
} |
#### File: aiotdlib/api/base_object.py
```python
from __future__ import annotations
import logging
from enum import Enum
from typing import (
Any,
Dict,
Optional,
Type,
)
import ujson
from pydantic import (
BaseConfig,
BaseModel,
Field,
)
logger = logging.getLogger('BaseObject')
class BaseObject(BaseModel):
class Config(BaseConfig):
anystr_strip_whitespace = True
underscore_attrs_are_private = True
use_enum_values = True
json_loads = ujson.loads
json_dumps = ujson.dumps
_all: Optional[Dict[str, Type[BaseObject]]] = {}
ID: str = Field(..., alias='@type')
EXTRA: Optional[dict[str, Any]] = Field({}, alias='@extra')
@staticmethod
def read(data: dict):
if isinstance(data, (list, tuple,)):
return [BaseObject.read(x) for x in data]
if not isinstance(data, dict):
return data
q_type = data.get('@type')
if not bool(q_type):
return data
q_type = q_type.value if isinstance(q_type, Enum) else q_type
object_class = BaseObject._all.get(q_type)
if not bool(object_class):
logger.error(f'Object class not found for @type={q_type}')
return data
processed_data = {}
for key, value in data.items():
# Workaround for BaseModel.construct(**kwargs).
# It doesn't automatically rename fields according to aliases
if key in ['json', 'filter', 'type', 'hash']:
key = key + "_"
elif key == '@extra':
key = 'EXTRA'
processed_data[key] = BaseObject.read(value)
del data
return object_class.read(processed_data)
```
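The dispatch above is easiest to see with a tiny illustration. Registering concrete classes into `BaseObject._all` is done elsewhere in the package, so the sketch below only exercises the documented fall-through behaviour for unknown `@type` values.

```python
# Illustrative only: read() recurses into lists/dicts and, for an
# unregistered "@type", logs an error and returns the raw dict unchanged.
raw = {"@type": "someUnknownUpdate", "value": 42}
assert BaseObject.read(raw) == raw

# Values without an "@type" key (or non-dict values) pass through as-is.
assert BaseObject.read([1, "a", {"plain": True}]) == [1, "a", {"plain": True}]
```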
#### File: api/functions/accept_call.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import CallProtocol
class AcceptCall(BaseObject):
"""
Accepts an incoming call
:param call_id: Call identifier
:type call_id: :class:`int`
:param protocol: Description of the call protocols supported by the application
:type protocol: :class:`CallProtocol`
"""
ID: str = Field("acceptCall", alias="@type")
call_id: int
protocol: CallProtocol
@staticmethod
def read(q: dict) -> AcceptCall:
return AcceptCall.construct(**q)
```
#### File: api/functions/accept_terms_of_service.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class AcceptTermsOfService(BaseObject):
"""
Accepts Telegram terms of services
:param terms_of_service_id: Terms of service identifier
:type terms_of_service_id: :class:`str`
"""
ID: str = Field("acceptTermsOfService", alias="@type")
terms_of_service_id: str
@staticmethod
def read(q: dict) -> AcceptTermsOfService:
return AcceptTermsOfService.construct(**q)
```
#### File: api/functions/add_chat_to_list.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import ChatList
class AddChatToList(BaseObject):
"""
Adds a chat to a chat list. A chat can't be simultaneously in Main and Archive chat lists, so it is automatically removed from another one if needed
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param chat_list: The chat list. Use getChatListsToAddChat to get suitable chat lists
:type chat_list: :class:`ChatList`
"""
ID: str = Field("addChatToList", alias="@type")
chat_id: int
chat_list: ChatList
@staticmethod
def read(q: dict) -> AddChatToList:
return AddChatToList.construct(**q)
```
#### File: api/functions/add_network_statistics.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import NetworkStatisticsEntry
class AddNetworkStatistics(BaseObject):
"""
Adds the specified data to data usage statistics. Can be called before authorization
:param entry: The network statistics entry with the data to be added to statistics
:type entry: :class:`NetworkStatisticsEntry`
"""
ID: str = Field("addNetworkStatistics", alias="@type")
entry: NetworkStatisticsEntry
@staticmethod
def read(q: dict) -> AddNetworkStatistics:
return AddNetworkStatistics.construct(**q)
```
#### File: api/functions/add_saved_animation.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import InputFile
class AddSavedAnimation(BaseObject):
"""
Manually adds a new animation to the list of saved animations. The new animation is added to the beginning of the list. If the animation was already in the list, it is removed first. Only non-secret video animations with MIME type "video/mp4" can be added to the list
:param animation: The animation file to be added. Only animations known to the server (i.e., successfully sent via a message) can be added to the list
:type animation: :class:`InputFile`
"""
ID: str = Field("addSavedAnimation", alias="@type")
animation: InputFile
@staticmethod
def read(q: dict) -> AddSavedAnimation:
return AddSavedAnimation.construct(**q)
```
#### File: api/functions/change_phone_number.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import PhoneNumberAuthenticationSettings
class ChangePhoneNumber(BaseObject):
"""
Changes the phone number of the user and sends an authentication code to the user's new phone number. On success, returns information about the sent code
:param phone_number: The new phone number of the user in international format
:type phone_number: :class:`str`
:param settings: Settings for the authentication of the user's phone number; pass null to use default settings
:type settings: :class:`PhoneNumberAuthenticationSettings`
"""
ID: str = Field("changePhoneNumber", alias="@type")
phone_number: str
settings: PhoneNumberAuthenticationSettings
@staticmethod
def read(q: dict) -> ChangePhoneNumber:
return ChangePhoneNumber.construct(**q)
```
#### File: api/functions/check_authentication_code.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class CheckAuthenticationCode(BaseObject):
"""
Checks the authentication code. Works only when the current authorization state is authorizationStateWaitCode
:param code: The verification code received via SMS, Telegram message, phone call, or flash call
:type code: :class:`str`
"""
ID: str = Field("checkAuthenticationCode", alias="@type")
code: str
@staticmethod
def read(q: dict) -> CheckAuthenticationCode:
return CheckAuthenticationCode.construct(**q)
```
#### File: api/functions/clear_recent_stickers.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ClearRecentStickers(BaseObject):
"""
Clears the list of recently used stickers
:param is_attached: Pass true to clear the list of stickers recently attached to photo or video files; pass false to clear the list of recently sent stickers
:type is_attached: :class:`bool`
"""
ID: str = Field("clearRecentStickers", alias="@type")
is_attached: bool
@staticmethod
def read(q: dict) -> ClearRecentStickers:
return ClearRecentStickers.construct(**q)
```
#### File: api/functions/create_new_sticker_set.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import InputSticker
class CreateNewStickerSet(BaseObject):
"""
Creates a new sticker set. Returns the newly created sticker set
:param user_id: Sticker set owner; ignored for regular users
:type user_id: :class:`int`
:param title: Sticker set title; 1-64 characters
:type title: :class:`str`
:param name: Sticker set name. Can contain only English letters, digits and underscores. Must end with *"_by_<bot username>"* (*<bot_username>* is case insensitive) for bots; 1-64 characters
:type name: :class:`str`
:param is_masks: True, if stickers are masks. Animated stickers can't be masks
:type is_masks: :class:`bool`
:param stickers: List of stickers to be added to the set; must be non-empty. All stickers must be of the same type. For animated stickers, uploadStickerFile must be used before the sticker is shown
:type stickers: :class:`list[InputSticker]`
:param source: Source of the sticker set; may be empty if unknown
:type source: :class:`str`
"""
ID: str = Field("createNewStickerSet", alias="@type")
user_id: int
title: str = Field(..., min_length=1, max_length=64)
name: str = Field(..., min_length=1, max_length=64)
is_masks: bool
stickers: list[InputSticker]
source: str
@staticmethod
def read(q: dict) -> CreateNewStickerSet:
return CreateNewStickerSet.construct(**q)
```
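The `Field(..., min_length=1, max_length=64)` constraints above are enforced by pydantic at construction time; a hedged sketch (all argument values are placeholders) of what that looks like:

```python
# Illustrative only: an empty title violates min_length=1 and is rejected
# before the request would ever reach TDLib.
from pydantic import ValidationError

try:
    CreateNewStickerSet(
        user_id=1,
        title="",                      # too short: min_length=1
        name="my_set_by_some_bot",
        is_masks=False,
        stickers=[],
        source="",
    )
except ValidationError as exc:
    print(exc)
```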
#### File: api/functions/delete_all_revoked_chat_invite_links.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class DeleteAllRevokedChatInviteLinks(BaseObject):
"""
Deletes all revoked chat invite links created by a given chat administrator. Requires administrator privileges and can_invite_users right in the chat for own links and owner privileges for other links
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param creator_user_id: User identifier of a chat administrator, which links will be deleted. Must be an identifier of the current user for non-owner
:type creator_user_id: :class:`int`
"""
ID: str = Field("deleteAllRevokedChatInviteLinks", alias="@type")
chat_id: int
creator_user_id: int
@staticmethod
def read(q: dict) -> DeleteAllRevokedChatInviteLinks:
return DeleteAllRevokedChatInviteLinks.construct(**q)
```
#### File: api/functions/delete_chat_history.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class DeleteChatHistory(BaseObject):
"""
Deletes all messages in the chat. Use chat.can_be_deleted_only_for_self and chat.can_be_deleted_for_all_users fields to find whether and how the method can be applied to the chat
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param remove_from_chat_list: Pass true if the chat needs to be removed from the chat list
:type remove_from_chat_list: :class:`bool`
:param revoke: Pass true to try to delete chat history for all users
:type revoke: :class:`bool`
"""
ID: str = Field("deleteChatHistory", alias="@type")
chat_id: int
remove_from_chat_list: bool
revoke: bool
@staticmethod
def read(q: dict) -> DeleteChatHistory:
return DeleteChatHistory.construct(**q)
```
#### File: api/functions/delete_chat_reply_markup.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class DeleteChatReplyMarkup(BaseObject):
"""
Deletes the default reply markup from a chat. Must be called after a one-time keyboard or a ForceReply reply markup has been used. UpdateChatReplyMarkup will be sent if the reply markup is changed
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param message_id: The message identifier of the used keyboard
:type message_id: :class:`int`
"""
ID: str = Field("deleteChatReplyMarkup", alias="@type")
chat_id: int
message_id: int
@staticmethod
def read(q: dict) -> DeleteChatReplyMarkup:
return DeleteChatReplyMarkup.construct(**q)
```
#### File: api/functions/delete_revoked_chat_invite_link.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class DeleteRevokedChatInviteLink(BaseObject):
"""
Deletes revoked chat invite links. Requires administrator privileges and can_invite_users right in the chat for own links and owner privileges for other links
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param invite_link: Invite link to revoke
:type invite_link: :class:`str`
"""
ID: str = Field("deleteRevokedChatInviteLink", alias="@type")
chat_id: int
invite_link: str
@staticmethod
def read(q: dict) -> DeleteRevokedChatInviteLink:
return DeleteRevokedChatInviteLink.construct(**q)
```
#### File: api/functions/edit_message_caption.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import FormattedText
from ..types import ReplyMarkup
class EditMessageCaption(BaseObject):
"""
Edits the message content caption. Returns the edited message after the edit is completed on the server side
:param chat_id: The chat the message belongs to
:type chat_id: :class:`int`
:param message_id: Identifier of the message
:type message_id: :class:`int`
:param reply_markup: The new message reply markup; pass null if none; for bots only
:type reply_markup: :class:`ReplyMarkup`
:param caption: New message content caption; 0-GetOption("message_caption_length_max") characters; pass null to remove caption
:type caption: :class:`FormattedText`
"""
ID: str = Field("editMessageCaption", alias="@type")
chat_id: int
message_id: int
reply_markup: ReplyMarkup
caption: FormattedText
@staticmethod
def read(q: dict) -> EditMessageCaption:
return EditMessageCaption.construct(**q)
```
#### File: api/functions/edit_message_live_location.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import Location
from ..types import ReplyMarkup
class EditMessageLiveLocation(BaseObject):
"""
Edits the message content of a live location. Messages can be edited for a limited period of time specified in the live location. Returns the edited message after the edit is completed on the server side
:param chat_id: The chat the message belongs to
:type chat_id: :class:`int`
:param message_id: Identifier of the message
:type message_id: :class:`int`
:param reply_markup: The new message reply markup; pass null if none; for bots only
:type reply_markup: :class:`ReplyMarkup`
:param location: New location content of the message; pass null to stop sharing the live location
:type location: :class:`Location`
:param heading: The new direction in which the location moves, in degrees; 1-360. Pass 0 if unknown
:type heading: :class:`int`
:param proximity_alert_radius: The new maximum distance for proximity alerts, in meters (0-100000). Pass 0 if the notification is disabled
:type proximity_alert_radius: :class:`int`
"""
ID: str = Field("editMessageLiveLocation", alias="@type")
chat_id: int
message_id: int
reply_markup: ReplyMarkup
location: Location
heading: int
proximity_alert_radius: int
@staticmethod
def read(q: dict) -> EditMessageLiveLocation:
return EditMessageLiveLocation.construct(**q)
```
#### File: api/functions/edit_message_media.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import InputMessageContent
from ..types import ReplyMarkup
class EditMessageMedia(BaseObject):
"""
Edits the content of a message with an animation, an audio, a document, a photo or a video, including message caption. If only the caption needs to be edited, use editMessageCaption instead. The media can't be edited if the message was set to self-destruct or to a self-destructing media. The type of message content in an album can't be changed with exception of replacing a photo with a video or vice versa. Returns the edited message after the edit is completed on the server side
:param chat_id: The chat the message belongs to
:type chat_id: :class:`int`
:param message_id: Identifier of the message
:type message_id: :class:`int`
:param reply_markup: The new message reply markup; pass null if none; for bots only
:type reply_markup: :class:`ReplyMarkup`
:param input_message_content: New content of the message. Must be one of the following types: inputMessageAnimation, inputMessageAudio, inputMessageDocument, inputMessagePhoto or inputMessageVideo
:type input_message_content: :class:`InputMessageContent`
"""
ID: str = Field("editMessageMedia", alias="@type")
chat_id: int
message_id: int
reply_markup: ReplyMarkup
input_message_content: InputMessageContent
@staticmethod
def read(q: dict) -> EditMessageMedia:
return EditMessageMedia.construct(**q)
```
#### File: api/functions/edit_message_reply_markup.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import ReplyMarkup
class EditMessageReplyMarkup(BaseObject):
"""
Edits the message reply markup; for bots only. Returns the edited message after the edit is completed on the server side
:param chat_id: The chat the message belongs to
:type chat_id: :class:`int`
:param message_id: Identifier of the message
:type message_id: :class:`int`
:param reply_markup: The new message reply markup; pass null if none
:type reply_markup: :class:`ReplyMarkup`
"""
ID: str = Field("editMessageReplyMarkup", alias="@type")
chat_id: int
message_id: int
reply_markup: ReplyMarkup
@staticmethod
def read(q: dict) -> EditMessageReplyMarkup:
return EditMessageReplyMarkup.construct(**q)
```
#### File: api/functions/end_group_call_recording.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class EndGroupCallRecording(BaseObject):
"""
Ends recording of an active group call. Requires groupCall.can_be_managed group call flag
:param group_call_id: Group call identifier
:type group_call_id: :class:`int`
"""
ID: str = Field("endGroupCallRecording", alias="@type")
group_call_id: int
@staticmethod
def read(q: dict) -> EndGroupCallRecording:
return EndGroupCallRecording.construct(**q)
```
#### File: api/functions/get_basic_group_full_info.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetBasicGroupFullInfo(BaseObject):
"""
Returns full information about a basic group by its identifier
:param basic_group_id: Basic group identifier
:type basic_group_id: :class:`int`
"""
ID: str = Field("getBasicGroupFullInfo", alias="@type")
basic_group_id: int
@staticmethod
def read(q: dict) -> GetBasicGroupFullInfo:
return GetBasicGroupFullInfo.construct(**q)
```
#### File: api/functions/get_basic_group.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetBasicGroup(BaseObject):
"""
Returns information about a basic group by its identifier. This is an offline request if the current user is not a bot
:param basic_group_id: Basic group identifier
:type basic_group_id: :class:`int`
"""
ID: str = Field("getBasicGroup", alias="@type")
basic_group_id: int
@staticmethod
def read(q: dict) -> GetBasicGroup:
return GetBasicGroup.construct(**q)
```
#### File: api/functions/get_callback_query_message.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetCallbackQueryMessage(BaseObject):
"""
Returns information about a message with the callback button that originated a callback query; for bots only
:param chat_id: Identifier of the chat the message belongs to
:type chat_id: :class:`int`
:param message_id: Message identifier
:type message_id: :class:`int`
:param callback_query_id: Identifier of the callback query
:type callback_query_id: :class:`int`
"""
ID: str = Field("getCallbackQueryMessage", alias="@type")
chat_id: int
message_id: int
callback_query_id: int
@staticmethod
def read(q: dict) -> GetCallbackQueryMessage:
return GetCallbackQueryMessage.construct(**q)
```
#### File: api/functions/get_chat_invite_link.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetChatInviteLink(BaseObject):
"""
Returns information about an invite link. Requires administrator privileges and can_invite_users right in the chat to get own links and owner privileges to get other links
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param invite_link: Invite link to get
:type invite_link: :class:`str`
"""
ID: str = Field("getChatInviteLink", alias="@type")
chat_id: int
invite_link: str
@staticmethod
def read(q: dict) -> GetChatInviteLink:
return GetChatInviteLink.construct(**q)
```
#### File: api/functions/get_chat_pinned_message.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetChatPinnedMessage(BaseObject):
"""
Returns information about a newest pinned message in the chat
:param chat_id: Identifier of the chat the message belongs to
:type chat_id: :class:`int`
"""
ID: str = Field("getChatPinnedMessage", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> GetChatPinnedMessage:
return GetChatPinnedMessage.construct(**q)
```
#### File: api/functions/get_chat_sponsored_messages.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetChatSponsoredMessages(BaseObject):
"""
Returns sponsored messages to be shown in a chat; for channel chats only
:param chat_id: Identifier of the chat
:type chat_id: :class:`int`
"""
ID: str = Field("getChatSponsoredMessages", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> GetChatSponsoredMessages:
return GetChatSponsoredMessages.construct(**q)
```
#### File: api/functions/get_connected_websites.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetConnectedWebsites(BaseObject):
"""
Returns all websites where the current user used Telegram to log in
"""
ID: str = Field("getConnectedWebsites", alias="@type")
@staticmethod
def read(q: dict) -> GetConnectedWebsites:
return GetConnectedWebsites.construct(**q)
```
#### File: api/functions/get_contacts.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetContacts(BaseObject):
"""
Returns all user contacts
"""
ID: str = Field("getContacts", alias="@type")
@staticmethod
def read(q: dict) -> GetContacts:
return GetContacts.construct(**q)
```
#### File: api/functions/get_deep_link_info.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetDeepLinkInfo(BaseObject):
"""
Returns information about a tg:// deep link. Use "tg://need_update_for_some_feature" or "tg:some_unsupported_feature" for testing. Returns a 404 error for unknown links. Can be called before authorization
:param link: The link
:type link: :class:`str`
"""
ID: str = Field("getDeepLinkInfo", alias="@type")
link: str
@staticmethod
def read(q: dict) -> GetDeepLinkInfo:
return GetDeepLinkInfo.construct(**q)
```
#### File: api/functions/get_emoji_suggestions_url.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetEmojiSuggestionsUrl(BaseObject):
"""
Returns an HTTP URL which can be used to automatically log in to the translation platform and suggest new emoji replacements. The URL will be valid for 30 seconds after generation
:param language_code: Language code for which the emoji replacements will be suggested
:type language_code: :class:`str`
"""
ID: str = Field("getEmojiSuggestionsUrl", alias="@type")
language_code: str
@staticmethod
def read(q: dict) -> GetEmojiSuggestionsUrl:
return GetEmojiSuggestionsUrl.construct(**q)
```
#### File: api/functions/get_external_link_info.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetExternalLinkInfo(BaseObject):
"""
Returns information about an action to be done when the current user clicks an external link. Don't use this method for links from secret chats if web page preview is disabled in secret chats
:param link: The link
:type link: :class:`str`
"""
ID: str = Field("getExternalLinkInfo", alias="@type")
link: str
@staticmethod
def read(q: dict) -> GetExternalLinkInfo:
return GetExternalLinkInfo.construct(**q)
```
#### File: api/functions/get_file_mime_type.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetFileMimeType(BaseObject):
"""
Returns the MIME type of a file, guessed by its extension. Returns an empty string on failure. Can be called synchronously
:param file_name: The name of the file or path to the file
:type file_name: :class:`str`
"""
ID: str = Field("getFileMimeType", alias="@type")
file_name: str
@staticmethod
def read(q: dict) -> GetFileMimeType:
return GetFileMimeType.construct(**q)
```
#### File: api/functions/get_file.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetFile(BaseObject):
"""
Returns information about a file; this is an offline request
:param file_id: Identifier of the file to get
:type file_id: :class:`int`
"""
ID: str = Field("getFile", alias="@type")
file_id: int
@staticmethod
def read(q: dict) -> GetFile:
return GetFile.construct(**q)
```
#### File: api/functions/get_group_call_invite_link.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetGroupCallInviteLink(BaseObject):
"""
Returns invite link to a voice chat in a public chat
:param group_call_id: Group call identifier
:type group_call_id: :class:`int`
:param can_self_unmute: Pass true if the invite link needs to contain an invite hash, passing which to joinGroupCall would allow the invited user to unmute themselves. Requires groupCall.can_be_managed group call flag
:type can_self_unmute: :class:`bool`
"""
ID: str = Field("getGroupCallInviteLink", alias="@type")
group_call_id: int
can_self_unmute: bool
@staticmethod
def read(q: dict) -> GetGroupCallInviteLink:
return GetGroupCallInviteLink.construct(**q)
```
#### File: api/functions/get_installed_sticker_sets.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetInstalledStickerSets(BaseObject):
"""
Returns a list of installed sticker sets
:param is_masks: Pass true to return mask sticker sets; pass false to return ordinary sticker sets
:type is_masks: :class:`bool`
"""
ID: str = Field("getInstalledStickerSets", alias="@type")
is_masks: bool
@staticmethod
def read(q: dict) -> GetInstalledStickerSets:
return GetInstalledStickerSets.construct(**q)
```
#### File: api/functions/get_language_pack_string.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetLanguagePackString(BaseObject):
"""
Returns a string stored in the local database from the specified localization target and language pack by its key. Returns a 404 error if the string is not found. Can be called synchronously
:param language_pack_database_path: Path to the language pack database in which strings are stored
:type language_pack_database_path: :class:`str`
:param localization_target: Localization target to which the language pack belongs
:type localization_target: :class:`str`
:param language_pack_id: Language pack identifier
:type language_pack_id: :class:`str`
:param key: Language pack key of the string to be returned
:type key: :class:`str`
"""
ID: str = Field("getLanguagePackString", alias="@type")
language_pack_database_path: str
localization_target: str
language_pack_id: str
key: str
@staticmethod
def read(q: dict) -> GetLanguagePackString:
return GetLanguagePackString.construct(**q)
```
#### File: api/functions/get_login_url.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetLoginUrl(BaseObject):
"""
Returns an HTTP URL which can be used to automatically authorize the user on a website after clicking an inline button of type inlineKeyboardButtonTypeLoginUrl. Use the method getLoginUrlInfo to find whether a prior user confirmation is needed. If an error is returned, then the button must be handled as an ordinary URL button
:param chat_id: Chat identifier of the message with the button
:type chat_id: :class:`int`
:param message_id: Message identifier of the message with the button
:type message_id: :class:`int`
:param button_id: Button identifier
:type button_id: :class:`int`
:param allow_write_access: True, if the user allowed the bot to send them messages
:type allow_write_access: :class:`bool`
"""
ID: str = Field("getLoginUrl", alias="@type")
chat_id: int
message_id: int
button_id: int
allow_write_access: bool
@staticmethod
def read(q: dict) -> GetLoginUrl:
return GetLoginUrl.construct(**q)
```
#### File: api/functions/get_message_thread_history.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetMessageThreadHistory(BaseObject):
"""
Returns messages in a message thread of a message. Can be used only if message.can_get_message_thread == true. Message thread of a channel message is in the channel's linked supergroup. The messages are returned in a reverse chronological order (i.e., in order of decreasing message_id). For optimal performance, the number of returned messages is chosen by TDLib
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param message_id: Message identifier, which thread history needs to be returned
:type message_id: :class:`int`
:param from_message_id: Identifier of the message starting from which history must be fetched; use 0 to get results from the last message
:type from_message_id: :class:`int`
:param offset: Specify 0 to get results from exactly the from_message_id or a negative offset up to 99 to get additionally some newer messages
:type offset: :class:`int`
:param limit: The maximum number of messages to be returned; must be positive and can't be greater than 100. If the offset is negative, the limit must be greater than or equal to -offset. For optimal performance, the number of returned messages is chosen by TDLib and can be smaller than the specified limit
:type limit: :class:`int`
"""
ID: str = Field("getMessageThreadHistory", alias="@type")
chat_id: int
message_id: int
from_message_id: int
offset: int
limit: int
@staticmethod
def read(q: dict) -> GetMessageThreadHistory:
return GetMessageThreadHistory.construct(**q)
```
#### File: api/functions/get_option.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetOption(BaseObject):
"""
Returns the value of an option by its name. (Check the list of available options on https://core.telegram.org/tdlib/options.) Can be called before authorization
:param name: The name of the option
:type name: :class:`str`
"""
ID: str = Field("getOption", alias="@type")
name: str
@staticmethod
def read(q: dict) -> GetOption:
return GetOption.construct(**q)
```
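As with every generated function class, the aliases defined on `BaseObject` map the pythonic field names back to the TDLib wire format; a small illustrative sketch:

```python
# Illustrative only: build the request and look at its wire representation.
request = GetOption(name="version")
payload = request.dict(by_alias=True)

assert payload["@type"] == "getOption"
assert payload["name"] == "version"

# The inverse direction, as used when TDLib replies come back in.
assert GetOption.read({"name": "version"}).name == "version"
```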
#### File: api/functions/get_payment_receipt.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetPaymentReceipt(BaseObject):
"""
Returns information about a successful payment
:param chat_id: Chat identifier of the PaymentSuccessful message
:type chat_id: :class:`int`
:param message_id: Message identifier
:type message_id: :class:`int`
"""
ID: str = Field("getPaymentReceipt", alias="@type")
chat_id: int
message_id: int
@staticmethod
def read(q: dict) -> GetPaymentReceipt:
return GetPaymentReceipt.construct(**q)
```
#### File: api/functions/get_phone_number_info.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetPhoneNumberInfo(BaseObject):
"""
Returns information about a phone number by its prefix. Can be called before authorization
:param phone_number_prefix: The phone number prefix
:type phone_number_prefix: :class:`str`
"""
ID: str = Field("getPhoneNumberInfo", alias="@type")
phone_number_prefix: str
@staticmethod
def read(q: dict) -> GetPhoneNumberInfo:
return GetPhoneNumberInfo.construct(**q)
```
#### File: api/functions/get_proxy_link.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetProxyLink(BaseObject):
"""
Returns an HTTPS link, which can be used to add a proxy. Available only for SOCKS5 and MTProto proxies. Can be called before authorization
:param proxy_id: Proxy identifier
:type proxy_id: :class:`int`
"""
ID: str = Field("getProxyLink", alias="@type")
proxy_id: int
@staticmethod
def read(q: dict) -> GetProxyLink:
return GetProxyLink.construct(**q)
```
#### File: api/functions/get_recently_opened_chats.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetRecentlyOpenedChats(BaseObject):
"""
Returns recently opened chats, this is an offline request. Returns chats in the order of last opening
:param limit: The maximum number of chats to be returned
:type limit: :class:`int`
"""
ID: str = Field("getRecentlyOpenedChats", alias="@type")
limit: int
@staticmethod
def read(q: dict) -> GetRecentlyOpenedChats:
return GetRecentlyOpenedChats.construct(**q)
```
#### File: api/functions/get_recommended_chat_filters.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetRecommendedChatFilters(BaseObject):
"""
Returns recommended chat filters for the current user
"""
ID: str = Field("getRecommendedChatFilters", alias="@type")
@staticmethod
def read(q: dict) -> GetRecommendedChatFilters:
return GetRecommendedChatFilters.construct(**q)
```
#### File: api/functions/get_scope_notification_settings.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import NotificationSettingsScope
class GetScopeNotificationSettings(BaseObject):
"""
Returns the notification settings for chats of a given type
:param scope: Types of chats for which to return the notification settings information
:type scope: :class:`NotificationSettingsScope`
"""
ID: str = Field("getScopeNotificationSettings", alias="@type")
scope: NotificationSettingsScope
@staticmethod
def read(q: dict) -> GetScopeNotificationSettings:
return GetScopeNotificationSettings.construct(**q)
```
#### File: api/functions/get_stickers.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetStickers(BaseObject):
"""
Returns stickers from the installed sticker sets that correspond to a given emoji. If the emoji is non-empty, favorite and recently used stickers may also be returned
:param emoji: String representation of emoji. If empty, returns all known installed stickers
:type emoji: :class:`str`
:param limit: The maximum number of stickers to be returned
:type limit: :class:`int`
"""
ID: str = Field("getStickers", alias="@type")
emoji: str
limit: int
@staticmethod
def read(q: dict) -> GetStickers:
return GetStickers.construct(**q)
```
#### File: api/functions/get_storage_statistics.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetStorageStatistics(BaseObject):
"""
Returns storage usage statistics. Can be called before authorization
:param chat_limit: The maximum number of chats with the largest storage usage for which separate statistics need to be returned. All other chats will be grouped in entries with chat_id == 0. If the chat info database is not used, the chat_limit is ignored and is always set to 0
:type chat_limit: :class:`int`
"""
ID: str = Field("getStorageStatistics", alias="@type")
chat_limit: int
@staticmethod
def read(q: dict) -> GetStorageStatistics:
return GetStorageStatistics.construct(**q)
```
#### File: api/functions/get_suitable_discussion_chats.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetSuitableDiscussionChats(BaseObject):
"""
Returns a list of basic group and supergroup chats, which can be used as a discussion group for a channel. Returned basic group chats must be first upgraded to supergroups before they can be set as a discussion group. To set a returned supergroup as a discussion group, access to its old messages must be enabled using toggleSupergroupIsAllHistoryAvailable first
"""
ID: str = Field("getSuitableDiscussionChats", alias="@type")
@staticmethod
def read(q: dict) -> GetSuitableDiscussionChats:
return GetSuitableDiscussionChats.construct(**q)
```
#### File: api/functions/get_text_entities.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetTextEntities(BaseObject):
"""
Returns all entities (mentions, hashtags, cashtags, bot commands, bank card numbers, URLs, and email addresses) contained in the text. Can be called synchronously
:param text: The text in which to look for entities
:type text: :class:`str`
"""
ID: str = Field("getTextEntities", alias="@type")
text: str
@staticmethod
def read(q: dict) -> GetTextEntities:
return GetTextEntities.construct(**q)
```
#### File: api/functions/get_voice_chat_available_participants.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GetVoiceChatAvailableParticipants(BaseObject):
"""
Returns list of participant identifiers, which can be used to join voice chats in a chat
:param chat_id: Chat identifier
:type chat_id: :class:`int`
"""
ID: str = Field("getVoiceChatAvailableParticipants", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> GetVoiceChatAvailableParticipants:
return GetVoiceChatAvailableParticipants.construct(**q)
```
#### File: api/functions/join_chat.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class JoinChat(BaseObject):
"""
Adds the current user as a new member to a chat. Private and secret chats can't be joined using this method
:param chat_id: Chat identifier
:type chat_id: :class:`int`
"""
ID: str = Field("joinChat", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> JoinChat:
return JoinChat.construct(**q)
```
#### File: api/functions/log_out.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class LogOut(BaseObject):
"""
Closes the TDLib instance after a proper logout. Requires an available network connection. All local data will be destroyed. After the logout completes, updateAuthorizationState with authorizationStateClosed will be sent
"""
ID: str = Field("logOut", alias="@type")
@staticmethod
def read(q: dict) -> LogOut:
return LogOut.construct(**q)
```
#### File: api/functions/read_all_chat_mentions.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ReadAllChatMentions(BaseObject):
"""
Marks all mentions in a chat as read
:param chat_id: Chat identifier
:type chat_id: :class:`int`
"""
ID: str = Field("readAllChatMentions", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> ReadAllChatMentions:
return ReadAllChatMentions.construct(**q)
```
#### File: api/functions/read_file_part.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ReadFilePart(BaseObject):
"""
Reads a part of a file from the TDLib file cache and returns read bytes. This method is intended to be used only if the application has no direct access to TDLib's file system, because it is usually slower than a direct read from the file
:param file_id: Identifier of the file. The file must be located in the TDLib file cache
:type file_id: :class:`int`
:param offset: The offset from which to read the file
:type offset: :class:`int`
:param count: Number of bytes to read. An error will be returned if there are not enough bytes available in the file from the specified position. Pass 0 to read all available data from the specified position
:type count: :class:`int`
"""
ID: str = Field("readFilePart", alias="@type")
file_id: int
offset: int
count: int
@staticmethod
def read(q: dict) -> ReadFilePart:
return ReadFilePart.construct(**q)
```
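A brief sketch of how a request like this serializes back into TDLib's JSON shape; it assumes pydantic v1 semantics and that `ReadFilePart` from the file above is in scope:
```python
# count=0 asks TDLib to return all available data from the given offset.
request = ReadFilePart(file_id=42, offset=0, count=0)
payload = request.dict(by_alias=True)
# {'@type': 'readFilePart', 'file_id': 42, 'offset': 0, 'count': 0}
```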
#### File: api/functions/register_device.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import DeviceToken
class RegisterDevice(BaseObject):
"""
Registers the currently used device for receiving push notifications. Returns a globally unique identifier of the push notification subscription
:param device_token: Device token
:type device_token: :class:`DeviceToken`
:param other_user_ids: List of user identifiers of other users currently using the application
:type other_user_ids: :class:`list[int]`
"""
ID: str = Field("registerDevice", alias="@type")
device_token: DeviceToken
other_user_ids: list[int]
@staticmethod
def read(q: dict) -> RegisterDevice:
return RegisterDevice.construct(**q)
```
#### File: api/functions/remove_notification_group.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class RemoveNotificationGroup(BaseObject):
"""
Removes a group of active notifications. Needs to be called only if the notification group is removed by the current user
:param notification_group_id: Notification group identifier
:type notification_group_id: :class:`int`
:param max_notification_id: The maximum identifier of removed notifications
:type max_notification_id: :class:`int`
"""
ID: str = Field("removeNotificationGroup", alias="@type")
notification_group_id: int
max_notification_id: int
@staticmethod
def read(q: dict) -> RemoveNotificationGroup:
return RemoveNotificationGroup.construct(**q)
```
#### File: api/functions/remove_saved_animation.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import InputFile
class RemoveSavedAnimation(BaseObject):
"""
Removes an animation from the list of saved animations
:param animation: Animation file to be removed
:type animation: :class:`InputFile`
"""
ID: str = Field("removeSavedAnimation", alias="@type")
animation: InputFile
@staticmethod
def read(q: dict) -> RemoveSavedAnimation:
return RemoveSavedAnimation.construct(**q)
```
#### File: api/functions/replace_primary_chat_invite_link.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ReplacePrimaryChatInviteLink(BaseObject):
"""
Replaces current primary invite link for a chat with a new primary invite link. Available for basic groups, supergroups, and channels. Requires administrator privileges and can_invite_users right
:param chat_id: Chat identifier
:type chat_id: :class:`int`
"""
ID: str = Field("replacePrimaryChatInviteLink", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> ReplacePrimaryChatInviteLink:
return ReplacePrimaryChatInviteLink.construct(**q)
```
#### File: api/functions/report_chat_photo.py
```python
from __future__ import annotations
import typing
from pydantic import Field
from ..base_object import BaseObject
from ..types import ChatReportReason
class ReportChatPhoto(BaseObject):
"""
Reports a chat photo to the Telegram moderators. A chat photo can be reported only if this is a private chat with a bot, a private chat with a user sharing their location, a supergroup, or a channel, since other chats can't be checked by moderators
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param file_id: Identifier of the photo to report. Only full photos from chatPhoto can be reported
:type file_id: :class:`int`
:param reason: The reason for reporting the chat photo
:type reason: :class:`ChatReportReason`
:param text: Additional report details; 0-1024 characters, defaults to None
:type text: :class:`str`, optional
"""
ID: str = Field("reportChatPhoto", alias="@type")
chat_id: int
file_id: int
reason: ChatReportReason
text: typing.Optional[str] = Field(None, max_length=1024)
@staticmethod
def read(q: dict) -> ReportChatPhoto:
return ReportChatPhoto.construct(**q)
```
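The optional `text` field above is capped at 1024 characters. A small sketch of that constraint, assuming pydantic v1 validation and using the `ChatReportReasonSpam` class defined later in this dump (api/types/chat_report_reason.py):
```python
from pydantic import ValidationError

# A valid report with no additional details.
ReportChatPhoto(chat_id=1, file_id=7, reason=ChatReportReasonSpam(), text=None)
try:
    # Rejected: `text` may not exceed 1024 characters.
    ReportChatPhoto(chat_id=1, file_id=7, reason=ChatReportReasonSpam(), text="x" * 2000)
except ValidationError:
    pass
```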
#### File: api/functions/request_qr_code_authentication.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class RequestQrCodeAuthentication(BaseObject):
"""
Requests QR code authentication by scanning a QR code on another logged in device. Works only when the current authorization state is authorizationStateWaitPhoneNumber, or if there is no pending authentication query and the current authorization state is authorizationStateWaitCode, authorizationStateWaitRegistration, or authorizationStateWaitPassword
:param other_user_ids: List of user identifiers of other users currently using the application
:type other_user_ids: :class:`list[int]`
"""
ID: str = Field("requestQrCodeAuthentication", alias="@type")
other_user_ids: list[int]
@staticmethod
def read(q: dict) -> RequestQrCodeAuthentication:
return RequestQrCodeAuthentication.construct(**q)
```
#### File: api/functions/search_secret_messages.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import SearchMessagesFilter
class SearchSecretMessages(BaseObject):
"""
Searches for messages in secret chats. Returns the results in reverse chronological order. For optimal performance, the number of returned messages is chosen by TDLib
:param chat_id: Identifier of the chat in which to search. Specify 0 to search in all secret chats
:type chat_id: :class:`int`
:param query: Query to search for. If empty, searchChatMessages must be used instead
:type query: :class:`str`
:param offset: Offset of the first entry to return as received from the previous request; use empty string to get first chunk of results
:type offset: :class:`str`
:param limit: The maximum number of messages to be returned; up to 100. For optimal performance, the number of returned messages is chosen by TDLib and can be smaller than the specified limit
:type limit: :class:`int`
:param filter_: Additional filter for messages to search; pass null to search for all messages
:type filter_: :class:`SearchMessagesFilter`
"""
ID: str = Field("searchSecretMessages", alias="@type")
chat_id: int
query: str
offset: str
limit: int
filter_: SearchMessagesFilter = Field(..., alias='filter')
@staticmethod
def read(q: dict) -> SearchSecretMessages:
return SearchSecretMessages.construct(**q)
```
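A sketch of the `filter_` → `filter` alias round-trip used above; `.construct()` (the same helper the generated `read()` relies on) skips validation, so `None` stands in for TDLib's "pass null" value. Pydantic v1 semantics are assumed:
```python
request = SearchSecretMessages.construct(
    chat_id=0,      # 0 -> search across all secret chats
    query="photo",
    offset="",      # empty string -> first chunk of results
    limit=50,
    filter_=None,   # pass null to search for all message kinds
)
payload = request.dict(by_alias=True)
assert payload["@type"] == "searchSecretMessages"
assert "filter" in payload  # filter_ serializes under its TDLib alias
```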
#### File: api/functions/send_call_debug_information.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class SendCallDebugInformation(BaseObject):
"""
Sends debug information for a call
:param call_id: Call identifier
:type call_id: :class:`int`
:param debug_information: Debug information in application-specific format
:type debug_information: :class:`str`
"""
ID: str = Field("sendCallDebugInformation", alias="@type")
call_id: int
debug_information: str
@staticmethod
def read(q: dict) -> SendCallDebugInformation:
return SendCallDebugInformation.construct(**q)
```
#### File: api/functions/send_call_signaling_data.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class SendCallSignalingData(BaseObject):
"""
Sends call signaling data
:param call_id: Call identifier
:type call_id: :class:`int`
:param data: The data
:type data: :class:`str`
"""
ID: str = Field("sendCallSignalingData", alias="@type")
call_id: int
data: str
@staticmethod
def read(q: dict) -> SendCallSignalingData:
return SendCallSignalingData.construct(**q)
```
#### File: api/functions/send_message.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import InputMessageContent
from ..types import MessageSendOptions
from ..types import ReplyMarkup
class SendMessage(BaseObject):
"""
Sends a message. Returns the sent message
:param chat_id: Target chat
:type chat_id: :class:`int`
:param message_thread_id: If not 0, a message thread identifier in which the message will be sent
:type message_thread_id: :class:`int`
:param reply_to_message_id: Identifier of the message to reply to or 0
:type reply_to_message_id: :class:`int`
:param options: Options to be used to send the message; pass null to use default options
:type options: :class:`MessageSendOptions`
:param reply_markup: Markup for replying to the message; pass null if none; for bots only
:type reply_markup: :class:`ReplyMarkup`
:param input_message_content: The content of the message to be sent
:type input_message_content: :class:`InputMessageContent`
"""
ID: str = Field("sendMessage", alias="@type")
chat_id: int
message_thread_id: int
reply_to_message_id: int
options: MessageSendOptions
reply_markup: ReplyMarkup
input_message_content: InputMessageContent
@staticmethod
def read(q: dict) -> SendMessage:
return SendMessage.construct(**q)
```
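A minimal sketch of drafting a plain text message with the model above. `InputMessageText` is assumed to exist elsewhere in the generated package (it is not part of this excerpt), `FormattedText` appears later in this dump, and `.construct()` is used so that `None` can stand in for TDLib's "pass null" defaults (pydantic v1 semantics assumed):
```python
content = InputMessageText.construct(  # assumed generated class, not shown in this excerpt
    text=FormattedText.construct(text="hello", entities=[]),
    disable_web_page_preview=True,
    clear_draft=False,
)
draft = SendMessage.construct(
    chat_id=123456789,
    message_thread_id=0,    # 0 -> not sent in a message thread
    reply_to_message_id=0,  # 0 -> not a reply
    options=None,           # pass null to use default send options
    reply_markup=None,      # bots only; None otherwise
    input_message_content=content,
)
payload = draft.dict(by_alias=True)  # ready to hand to a TDLib client's send routine
```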
#### File: api/functions/send_passport_authorization_form.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import PassportElementType
class SendPassportAuthorizationForm(BaseObject):
"""
Sends a Telegram Passport authorization form, effectively sharing data with the service. This method must be called after getPassportAuthorizationFormAvailableElements if some previously available elements are going to be reused
:param autorization_form_id: Authorization form identifier
:type autorization_form_id: :class:`int`
:param types: Types of Telegram Passport elements chosen by user to complete the authorization form
:type types: :class:`list[PassportElementType]`
"""
ID: str = Field("sendPassportAuthorizationForm", alias="@type")
autorization_form_id: int
types: list[PassportElementType]
@staticmethod
def read(q: dict) -> SendPassportAuthorizationForm:
return SendPassportAuthorizationForm.construct(**q)
```
#### File: api/functions/send_phone_number_confirmation_code.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import PhoneNumberAuthenticationSettings
class SendPhoneNumberConfirmationCode(BaseObject):
"""
Sends phone number confirmation code to handle links of the type internalLinkTypePhoneNumberConfirmation
:param hash_: Hash value from the link
:type hash_: :class:`str`
:param phone_number: Phone number value from the link
:type phone_number: :class:`str`
:param settings: Settings for the authentication of the user's phone number; pass null to use default settings
:type settings: :class:`PhoneNumberAuthenticationSettings`
"""
ID: str = Field("sendPhoneNumberConfirmationCode", alias="@type")
hash_: str = Field(..., alias='hash')
phone_number: str
settings: PhoneNumberAuthenticationSettings
@staticmethod
def read(q: dict) -> SendPhoneNumberConfirmationCode:
return SendPhoneNumberConfirmationCode.construct(**q)
```
#### File: api/functions/send_phone_number_verification_code.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import PhoneNumberAuthenticationSettings
class SendPhoneNumberVerificationCode(BaseObject):
"""
Sends a code to verify a phone number to be added to a user's Telegram Passport
:param phone_number: The phone number of the user, in international format
:type phone_number: :class:`str`
:param settings: Settings for the authentication of the user's phone number; pass null to use default settings
:type settings: :class:`PhoneNumberAuthenticationSettings`
"""
ID: str = Field("sendPhoneNumberVerificationCode", alias="@type")
phone_number: str
settings: PhoneNumberAuthenticationSettings
@staticmethod
def read(q: dict) -> SendPhoneNumberVerificationCode:
return SendPhoneNumberVerificationCode.construct(**q)
```
#### File: api/functions/set_bot_updates_status.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class SetBotUpdatesStatus(BaseObject):
"""
Informs the server about the number of pending bot updates if they haven't been processed for a long time; for bots only
:param pending_update_count: The number of pending updates
:type pending_update_count: :class:`int`
:param error_message: The last error message
:type error_message: :class:`str`
"""
ID: str = Field("setBotUpdatesStatus", alias="@type")
pending_update_count: int
error_message: str
@staticmethod
def read(q: dict) -> SetBotUpdatesStatus:
return SetBotUpdatesStatus.construct(**q)
```
#### File: api/functions/set_chat_discussion_group.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class SetChatDiscussionGroup(BaseObject):
"""
Changes the discussion group of a channel chat; requires can_change_info administrator right in the channel if it is specified
:param chat_id: Identifier of the channel chat. Pass 0 to remove a link from the supergroup passed in the second argument to a linked channel chat (requires can_pin_messages rights in the supergroup)
:type chat_id: :class:`int`
:param discussion_chat_id: Identifier of a new channel's discussion group. Use 0 to remove the discussion group. Use the method getSuitableDiscussionChats to find all suitable groups. Basic group chats must be first upgraded to supergroup chats. If new chat members don't have access to old messages in the supergroup, then toggleSupergroupIsAllHistoryAvailable must be used first to change that
:type discussion_chat_id: :class:`int`
"""
ID: str = Field("setChatDiscussionGroup", alias="@type")
chat_id: int
discussion_chat_id: int
@staticmethod
def read(q: dict) -> SetChatDiscussionGroup:
return SetChatDiscussionGroup.construct(**q)
```
#### File: api/functions/set_chat_theme.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class SetChatTheme(BaseObject):
"""
Changes the chat theme. Supported only in private and secret chats
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param theme_name: Name of the new chat theme; pass an empty string to return the default theme
:type theme_name: :class:`str`
"""
ID: str = Field("setChatTheme", alias="@type")
chat_id: int
theme_name: str
@staticmethod
def read(q: dict) -> SetChatTheme:
return SetChatTheme.construct(**q)
```
#### File: api/functions/set_group_call_participant_is_speaking.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class SetGroupCallParticipantIsSpeaking(BaseObject):
"""
    Informs TDLib that the speaking state of a participant of an active group call has changed
:param group_call_id: Group call identifier
:type group_call_id: :class:`int`
:param audio_source: Group call participant's synchronization audio source identifier, or 0 for the current user
:type audio_source: :class:`int`
:param is_speaking: True, if the user is speaking
:type is_speaking: :class:`bool`
"""
ID: str = Field("setGroupCallParticipantIsSpeaking", alias="@type")
group_call_id: int
audio_source: int
is_speaking: bool
@staticmethod
def read(q: dict) -> SetGroupCallParticipantIsSpeaking:
return SetGroupCallParticipantIsSpeaking.construct(**q)
```
#### File: api/functions/set_group_call_participant_volume_level.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import MessageSender
class SetGroupCallParticipantVolumeLevel(BaseObject):
"""
Changes volume level of a participant of an active group call. If the current user can manage the group call, then the participant's volume level will be changed for all users with the default volume level
:param group_call_id: Group call identifier
:type group_call_id: :class:`int`
:param participant_id: Participant identifier
:type participant_id: :class:`MessageSender`
:param volume_level: New participant's volume level; 1-20000 in hundreds of percents
:type volume_level: :class:`int`
"""
ID: str = Field("setGroupCallParticipantVolumeLevel", alias="@type")
group_call_id: int
participant_id: MessageSender
volume_level: int
@staticmethod
def read(q: dict) -> SetGroupCallParticipantVolumeLevel:
return SetGroupCallParticipantVolumeLevel.construct(**q)
```
#### File: api/functions/set_sticker_position_in_set.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import InputFile
class SetStickerPositionInSet(BaseObject):
"""
Changes the position of a sticker in the set to which it belongs; for bots only. The sticker set must have been created by the bot
:param sticker: Sticker
:type sticker: :class:`InputFile`
:param position: New position of the sticker in the set, zero-based
:type position: :class:`int`
"""
ID: str = Field("setStickerPositionInSet", alias="@type")
sticker: InputFile
position: int
@staticmethod
def read(q: dict) -> SetStickerPositionInSet:
return SetStickerPositionInSet.construct(**q)
```
#### File: api/functions/set_tdlib_parameters.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import TdlibParameters
class SetTdlibParameters(BaseObject):
"""
Sets the parameters for TDLib initialization. Works only when the current authorization state is authorizationStateWaitTdlibParameters
:param parameters: Parameters for TDLib initialization
:type parameters: :class:`TdlibParameters`
"""
ID: str = Field("setTdlibParameters", alias="@type")
parameters: TdlibParameters
@staticmethod
def read(q: dict) -> SetTdlibParameters:
return SetTdlibParameters.construct(**q)
```
#### File: api/functions/set_voice_chat_default_participant.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import MessageSender
class SetVoiceChatDefaultParticipant(BaseObject):
"""
Changes default participant identifier, which can be used to join voice chats in a chat
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param default_participant_id: Default group call participant identifier to join the voice chats
:type default_participant_id: :class:`MessageSender`
"""
ID: str = Field("setVoiceChatDefaultParticipant", alias="@type")
chat_id: int
default_participant_id: MessageSender
@staticmethod
def read(q: dict) -> SetVoiceChatDefaultParticipant:
return SetVoiceChatDefaultParticipant.construct(**q)
```
#### File: api/functions/test_call_empty.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class TestCallEmpty(BaseObject):
"""
Does nothing; for testing only. This is an offline method. Can be called before authorization
"""
ID: str = Field("testCallEmpty", alias="@type")
@staticmethod
def read(q: dict) -> TestCallEmpty:
return TestCallEmpty.construct(**q)
```
#### File: api/functions/test_call_vector_string.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class TestCallVectorString(BaseObject):
"""
Returns the received vector of strings; for testing only. This is an offline method. Can be called before authorization
:param x: Vector of strings to return
:type x: :class:`list[str]`
"""
ID: str = Field("testCallVectorString", alias="@type")
x: list[str]
@staticmethod
def read(q: dict) -> TestCallVectorString:
return TestCallVectorString.construct(**q)
```
#### File: api/functions/test_proxy.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import ProxyType
class TestProxy(BaseObject):
"""
Sends a simple network request to the Telegram servers via proxy; for testing only. Can be called before authorization
:param server: Proxy server IP address
:type server: :class:`str`
:param port: Proxy server port
:type port: :class:`int`
:param type_: Proxy type
:type type_: :class:`ProxyType`
:param dc_id: Identifier of a datacenter, with which to test connection
:type dc_id: :class:`int`
:param timeout: The maximum overall timeout for the request
:type timeout: :class:`float`
"""
ID: str = Field("testProxy", alias="@type")
server: str
port: int
type_: ProxyType = Field(..., alias='type')
dc_id: int
timeout: float
@staticmethod
def read(q: dict) -> TestProxy:
return TestProxy.construct(**q)
```
#### File: api/functions/test_square_int.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class TestSquareInt(BaseObject):
"""
Returns the squared received number; for testing only. This is an offline method. Can be called before authorization
:param x: Number to square
:type x: :class:`int`
"""
ID: str = Field("testSquareInt", alias="@type")
x: int
@staticmethod
def read(q: dict) -> TestSquareInt:
return TestSquareInt.construct(**q)
```
#### File: api/functions/test_use_update.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class TestUseUpdate(BaseObject):
"""
Does nothing and ensures that the Update object is used; for testing only. This is an offline method. Can be called before authorization
"""
ID: str = Field("testUseUpdate", alias="@type")
@staticmethod
def read(q: dict) -> TestUseUpdate:
return TestUseUpdate.construct(**q)
```
#### File: api/functions/toggle_chat_is_pinned.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import ChatList
class ToggleChatIsPinned(BaseObject):
"""
    Changes the pinned state of a chat. There can be up to GetOption("pinned_chat_count_max")/GetOption("pinned_archived_chat_count_max") pinned non-secret chats and the same number of secret chats in the main/archive chat list
:param chat_list: Chat list in which to change the pinned state of the chat
:type chat_list: :class:`ChatList`
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param is_pinned: True, if the chat is pinned
:type is_pinned: :class:`bool`
"""
ID: str = Field("toggleChatIsPinned", alias="@type")
chat_list: ChatList
chat_id: int
is_pinned: bool
@staticmethod
def read(q: dict) -> ToggleChatIsPinned:
return ToggleChatIsPinned.construct(**q)
```
#### File: api/functions/toggle_group_call_screen_sharing_is_paused.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ToggleGroupCallScreenSharingIsPaused(BaseObject):
"""
Pauses or unpauses screen sharing in a joined group call
:param group_call_id: Group call identifier
:type group_call_id: :class:`int`
:param is_paused: True if screen sharing is paused
:type is_paused: :class:`bool`
"""
ID: str = Field("toggleGroupCallScreenSharingIsPaused", alias="@type")
group_call_id: int
is_paused: bool
@staticmethod
def read(q: dict) -> ToggleGroupCallScreenSharingIsPaused:
return ToggleGroupCallScreenSharingIsPaused.construct(**q)
```
#### File: api/functions/unpin_all_chat_messages.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class UnpinAllChatMessages(BaseObject):
"""
Removes all pinned messages from a chat; requires can_pin_messages rights in the group or can_edit_messages rights in the channel
:param chat_id: Identifier of the chat
:type chat_id: :class:`int`
"""
ID: str = Field("unpinAllChatMessages", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> UnpinAllChatMessages:
return UnpinAllChatMessages.construct(**q)
```
#### File: api/functions/unpin_chat_message.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class UnpinChatMessage(BaseObject):
"""
Removes a pinned message from a chat; requires can_pin_messages rights in the group or can_edit_messages rights in the channel
:param chat_id: Identifier of the chat
:type chat_id: :class:`int`
:param message_id: Identifier of the removed pinned message
:type message_id: :class:`int`
"""
ID: str = Field("unpinChatMessage", alias="@type")
chat_id: int
message_id: int
@staticmethod
def read(q: dict) -> UnpinChatMessage:
return UnpinChatMessage.construct(**q)
```
#### File: api/functions/validate_order_info.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
from ..types import OrderInfo
class ValidateOrderInfo(BaseObject):
"""
Validates the order information provided by a user and returns the available shipping options for a flexible invoice
:param chat_id: Chat identifier of the Invoice message
:type chat_id: :class:`int`
:param message_id: Message identifier
:type message_id: :class:`int`
:param order_info: The order information, provided by the user; pass null if empty
:type order_info: :class:`OrderInfo`
:param allow_save: True, if the order information can be saved
:type allow_save: :class:`bool`
"""
ID: str = Field("validateOrderInfo", alias="@type")
chat_id: int
message_id: int
order_info: OrderInfo
allow_save: bool
@staticmethod
def read(q: dict) -> ValidateOrderInfo:
return ValidateOrderInfo.construct(**q)
```
#### File: api/functions/write_generated_file_part.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class WriteGeneratedFilePart(BaseObject):
"""
Writes a part of a generated file. This method is intended to be used only if the application has no direct access to TDLib's file system, because it is usually slower than a direct write to the destination file
:param generation_id: The identifier of the generation process
:type generation_id: :class:`int`
:param offset: The offset from which to write the data to the file
:type offset: :class:`int`
:param data: The data to write
:type data: :class:`str`
"""
ID: str = Field("writeGeneratedFilePart", alias="@type")
generation_id: int
offset: int
data: str
@staticmethod
def read(q: dict) -> WriteGeneratedFilePart:
return WriteGeneratedFilePart.construct(**q)
```
#### File: api/types/account_ttl.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class AccountTtl(BaseObject):
"""
Contains information about the period of inactivity after which the current user's account will automatically be deleted
:param days: Number of days of inactivity before the account will be flagged for deletion; 30-366 days
:type days: :class:`int`
"""
ID: str = Field("accountTtl", alias="@type")
days: int
@staticmethod
def read(q: dict) -> AccountTtl:
return AccountTtl.construct(**q)
```
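A small sketch of parsing a raw TDLib response with the generated `read()` helper, assuming pydantic v1 `.construct()` semantics:
```python
ttl = AccountTtl.read({"@type": "accountTtl", "days": 180})
assert ttl.days == 180  # account flagged for deletion after 180 days of inactivity
```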
#### File: api/types/authentication_code_info.py
```python
from __future__ import annotations
import typing
from pydantic import Field
from .authentication_code_type import AuthenticationCodeType
from ..base_object import BaseObject
class AuthenticationCodeInfo(BaseObject):
"""
Information about the authentication code that was sent
:param phone_number: A phone number that is being authenticated
:type phone_number: :class:`str`
:param type_: Describes the way the code was sent to the user
:type type_: :class:`AuthenticationCodeType`
:param next_type: Describes the way the next code will be sent to the user; may be null, defaults to None
:type next_type: :class:`AuthenticationCodeType`, optional
:param timeout: Timeout before the code can be re-sent, in seconds
:type timeout: :class:`int`
"""
ID: str = Field("authenticationCodeInfo", alias="@type")
phone_number: str
type_: AuthenticationCodeType = Field(..., alias='type')
next_type: typing.Optional[AuthenticationCodeType] = None
timeout: int
@staticmethod
def read(q: dict) -> AuthenticationCodeInfo:
return AuthenticationCodeInfo.construct(**q)
```
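Optional "may be null" fields such as `next_type` simply stay `None` when omitted. A sketch below, assuming pydantic v1 `.construct()` semantics and an `AuthenticationCodeTypeSms` subclass that the TDLib schema defines elsewhere (not shown in this excerpt):
```python
info = AuthenticationCodeInfo.construct(
    phone_number="+10000000000",
    type_=AuthenticationCodeTypeSms.construct(length=5),  # assumed subclass of AuthenticationCodeType
    timeout=120,
)
assert info.next_type is None  # no alternative delivery method was offered
```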
#### File: api/types/backgrounds.py
```python
from __future__ import annotations
from pydantic import Field
from .background import Background
from ..base_object import BaseObject
class Backgrounds(BaseObject):
"""
Contains a list of backgrounds
:param backgrounds: A list of backgrounds
:type backgrounds: :class:`list[Background]`
"""
ID: str = Field("backgrounds", alias="@type")
backgrounds: list[Background]
@staticmethod
def read(q: dict) -> Backgrounds:
return Backgrounds.construct(**q)
```
#### File: api/types/background_type.py
```python
from __future__ import annotations
from pydantic import Field
from .background_fill import BackgroundFill
from ..base_object import BaseObject
class BackgroundType(BaseObject):
"""
Describes the type of a background
"""
ID: str = Field("backgroundType", alias="@type")
class BackgroundTypeFill(BackgroundType):
"""
A filled background
:param fill: Description of the background fill
:type fill: :class:`BackgroundFill`
"""
ID: str = Field("backgroundTypeFill", alias="@type")
fill: BackgroundFill
@staticmethod
def read(q: dict) -> BackgroundTypeFill:
return BackgroundTypeFill.construct(**q)
class BackgroundTypePattern(BackgroundType):
"""
A PNG or TGV (gzipped subset of SVG with MIME type "application/x-tgwallpattern") pattern to be combined with the background fill chosen by the user
:param fill: Description of the background fill
:type fill: :class:`BackgroundFill`
:param intensity: Intensity of the pattern when it is shown above the filled background; 0-100.
:type intensity: :class:`int`
:param is_inverted: True, if the background fill must be applied only to the pattern itself. All other pixels are black in this case. For dark themes only
:type is_inverted: :class:`bool`
:param is_moving: True, if the background needs to be slightly moved when device is tilted
:type is_moving: :class:`bool`
"""
ID: str = Field("backgroundTypePattern", alias="@type")
fill: BackgroundFill
intensity: int
is_inverted: bool
is_moving: bool
@staticmethod
def read(q: dict) -> BackgroundTypePattern:
return BackgroundTypePattern.construct(**q)
class BackgroundTypeWallpaper(BackgroundType):
"""
A wallpaper in JPEG format
:param is_blurred: True, if the wallpaper must be downscaled to fit in 450x450 square and then box-blurred with radius 12
:type is_blurred: :class:`bool`
:param is_moving: True, if the background needs to be slightly moved when device is tilted
:type is_moving: :class:`bool`
"""
ID: str = Field("backgroundTypeWallpaper", alias="@type")
is_blurred: bool
is_moving: bool
@staticmethod
def read(q: dict) -> BackgroundTypeWallpaper:
return BackgroundTypeWallpaper.construct(**q)
```
#### File: api/types/bot_command_scope.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class BotCommandScope(BaseObject):
"""
Represents the scope to which bot commands are relevant
"""
ID: str = Field("botCommandScope", alias="@type")
class BotCommandScopeAllChatAdministrators(BotCommandScope):
"""
A scope covering all group and supergroup chat administrators
"""
ID: str = Field("botCommandScopeAllChatAdministrators", alias="@type")
@staticmethod
def read(q: dict) -> BotCommandScopeAllChatAdministrators:
return BotCommandScopeAllChatAdministrators.construct(**q)
class BotCommandScopeAllGroupChats(BotCommandScope):
"""
A scope covering all group and supergroup chats
"""
ID: str = Field("botCommandScopeAllGroupChats", alias="@type")
@staticmethod
def read(q: dict) -> BotCommandScopeAllGroupChats:
return BotCommandScopeAllGroupChats.construct(**q)
class BotCommandScopeAllPrivateChats(BotCommandScope):
"""
A scope covering all private chats
"""
ID: str = Field("botCommandScopeAllPrivateChats", alias="@type")
@staticmethod
def read(q: dict) -> BotCommandScopeAllPrivateChats:
return BotCommandScopeAllPrivateChats.construct(**q)
class BotCommandScopeChat(BotCommandScope):
"""
A scope covering all members of a chat
:param chat_id: Chat identifier
:type chat_id: :class:`int`
"""
ID: str = Field("botCommandScopeChat", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> BotCommandScopeChat:
return BotCommandScopeChat.construct(**q)
class BotCommandScopeChatAdministrators(BotCommandScope):
"""
A scope covering all administrators of a chat
:param chat_id: Chat identifier
:type chat_id: :class:`int`
"""
ID: str = Field("botCommandScopeChatAdministrators", alias="@type")
chat_id: int
@staticmethod
def read(q: dict) -> BotCommandScopeChatAdministrators:
return BotCommandScopeChatAdministrators.construct(**q)
class BotCommandScopeChatMember(BotCommandScope):
"""
A scope covering a member of a chat
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param user_id: User identifier
:type user_id: :class:`int`
"""
ID: str = Field("botCommandScopeChatMember", alias="@type")
chat_id: int
user_id: int
@staticmethod
def read(q: dict) -> BotCommandScopeChatMember:
return BotCommandScopeChatMember.construct(**q)
class BotCommandScopeDefault(BotCommandScope):
"""
A scope covering all users
"""
ID: str = Field("botCommandScopeDefault", alias="@type")
@staticmethod
def read(q: dict) -> BotCommandScopeDefault:
return BotCommandScopeDefault.construct(**q)
```
#### File: api/types/chat_event_log_filters.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ChatEventLogFilters(BaseObject):
"""
Represents a set of filters used to obtain a chat event log
:param message_edits: True, if message edits need to be returned
:type message_edits: :class:`bool`
:param message_deletions: True, if message deletions need to be returned
:type message_deletions: :class:`bool`
:param message_pins: True, if pin/unpin events need to be returned
:type message_pins: :class:`bool`
:param member_joins: True, if members joining events need to be returned
:type member_joins: :class:`bool`
:param member_leaves: True, if members leaving events need to be returned
:type member_leaves: :class:`bool`
:param member_invites: True, if invited member events need to be returned
:type member_invites: :class:`bool`
:param member_promotions: True, if member promotion/demotion events need to be returned
:type member_promotions: :class:`bool`
:param member_restrictions: True, if member restricted/unrestricted/banned/unbanned events need to be returned
:type member_restrictions: :class:`bool`
:param info_changes: True, if changes in chat information need to be returned
:type info_changes: :class:`bool`
:param setting_changes: True, if changes in chat settings need to be returned
:type setting_changes: :class:`bool`
:param invite_link_changes: True, if changes to invite links need to be returned
:type invite_link_changes: :class:`bool`
:param voice_chat_changes: True, if voice chat actions need to be returned
:type voice_chat_changes: :class:`bool`
"""
ID: str = Field("chatEventLogFilters", alias="@type")
message_edits: bool
message_deletions: bool
message_pins: bool
member_joins: bool
member_leaves: bool
member_invites: bool
member_promotions: bool
member_restrictions: bool
info_changes: bool
setting_changes: bool
invite_link_changes: bool
voice_chat_changes: bool
@staticmethod
def read(q: dict) -> ChatEventLogFilters:
return ChatEventLogFilters.construct(**q)
```
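Since every flag above is required, a filter is built by spelling out all twelve booleans. The sketch below keeps only membership-related events (pydantic v1 assumed):
```python
filters = ChatEventLogFilters(
    message_edits=False,
    message_deletions=False,
    message_pins=False,
    member_joins=True,
    member_leaves=True,
    member_invites=True,
    member_promotions=False,
    member_restrictions=False,
    info_changes=False,
    setting_changes=False,
    invite_link_changes=False,
    voice_chat_changes=False,
)
payload = filters.dict(by_alias=True)  # ready to embed in a getChatEventLog request
```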
#### File: api/types/chat_events.py
```python
from __future__ import annotations
from pydantic import Field
from .chat_event import ChatEvent
from ..base_object import BaseObject
class ChatEvents(BaseObject):
"""
Contains a list of chat events
:param events: List of events
:type events: :class:`list[ChatEvent]`
"""
ID: str = Field("chatEvents", alias="@type")
events: list[ChatEvent]
@staticmethod
def read(q: dict) -> ChatEvents:
return ChatEvents.construct(**q)
```
#### File: api/types/chat_invite_link_count.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ChatInviteLinkCount(BaseObject):
"""
Describes a chat administrator with a number of active and revoked chat invite links
:param user_id: Administrator's user identifier
:type user_id: :class:`int`
:param invite_link_count: Number of active invite links
:type invite_link_count: :class:`int`
:param revoked_invite_link_count: Number of revoked invite links
:type revoked_invite_link_count: :class:`int`
"""
ID: str = Field("chatInviteLinkCount", alias="@type")
user_id: int
invite_link_count: int
revoked_invite_link_count: int
@staticmethod
def read(q: dict) -> ChatInviteLinkCount:
return ChatInviteLinkCount.construct(**q)
```
#### File: api/types/chat_invite_link_counts.py
```python
from __future__ import annotations
from pydantic import Field
from .chat_invite_link_count import ChatInviteLinkCount
from ..base_object import BaseObject
class ChatInviteLinkCounts(BaseObject):
"""
Contains a list of chat invite link counts
    :param invite_link_counts: List of invite link counts
:type invite_link_counts: :class:`list[ChatInviteLinkCount]`
"""
ID: str = Field("chatInviteLinkCounts", alias="@type")
invite_link_counts: list[ChatInviteLinkCount]
@staticmethod
def read(q: dict) -> ChatInviteLinkCounts:
return ChatInviteLinkCounts.construct(**q)
```
#### File: api/types/chat_photo.py
```python
from __future__ import annotations
import typing
from pydantic import Field
from .animated_chat_photo import AnimatedChatPhoto
from .minithumbnail import Minithumbnail
from .photo_size import PhotoSize
from ..base_object import BaseObject
class ChatPhoto(BaseObject):
"""
Describes a chat or user profile photo
:param id: Unique photo identifier
:type id: :class:`int`
:param added_date: Point in time (Unix timestamp) when the photo has been added
:type added_date: :class:`int`
:param minithumbnail: Photo minithumbnail; may be null, defaults to None
:type minithumbnail: :class:`Minithumbnail`, optional
    :param sizes: Available variants of the photo in JPEG format, in different sizes
:type sizes: :class:`list[PhotoSize]`
:param animation: Animated variant of the photo in MPEG4 format; may be null, defaults to None
:type animation: :class:`AnimatedChatPhoto`, optional
"""
ID: str = Field("chatPhoto", alias="@type")
id: int
added_date: int
minithumbnail: typing.Optional[Minithumbnail] = None
sizes: list[PhotoSize]
animation: typing.Optional[AnimatedChatPhoto] = None
@staticmethod
def read(q: dict) -> ChatPhoto:
return ChatPhoto.construct(**q)
```
#### File: api/types/chat_report_reason.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ChatReportReason(BaseObject):
"""
Describes the reason why a chat is reported
"""
ID: str = Field("chatReportReason", alias="@type")
class ChatReportReasonChildAbuse(ChatReportReason):
"""
The chat has child abuse related content
"""
ID: str = Field("chatReportReasonChildAbuse", alias="@type")
@staticmethod
def read(q: dict) -> ChatReportReasonChildAbuse:
return ChatReportReasonChildAbuse.construct(**q)
class ChatReportReasonCopyright(ChatReportReason):
"""
The chat contains copyrighted content
"""
ID: str = Field("chatReportReasonCopyright", alias="@type")
@staticmethod
def read(q: dict) -> ChatReportReasonCopyright:
return ChatReportReasonCopyright.construct(**q)
class ChatReportReasonCustom(ChatReportReason):
"""
A custom reason provided by the user
"""
ID: str = Field("chatReportReasonCustom", alias="@type")
@staticmethod
def read(q: dict) -> ChatReportReasonCustom:
return ChatReportReasonCustom.construct(**q)
class ChatReportReasonFake(ChatReportReason):
"""
The chat represents a fake account
"""
ID: str = Field("chatReportReasonFake", alias="@type")
@staticmethod
def read(q: dict) -> ChatReportReasonFake:
return ChatReportReasonFake.construct(**q)
class ChatReportReasonPornography(ChatReportReason):
"""
The chat contains pornographic messages
"""
ID: str = Field("chatReportReasonPornography", alias="@type")
@staticmethod
def read(q: dict) -> ChatReportReasonPornography:
return ChatReportReasonPornography.construct(**q)
class ChatReportReasonSpam(ChatReportReason):
"""
The chat contains spam messages
"""
ID: str = Field("chatReportReasonSpam", alias="@type")
@staticmethod
def read(q: dict) -> ChatReportReasonSpam:
return ChatReportReasonSpam.construct(**q)
class ChatReportReasonUnrelatedLocation(ChatReportReason):
"""
The location-based chat is unrelated to its stated location
"""
ID: str = Field("chatReportReasonUnrelatedLocation", alias="@type")
@staticmethod
def read(q: dict) -> ChatReportReasonUnrelatedLocation:
return ChatReportReasonUnrelatedLocation.construct(**q)
class ChatReportReasonViolence(ChatReportReason):
"""
The chat promotes violence
"""
ID: str = Field("chatReportReasonViolence", alias="@type")
@staticmethod
def read(q: dict) -> ChatReportReasonViolence:
return ChatReportReasonViolence.construct(**q)
```
#### File: api/types/check_chat_username_result.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class CheckChatUsernameResult(BaseObject):
"""
Represents result of checking whether a username can be set for a chat
"""
ID: str = Field("checkChatUsernameResult", alias="@type")
class CheckChatUsernameResultOk(CheckChatUsernameResult):
"""
The username can be set
"""
ID: str = Field("checkChatUsernameResultOk", alias="@type")
@staticmethod
def read(q: dict) -> CheckChatUsernameResultOk:
return CheckChatUsernameResultOk.construct(**q)
class CheckChatUsernameResultPublicChatsTooMuch(CheckChatUsernameResult):
"""
    The user has too many chats with a username; one of them must be made private first
"""
ID: str = Field("checkChatUsernameResultPublicChatsTooMuch", alias="@type")
@staticmethod
def read(q: dict) -> CheckChatUsernameResultPublicChatsTooMuch:
return CheckChatUsernameResultPublicChatsTooMuch.construct(**q)
class CheckChatUsernameResultPublicGroupsUnavailable(CheckChatUsernameResult):
"""
The user can't be a member of a public supergroup
"""
ID: str = Field("checkChatUsernameResultPublicGroupsUnavailable", alias="@type")
@staticmethod
def read(q: dict) -> CheckChatUsernameResultPublicGroupsUnavailable:
return CheckChatUsernameResultPublicGroupsUnavailable.construct(**q)
class CheckChatUsernameResultUsernameInvalid(CheckChatUsernameResult):
"""
The username is invalid
"""
ID: str = Field("checkChatUsernameResultUsernameInvalid", alias="@type")
@staticmethod
def read(q: dict) -> CheckChatUsernameResultUsernameInvalid:
return CheckChatUsernameResultUsernameInvalid.construct(**q)
class CheckChatUsernameResultUsernameOccupied(CheckChatUsernameResult):
"""
The username is occupied
"""
ID: str = Field("checkChatUsernameResultUsernameOccupied", alias="@type")
@staticmethod
def read(q: dict) -> CheckChatUsernameResultUsernameOccupied:
return CheckChatUsernameResultUsernameOccupied.construct(**q)
```
#### File: api/types/connected_websites.py
```python
from __future__ import annotations
from pydantic import Field
from .connected_website import ConnectedWebsite
from ..base_object import BaseObject
class ConnectedWebsites(BaseObject):
"""
Contains a list of websites the current user is logged in with Telegram
:param websites: List of connected websites
:type websites: :class:`list[ConnectedWebsite]`
"""
ID: str = Field("connectedWebsites", alias="@type")
websites: list[ConnectedWebsite]
@staticmethod
def read(q: dict) -> ConnectedWebsites:
return ConnectedWebsites.construct(**q)
```
#### File: api/types/contact.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class Contact(BaseObject):
"""
Describes a user contact
:param phone_number: Phone number of the user
:type phone_number: :class:`str`
:param first_name: First name of the user; 1-255 characters in length
:type first_name: :class:`str`
:param last_name: Last name of the user
:type last_name: :class:`str`
:param vcard: Additional data about the user in a form of vCard; 0-2048 bytes in length
:type vcard: :class:`str`
:param user_id: Identifier of the user, if known; otherwise 0
:type user_id: :class:`int`
"""
ID: str = Field("contact", alias="@type")
phone_number: str
first_name: str = Field(..., min_length=1, max_length=255)
last_name: str
vcard: str
user_id: int
@staticmethod
def read(q: dict) -> Contact:
return Contact.construct(**q)
```
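The `first_name` field above carries a 1-255 character constraint; a short sketch of how it behaves under pydantic v1 validation:
```python
from pydantic import ValidationError

Contact(phone_number="+10000000000", first_name="Ada", last_name="", vcard="", user_id=0)
try:
    Contact(phone_number="+10000000000", first_name="", last_name="", vcard="", user_id=0)
except ValidationError:
    pass  # rejected: first_name must be at least 1 character long
```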
#### File: api/types/dated_file.py
```python
from __future__ import annotations
from pydantic import Field
from .file import File
from ..base_object import BaseObject
class DatedFile(BaseObject):
"""
File with the date it was uploaded
:param file: The file
:type file: :class:`File`
:param date: Point in time (Unix timestamp) when the file was uploaded
:type date: :class:`int`
"""
ID: str = Field("datedFile", alias="@type")
file: File
date: int
@staticmethod
def read(q: dict) -> DatedFile:
return DatedFile.construct(**q)
```
#### File: api/types/date_range.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class DateRange(BaseObject):
"""
Represents a date range
:param start_date: Point in time (Unix timestamp) at which the date range begins
:type start_date: :class:`int`
:param end_date: Point in time (Unix timestamp) at which the date range ends
:type end_date: :class:`int`
"""
ID: str = Field("dateRange", alias="@type")
start_date: int
end_date: int
@staticmethod
def read(q: dict) -> DateRange:
return DateRange.construct(**q)
```
#### File: api/types/deep_link_info.py
```python
from __future__ import annotations
from pydantic import Field
from .formatted_text import FormattedText
from ..base_object import BaseObject
class DeepLinkInfo(BaseObject):
"""
Contains information about a tg: deep link
:param text: Text to be shown to the user
:type text: :class:`FormattedText`
:param need_update_application: True, if the user must be asked to update the application
:type need_update_application: :class:`bool`
"""
ID: str = Field("deepLinkInfo", alias="@type")
text: FormattedText
need_update_application: bool
@staticmethod
def read(q: dict) -> DeepLinkInfo:
return DeepLinkInfo.construct(**q)
```
#### File: api/types/dice_stickers.py
```python
from __future__ import annotations
from pydantic import Field
from .sticker import Sticker
from ..base_object import BaseObject
class DiceStickers(BaseObject):
"""
Contains animated stickers which must be used for dice animation rendering
"""
ID: str = Field("diceStickers", alias="@type")
class DiceStickersRegular(DiceStickers):
"""
A regular animated sticker
:param sticker: The animated sticker with the dice animation
:type sticker: :class:`Sticker`
"""
ID: str = Field("diceStickersRegular", alias="@type")
sticker: Sticker
@staticmethod
def read(q: dict) -> DiceStickersRegular:
return DiceStickersRegular.construct(**q)
class DiceStickersSlotMachine(DiceStickers):
"""
Animated stickers to be combined into a slot machine
:param background: The animated sticker with the slot machine background. The background animation must start playing after all reel animations finish
:type background: :class:`Sticker`
:param lever: The animated sticker with the lever animation. The lever animation must play once in the initial dice state
:type lever: :class:`Sticker`
:param left_reel: The animated sticker with the left reel
:type left_reel: :class:`Sticker`
:param center_reel: The animated sticker with the center reel
:type center_reel: :class:`Sticker`
:param right_reel: The animated sticker with the right reel
:type right_reel: :class:`Sticker`
"""
ID: str = Field("diceStickersSlotMachine", alias="@type")
background: Sticker
lever: Sticker
left_reel: Sticker
center_reel: Sticker
right_reel: Sticker
@staticmethod
def read(q: dict) -> DiceStickersSlotMachine:
return DiceStickersSlotMachine.construct(**q)
```
#### File: api/types/file_type.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class FileType(BaseObject):
"""
Represents the type of a file
"""
ID: str = Field("fileType", alias="@type")
class FileTypeAnimation(FileType):
"""
The file is an animation
"""
ID: str = Field("fileTypeAnimation", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeAnimation:
return FileTypeAnimation.construct(**q)
class FileTypeAudio(FileType):
"""
The file is an audio file
"""
ID: str = Field("fileTypeAudio", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeAudio:
return FileTypeAudio.construct(**q)
class FileTypeDocument(FileType):
"""
The file is a document
"""
ID: str = Field("fileTypeDocument", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeDocument:
return FileTypeDocument.construct(**q)
class FileTypeNone(FileType):
"""
The data is not a file
"""
ID: str = Field("fileTypeNone", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeNone:
return FileTypeNone.construct(**q)
class FileTypePhoto(FileType):
"""
The file is a photo
"""
ID: str = Field("fileTypePhoto", alias="@type")
@staticmethod
def read(q: dict) -> FileTypePhoto:
return FileTypePhoto.construct(**q)
class FileTypeProfilePhoto(FileType):
"""
The file is a profile photo
"""
ID: str = Field("fileTypeProfilePhoto", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeProfilePhoto:
return FileTypeProfilePhoto.construct(**q)
class FileTypeSecret(FileType):
"""
The file was sent to a secret chat (the file type is not known to the server)
"""
ID: str = Field("fileTypeSecret", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeSecret:
return FileTypeSecret.construct(**q)
class FileTypeSecretThumbnail(FileType):
"""
The file is a thumbnail of a file from a secret chat
"""
ID: str = Field("fileTypeSecretThumbnail", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeSecretThumbnail:
return FileTypeSecretThumbnail.construct(**q)
class FileTypeSecure(FileType):
"""
The file is a file from Secure storage used for storing Telegram Passport files
"""
ID: str = Field("fileTypeSecure", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeSecure:
return FileTypeSecure.construct(**q)
class FileTypeSticker(FileType):
"""
The file is a sticker
"""
ID: str = Field("fileTypeSticker", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeSticker:
return FileTypeSticker.construct(**q)
class FileTypeThumbnail(FileType):
"""
The file is a thumbnail of another file
"""
ID: str = Field("fileTypeThumbnail", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeThumbnail:
return FileTypeThumbnail.construct(**q)
class FileTypeUnknown(FileType):
"""
The file type is not yet known
"""
ID: str = Field("fileTypeUnknown", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeUnknown:
return FileTypeUnknown.construct(**q)
class FileTypeVideo(FileType):
"""
The file is a video
"""
ID: str = Field("fileTypeVideo", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeVideo:
return FileTypeVideo.construct(**q)
class FileTypeVideoNote(FileType):
"""
The file is a video note
"""
ID: str = Field("fileTypeVideoNote", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeVideoNote:
return FileTypeVideoNote.construct(**q)
class FileTypeVoiceNote(FileType):
"""
The file is a voice note
"""
ID: str = Field("fileTypeVoiceNote", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeVoiceNote:
return FileTypeVoiceNote.construct(**q)
class FileTypeWallpaper(FileType):
"""
The file is a wallpaper or a background pattern
"""
ID: str = Field("fileTypeWallpaper", alias="@type")
@staticmethod
def read(q: dict) -> FileTypeWallpaper:
return FileTypeWallpaper.construct(**q)
```
#### File: api/types/formatted_text.py
```python
from __future__ import annotations
from pydantic import Field
from .text_entity import TextEntity
from ..base_object import BaseObject
class FormattedText(BaseObject):
"""
A text with some entities
:param text: The text
:type text: :class:`str`
    :param entities: Entities contained in the text. Entities can be nested, but must not mutually intersect with each other. Pre, Code and PreCode entities can't contain other entities. Bold, Italic, Underline and Strikethrough entities can contain and be contained in all other entities. All other entities can't contain each other
:type entities: :class:`list[TextEntity]`
"""
ID: str = Field("formattedText", alias="@type")
text: str
entities: list[TextEntity]
@staticmethod
def read(q: dict) -> FormattedText:
return FormattedText.construct(**q)
```
#### File: api/types/found_messages.py
```python
from __future__ import annotations
from pydantic import Field
from .message import Message
from ..base_object import BaseObject
class FoundMessages(BaseObject):
"""
Contains a list of messages found by a search
:param total_count: Approximate total count of messages found; -1 if unknown
:type total_count: :class:`int`
:param messages: List of messages
:type messages: :class:`list[Message]`
:param next_offset: The offset for the next request. If empty, there are no more results
:type next_offset: :class:`str`
"""
ID: str = Field("foundMessages", alias="@type")
total_count: int
messages: list[Message]
next_offset: str
@staticmethod
def read(q: dict) -> FoundMessages:
return FoundMessages.construct(**q)
```
#### File: api/types/group_call_id.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class GroupCallId(BaseObject):
"""
Contains the group call identifier
:param id: Group call identifier
:type id: :class:`int`
"""
ID: str = Field("groupCallId", alias="@type")
id: int
@staticmethod
def read(q: dict) -> GroupCallId:
return GroupCallId.construct(**q)
```
#### File: api/types/group_call_participant_video_info.py
```python
from __future__ import annotations
from pydantic import Field
from .group_call_video_source_group import GroupCallVideoSourceGroup
from ..base_object import BaseObject
class GroupCallParticipantVideoInfo(BaseObject):
"""
Contains information about a group call participant's video channel
:param source_groups: List of synchronization source groups of the video
:type source_groups: :class:`list[GroupCallVideoSourceGroup]`
:param endpoint_id: Video channel endpoint identifier
:type endpoint_id: :class:`str`
:param is_paused: True if the video is paused. This flag needs to be ignored, if new video frames are received
:type is_paused: :class:`bool`
"""
ID: str = Field("groupCallParticipantVideoInfo", alias="@type")
source_groups: list[GroupCallVideoSourceGroup]
endpoint_id: str
is_paused: bool
@staticmethod
def read(q: dict) -> GroupCallParticipantVideoInfo:
return GroupCallParticipantVideoInfo.construct(**q)
```
#### File: api/types/imported_contacts.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ImportedContacts(BaseObject):
"""
Represents the result of an ImportContacts request
:param user_ids: User identifiers of the imported contacts in the same order as they were specified in the request; 0 if the contact is not yet a registered user
:type user_ids: :class:`list[int]`
:param importer_count: The number of users that imported the corresponding contact; 0 for already registered users or if unavailable
:type importer_count: :class:`list[int]`
"""
ID: str = Field("importedContacts", alias="@type")
user_ids: list[int]
importer_count: list[int]
@staticmethod
def read(q: dict) -> ImportedContacts:
return ImportedContacts.construct(**q)
```
#### File: api/types/inline_query_result.py
```python
from __future__ import annotations
import typing
from pydantic import Field
from .animation import Animation
from .audio import Audio
from .contact import Contact
from .document import Document
from .game import Game
from .location import Location
from .photo import Photo
from .sticker import Sticker
from .thumbnail import Thumbnail
from .venue import Venue
from .video import Video
from .voice_note import VoiceNote
from ..base_object import BaseObject
class InlineQueryResult(BaseObject):
"""
Represents a single result of an inline query
"""
ID: str = Field("inlineQueryResult", alias="@type")
class InlineQueryResultAnimation(InlineQueryResult):
"""
Represents an animation file
:param id: Unique identifier of the query result
:type id: :class:`str`
:param animation: Animation file
:type animation: :class:`Animation`
:param title: Animation title
:type title: :class:`str`
"""
ID: str = Field("inlineQueryResultAnimation", alias="@type")
id: str
animation: Animation
title: str
@staticmethod
def read(q: dict) -> InlineQueryResultAnimation:
return InlineQueryResultAnimation.construct(**q)
class InlineQueryResultArticle(InlineQueryResult):
"""
Represents a link to an article or web page
:param id: Unique identifier of the query result
:type id: :class:`str`
:param url: URL of the result, if it exists
:type url: :class:`str`
    :param hide_url: True, if the URL must not be shown
:type hide_url: :class:`bool`
:param title: Title of the result
:type title: :class:`str`
:param param_description: A short description of the result
:type param_description: :class:`str`
:param thumbnail: Result thumbnail in JPEG format; may be null, defaults to None
:type thumbnail: :class:`Thumbnail`, optional
"""
ID: str = Field("inlineQueryResultArticle", alias="@type")
id: str
url: str
hide_url: bool
title: str
param_description: str
thumbnail: typing.Optional[Thumbnail] = None
@staticmethod
def read(q: dict) -> InlineQueryResultArticle:
return InlineQueryResultArticle.construct(**q)
class InlineQueryResultAudio(InlineQueryResult):
"""
Represents an audio file
:param id: Unique identifier of the query result
:type id: :class:`str`
:param audio: Audio file
:type audio: :class:`Audio`
"""
ID: str = Field("inlineQueryResultAudio", alias="@type")
id: str
audio: Audio
@staticmethod
def read(q: dict) -> InlineQueryResultAudio:
return InlineQueryResultAudio.construct(**q)
class InlineQueryResultContact(InlineQueryResult):
"""
Represents a user contact
:param id: Unique identifier of the query result
:type id: :class:`str`
:param contact: A user contact
:type contact: :class:`Contact`
:param thumbnail: Result thumbnail in JPEG format; may be null, defaults to None
:type thumbnail: :class:`Thumbnail`, optional
"""
ID: str = Field("inlineQueryResultContact", alias="@type")
id: str
contact: Contact
thumbnail: typing.Optional[Thumbnail] = None
@staticmethod
def read(q: dict) -> InlineQueryResultContact:
return InlineQueryResultContact.construct(**q)
class InlineQueryResultDocument(InlineQueryResult):
"""
Represents a document
:param id: Unique identifier of the query result
:type id: :class:`str`
:param document: Document
:type document: :class:`Document`
:param title: Document title
:type title: :class:`str`
:param param_description: Document description
:type param_description: :class:`str`
"""
ID: str = Field("inlineQueryResultDocument", alias="@type")
id: str
document: Document
title: str
param_description: str
@staticmethod
def read(q: dict) -> InlineQueryResultDocument:
return InlineQueryResultDocument.construct(**q)
class InlineQueryResultGame(InlineQueryResult):
"""
Represents information about a game
:param id: Unique identifier of the query result
:type id: :class:`str`
:param game: Game result
:type game: :class:`Game`
"""
ID: str = Field("inlineQueryResultGame", alias="@type")
id: str
game: Game
@staticmethod
def read(q: dict) -> InlineQueryResultGame:
return InlineQueryResultGame.construct(**q)
class InlineQueryResultLocation(InlineQueryResult):
"""
Represents a point on the map
:param id: Unique identifier of the query result
:type id: :class:`str`
:param location: Location result
:type location: :class:`Location`
:param title: Title of the result
:type title: :class:`str`
:param thumbnail: Result thumbnail in JPEG format; may be null, defaults to None
:type thumbnail: :class:`Thumbnail`, optional
"""
ID: str = Field("inlineQueryResultLocation", alias="@type")
id: str
location: Location
title: str
thumbnail: typing.Optional[Thumbnail] = None
@staticmethod
def read(q: dict) -> InlineQueryResultLocation:
return InlineQueryResultLocation.construct(**q)
class InlineQueryResultPhoto(InlineQueryResult):
"""
Represents a photo
:param id: Unique identifier of the query result
:type id: :class:`str`
:param photo: Photo
:type photo: :class:`Photo`
:param title: Title of the result, if known
:type title: :class:`str`
:param param_description: A short description of the result, if known
:type param_description: :class:`str`
"""
ID: str = Field("inlineQueryResultPhoto", alias="@type")
id: str
photo: Photo
title: str
param_description: str
@staticmethod
def read(q: dict) -> InlineQueryResultPhoto:
return InlineQueryResultPhoto.construct(**q)
class InlineQueryResultSticker(InlineQueryResult):
"""
Represents a sticker
:param id: Unique identifier of the query result
:type id: :class:`str`
:param sticker: Sticker
:type sticker: :class:`Sticker`
"""
ID: str = Field("inlineQueryResultSticker", alias="@type")
id: str
sticker: Sticker
@staticmethod
def read(q: dict) -> InlineQueryResultSticker:
return InlineQueryResultSticker.construct(**q)
class InlineQueryResultVenue(InlineQueryResult):
"""
Represents information about a venue
:param id: Unique identifier of the query result
:type id: :class:`str`
:param venue: Venue result
:type venue: :class:`Venue`
:param thumbnail: Result thumbnail in JPEG format; may be null, defaults to None
:type thumbnail: :class:`Thumbnail`, optional
"""
ID: str = Field("inlineQueryResultVenue", alias="@type")
id: str
venue: Venue
thumbnail: typing.Optional[Thumbnail] = None
@staticmethod
def read(q: dict) -> InlineQueryResultVenue:
return InlineQueryResultVenue.construct(**q)
class InlineQueryResultVideo(InlineQueryResult):
"""
Represents a video
:param id: Unique identifier of the query result
:type id: :class:`str`
:param video: Video
:type video: :class:`Video`
:param title: Title of the video
:type title: :class:`str`
:param param_description: Description of the video
:type param_description: :class:`str`
"""
ID: str = Field("inlineQueryResultVideo", alias="@type")
id: str
video: Video
title: str
param_description: str
@staticmethod
def read(q: dict) -> InlineQueryResultVideo:
return InlineQueryResultVideo.construct(**q)
class InlineQueryResultVoiceNote(InlineQueryResult):
"""
Represents a voice note
:param id: Unique identifier of the query result
:type id: :class:`str`
:param voice_note: Voice note
:type voice_note: :class:`VoiceNote`
:param title: Title of the voice note
:type title: :class:`str`
"""
ID: str = Field("inlineQueryResultVoiceNote", alias="@type")
id: str
voice_note: VoiceNote
title: str
@staticmethod
def read(q: dict) -> InlineQueryResultVoiceNote:
return InlineQueryResultVoiceNote.construct(**q)
```
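Every concrete result carries its TDLib type name as the default of ID (wire key "@type") and exposes the same read() helper, so a client can pick the class from the type name before wrapping the payload. The dispatcher below is a hypothetical sketch of that pattern, assuming the classes above are in scope; the payload keys follow the field names declared above and are illustrative only.
```python
# Hypothetical dispatch table keyed on the TDLib "@type" name; a real client
# would cover every subclass defined above.
RESULT_CLASSES = {
    "inlineQueryResultArticle": InlineQueryResultArticle,
    "inlineQueryResultAudio": InlineQueryResultAudio,
    "inlineQueryResultPhoto": InlineQueryResultPhoto,
}
def parse_inline_result(type_name: str, payload: dict):
    # read() wraps the dict via construct(), i.e. without validation
    return RESULT_CLASSES[type_name].read(payload)
article = parse_inline_result("inlineQueryResultArticle", {
    "id": "result-1",
    "url": "https://example.org/post",
    "hide_url": False,
    "title": "Example article",
    "param_description": "A short description",
})
print(article.title, article.thumbnail)  # thumbnail falls back to its None default
```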
#### File: api/types/language_pack_strings.py
```python
from __future__ import annotations
from pydantic import Field
from .language_pack_string import LanguagePackString
from ..base_object import BaseObject
class LanguagePackStrings(BaseObject):
"""
Contains a list of language pack strings
:param strings: A list of language pack strings
:type strings: :class:`list[LanguagePackString]`
"""
ID: str = Field("languagePackStrings", alias="@type")
strings: list[LanguagePackString]
@staticmethod
def read(q: dict) -> LanguagePackStrings:
return LanguagePackStrings.construct(**q)
```
#### File: api/types/login_url_info.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class LoginUrlInfo(BaseObject):
"""
Contains information about an inline button of type inlineKeyboardButtonTypeLoginUrl
"""
ID: str = Field("loginUrlInfo", alias="@type")
class LoginUrlInfoOpen(LoginUrlInfo):
"""
    An HTTP URL needs to be opened
:param url: The URL to open
:type url: :class:`str`
    :param skip_confirm: True, if there is no need to show an ordinary open URL confirmation
:type skip_confirm: :class:`bool`
"""
ID: str = Field("loginUrlInfoOpen", alias="@type")
url: str
skip_confirm: bool
@staticmethod
def read(q: dict) -> LoginUrlInfoOpen:
return LoginUrlInfoOpen.construct(**q)
class LoginUrlInfoRequestConfirmation(LoginUrlInfo):
"""
An authorization confirmation dialog needs to be shown to the user
:param url: An HTTP URL to be opened
:type url: :class:`str`
:param domain: A domain of the URL
:type domain: :class:`str`
:param bot_user_id: User identifier of a bot linked with the website
:type bot_user_id: :class:`int`
:param request_write_access: True, if the user needs to be requested to give the permission to the bot to send them messages
:type request_write_access: :class:`bool`
"""
ID: str = Field("loginUrlInfoRequestConfirmation", alias="@type")
url: str
domain: str
bot_user_id: int
request_write_access: bool
@staticmethod
def read(q: dict) -> LoginUrlInfoRequestConfirmation:
return LoginUrlInfoRequestConfirmation.construct(**q)
```
#### File: api/types/mask_point.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class MaskPoint(BaseObject):
"""
Part of the face, relative to which a mask is placed
"""
ID: str = Field("maskPoint", alias="@type")
class MaskPointChin(MaskPoint):
"""
    The mask is placed relative to the chin
"""
ID: str = Field("maskPointChin", alias="@type")
@staticmethod
def read(q: dict) -> MaskPointChin:
return MaskPointChin.construct(**q)
class MaskPointEyes(MaskPoint):
"""
    The mask is placed relative to the eyes
"""
ID: str = Field("maskPointEyes", alias="@type")
@staticmethod
def read(q: dict) -> MaskPointEyes:
return MaskPointEyes.construct(**q)
class MaskPointForehead(MaskPoint):
"""
    The mask is placed relative to the forehead
"""
ID: str = Field("maskPointForehead", alias="@type")
@staticmethod
def read(q: dict) -> MaskPointForehead:
return MaskPointForehead.construct(**q)
class MaskPointMouth(MaskPoint):
"""
    The mask is placed relative to the mouth
"""
ID: str = Field("maskPointMouth", alias="@type")
@staticmethod
def read(q: dict) -> MaskPointMouth:
return MaskPointMouth.construct(**q)
```
#### File: api/types/mask_position.py
```python
from __future__ import annotations
from pydantic import Field
from .mask_point import MaskPoint
from ..base_object import BaseObject
class MaskPosition(BaseObject):
"""
Position on a photo where a mask is placed
:param point: Part of the face, relative to which the mask is placed
:type point: :class:`MaskPoint`
:param x_shift: Shift by X-axis measured in widths of the mask scaled to the face size, from left to right. (For example, -1.0 will place the mask just to the left of the default mask position)
:type x_shift: :class:`float`
:param y_shift: Shift by Y-axis measured in heights of the mask scaled to the face size, from top to bottom. (For example, 1.0 will place the mask just below the default mask position)
:type y_shift: :class:`float`
:param scale: Mask scaling coefficient. (For example, 2.0 means a doubled size)
:type scale: :class:`float`
"""
ID: str = Field("maskPosition", alias="@type")
point: MaskPoint
x_shift: float
y_shift: float
scale: float
@staticmethod
def read(q: dict) -> MaskPosition:
return MaskPosition.construct(**q)
```
#### File: api/types/message_file_type.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class MessageFileType(BaseObject):
"""
Contains information about a file with messages exported from another app
"""
ID: str = Field("messageFileType", alias="@type")
class MessageFileTypeGroup(MessageFileType):
"""
    The messages were exported from a group chat
:param title: Title of the group chat; may be empty if unrecognized
:type title: :class:`str`
"""
ID: str = Field("messageFileTypeGroup", alias="@type")
title: str
@staticmethod
def read(q: dict) -> MessageFileTypeGroup:
return MessageFileTypeGroup.construct(**q)
class MessageFileTypePrivate(MessageFileType):
"""
    The messages were exported from a private chat
:param name: Name of the other party; may be empty if unrecognized
:type name: :class:`str`
"""
ID: str = Field("messageFileTypePrivate", alias="@type")
name: str
@staticmethod
def read(q: dict) -> MessageFileTypePrivate:
return MessageFileTypePrivate.construct(**q)
class MessageFileTypeUnknown(MessageFileType):
"""
    The messages were exported from a chat of unknown type
"""
ID: str = Field("messageFileTypeUnknown", alias="@type")
@staticmethod
def read(q: dict) -> MessageFileTypeUnknown:
return MessageFileTypeUnknown.construct(**q)
```
#### File: api/types/message.py
```python
from __future__ import annotations
import typing
from pydantic import Field
from .message_content import MessageContent
from .message_forward_info import MessageForwardInfo
from .message_interaction_info import MessageInteractionInfo
from .message_scheduling_state import MessageSchedulingState
from .message_sender import MessageSender
from .message_sending_state import MessageSendingState
from .reply_markup import ReplyMarkup
from ..base_object import BaseObject
class Message(BaseObject):
"""
Describes a message
:param id: Message identifier; unique for the chat to which the message belongs
:type id: :class:`int`
:param sender: The sender of the message
:type sender: :class:`MessageSender`
:param chat_id: Chat identifier
:type chat_id: :class:`int`
:param sending_state: Information about the sending state of the message; may be null, defaults to None
:type sending_state: :class:`MessageSendingState`, optional
:param scheduling_state: Information about the scheduling state of the message; may be null, defaults to None
:type scheduling_state: :class:`MessageSchedulingState`, optional
:param is_outgoing: True, if the message is outgoing
:type is_outgoing: :class:`bool`
:param is_pinned: True, if the message is pinned
:type is_pinned: :class:`bool`
:param can_be_edited: True, if the message can be edited. For live location and poll messages this fields shows whether editMessageLiveLocation or stopPoll can be used with this message by the application
:type can_be_edited: :class:`bool`
:param can_be_forwarded: True, if the message can be forwarded
:type can_be_forwarded: :class:`bool`
:param can_be_deleted_only_for_self: True, if the message can be deleted only for the current user while other users will continue to see it
:type can_be_deleted_only_for_self: :class:`bool`
:param can_be_deleted_for_all_users: True, if the message can be deleted for all users
:type can_be_deleted_for_all_users: :class:`bool`
:param can_get_statistics: True, if the message statistics are available
:type can_get_statistics: :class:`bool`
:param can_get_message_thread: True, if the message thread info is available
:type can_get_message_thread: :class:`bool`
:param can_get_viewers: True, if chat members already viewed the message can be received through getMessageViewers
:type can_get_viewers: :class:`bool`
:param can_get_media_timestamp_links: True, if media timestamp links can be generated for media timestamp entities in the message text, caption or web page description
:type can_get_media_timestamp_links: :class:`bool`
    :param has_timestamped_media: True, if media timestamp entities refer to media in this message rather than to media in the replied message
:type has_timestamped_media: :class:`bool`
:param is_channel_post: True, if the message is a channel post. All messages to channels are channel posts, all other messages are not channel posts
:type is_channel_post: :class:`bool`
:param contains_unread_mention: True, if the message contains an unread mention for the current user
:type contains_unread_mention: :class:`bool`
:param date: Point in time (Unix timestamp) when the message was sent
:type date: :class:`int`
:param edit_date: Point in time (Unix timestamp) when the message was last edited
:type edit_date: :class:`int`
:param forward_info: Information about the initial message sender; may be null, defaults to None
:type forward_info: :class:`MessageForwardInfo`, optional
:param interaction_info: Information about interactions with the message; may be null, defaults to None
:type interaction_info: :class:`MessageInteractionInfo`, optional
    :param reply_in_chat_id: If non-zero, the identifier of the chat to which the replied message belongs; currently, only messages in the Replies chat can have different reply_in_chat_id and chat_id
:type reply_in_chat_id: :class:`int`
:param reply_to_message_id: If non-zero, the identifier of the message this message is replying to; can be the identifier of a deleted message
:type reply_to_message_id: :class:`int`
:param message_thread_id: If non-zero, the identifier of the message thread the message belongs to; unique within the chat to which the message belongs
:type message_thread_id: :class:`int`
:param ttl: For self-destructing messages, the message's TTL (Time To Live), in seconds; 0 if none. TDLib will send updateDeleteMessages or updateMessageContent once the TTL expires
:type ttl: :class:`int`
:param ttl_expires_in: Time left before the message expires, in seconds. If the TTL timer isn't started yet, equals to the value of the ttl field
:type ttl_expires_in: :class:`float`
:param via_bot_user_id: If non-zero, the user identifier of the bot through which this message was sent
:type via_bot_user_id: :class:`int`
:param author_signature: For channel posts and anonymous group messages, optional author signature
:type author_signature: :class:`str`
:param media_album_id: Unique identifier of an album this message belongs to. Only audios, documents, photos and videos can be grouped together in albums
:type media_album_id: :class:`int`
:param restriction_reason: If non-empty, contains a human-readable description of the reason why access to this message must be restricted
:type restriction_reason: :class:`str`
:param content: Content of the message
:type content: :class:`MessageContent`
:param reply_markup: Reply markup for the message; may be null, defaults to None
:type reply_markup: :class:`ReplyMarkup`, optional
"""
ID: str = Field("message", alias="@type")
id: int
sender: MessageSender
chat_id: int
sending_state: typing.Optional[MessageSendingState] = None
scheduling_state: typing.Optional[MessageSchedulingState] = None
is_outgoing: bool
is_pinned: bool
can_be_edited: bool
can_be_forwarded: bool
can_be_deleted_only_for_self: bool
can_be_deleted_for_all_users: bool
can_get_statistics: bool
can_get_message_thread: bool
can_get_viewers: bool
can_get_media_timestamp_links: bool
has_timestamped_media: bool
is_channel_post: bool
contains_unread_mention: bool
date: int
edit_date: int
forward_info: typing.Optional[MessageForwardInfo] = None
interaction_info: typing.Optional[MessageInteractionInfo] = None
reply_in_chat_id: int
reply_to_message_id: int
message_thread_id: int
ttl: int
ttl_expires_in: float
via_bot_user_id: int
author_signature: str
media_album_id: int
restriction_reason: str
content: MessageContent
reply_markup: typing.Optional[ReplyMarkup] = None
@staticmethod
def read(q: dict) -> Message:
return Message.construct(**q)
```
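Most of the fields above are required scalars, while sending_state, scheduling_state, forward_info, interaction_info and reply_markup are the optional pieces that default to None. The snippet below is a minimal sketch, assuming the Message class above is in scope; the payload is heavily truncated and purely illustrative, and read() wraps it without validation.
```python
msg = Message.read({
    "id": 42,
    "chat_id": -1001234567890,
    "is_outgoing": False,
    "date": 1_650_000_000,
})
if msg.scheduling_state is None and msg.reply_markup is None:
    print(f"message {msg.id} in chat {msg.chat_id} has no schedule and no keyboard")
```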
#### File: api/types/message_sending_state.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class MessageSendingState(BaseObject):
"""
Contains information about the sending state of the message
"""
ID: str = Field("messageSendingState", alias="@type")
class MessageSendingStateFailed(MessageSendingState):
"""
The message failed to be sent
:param error_code: An error code; 0 if unknown
:type error_code: :class:`int`
:param error_message: Error message
:type error_message: :class:`str`
:param can_retry: True, if the message can be re-sent
:type can_retry: :class:`bool`
:param retry_after: Time left before the message can be re-sent, in seconds. No update is sent when this field changes
:type retry_after: :class:`float`
"""
ID: str = Field("messageSendingStateFailed", alias="@type")
error_code: int
error_message: str
can_retry: bool
retry_after: float
@staticmethod
def read(q: dict) -> MessageSendingStateFailed:
return MessageSendingStateFailed.construct(**q)
class MessageSendingStatePending(MessageSendingState):
"""
The message is being sent now, but has not yet been delivered to the server
"""
ID: str = Field("messageSendingStatePending", alias="@type")
@staticmethod
def read(q: dict) -> MessageSendingStatePending:
return MessageSendingStatePending.construct(**q)
```
#### File: api/types/minithumbnail.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class Minithumbnail(BaseObject):
"""
Thumbnail image of a very poor quality and low resolution
:param width: Thumbnail width, usually doesn't exceed 40
:type width: :class:`int`
:param height: Thumbnail height, usually doesn't exceed 40
:type height: :class:`int`
:param data: The thumbnail in JPEG format
:type data: :class:`str`
"""
ID: str = Field("minithumbnail", alias="@type")
width: int
height: int
data: str
@staticmethod
def read(q: dict) -> Minithumbnail:
return Minithumbnail.construct(**q)
```
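Although data is typed as str, TDLib's JSON interface transports bytes fields as base64-encoded strings, so the value has to be decoded before it can be treated as a JPEG. A minimal sketch under that assumption:
```python
import base64
def minithumbnail_to_jpeg(thumb: "Minithumbnail") -> bytes:
    # data is assumed to be the base64 text emitted by TDLib's JSON interface
    return base64.b64decode(thumb.data)
```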
#### File: api/types/network_statistics.py
```python
from __future__ import annotations
from pydantic import Field
from .network_statistics_entry import NetworkStatisticsEntry
from ..base_object import BaseObject
class NetworkStatistics(BaseObject):
"""
A full list of available network statistic entries
:param since_date: Point in time (Unix timestamp) from which the statistics are collected
:type since_date: :class:`int`
:param entries: Network statistics entries
:type entries: :class:`list[NetworkStatisticsEntry]`
"""
ID: str = Field("networkStatistics", alias="@type")
since_date: int
entries: list[NetworkStatisticsEntry]
@staticmethod
def read(q: dict) -> NetworkStatistics:
return NetworkStatistics.construct(**q)
```
#### File: api/types/notification_group_type.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class NotificationGroupType(BaseObject):
"""
Describes the type of notifications in a notification group
"""
ID: str = Field("notificationGroupType", alias="@type")
class NotificationGroupTypeCalls(NotificationGroupType):
"""
A group containing notifications of type notificationTypeNewCall
"""
ID: str = Field("notificationGroupTypeCalls", alias="@type")
@staticmethod
def read(q: dict) -> NotificationGroupTypeCalls:
return NotificationGroupTypeCalls.construct(**q)
class NotificationGroupTypeMentions(NotificationGroupType):
"""
A group containing notifications of type notificationTypeNewMessage and notificationTypeNewPushMessage with unread mentions of the current user, replies to their messages, or a pinned message
"""
ID: str = Field("notificationGroupTypeMentions", alias="@type")
@staticmethod
def read(q: dict) -> NotificationGroupTypeMentions:
return NotificationGroupTypeMentions.construct(**q)
class NotificationGroupTypeMessages(NotificationGroupType):
"""
A group containing notifications of type notificationTypeNewMessage and notificationTypeNewPushMessage with ordinary unread messages
"""
ID: str = Field("notificationGroupTypeMessages", alias="@type")
@staticmethod
def read(q: dict) -> NotificationGroupTypeMessages:
return NotificationGroupTypeMessages.construct(**q)
class NotificationGroupTypeSecretChat(NotificationGroupType):
"""
A group containing a notification of type notificationTypeNewSecretChat
"""
ID: str = Field("notificationGroupTypeSecretChat", alias="@type")
@staticmethod
def read(q: dict) -> NotificationGroupTypeSecretChat:
return NotificationGroupTypeSecretChat.construct(**q)
```
#### File: api/types/page_block_caption.py
```python
from __future__ import annotations
from pydantic import Field
from .rich_text import RichText
from ..base_object import BaseObject
class PageBlockCaption(BaseObject):
"""
Contains a caption of an instant view web page block, consisting of a text and a trailing credit
:param text: Content of the caption
:type text: :class:`RichText`
:param credit: Block credit (like HTML tag <cite>)
:type credit: :class:`RichText`
"""
ID: str = Field("pageBlockCaption", alias="@type")
text: RichText
credit: RichText
@staticmethod
def read(q: dict) -> PageBlockCaption:
return PageBlockCaption.construct(**q)
```
#### File: api/types/passport_authorization_form.py
```python
from __future__ import annotations
from pydantic import Field
from .passport_required_element import PassportRequiredElement
from ..base_object import BaseObject
class PassportAuthorizationForm(BaseObject):
"""
Contains information about a Telegram Passport authorization form that was requested
:param id: Unique identifier of the authorization form
:type id: :class:`int`
:param required_elements: Information about the Telegram Passport elements that must be provided to complete the form
:type required_elements: :class:`list[PassportRequiredElement]`
:param privacy_policy_url: URL for the privacy policy of the service; may be empty
:type privacy_policy_url: :class:`str`
"""
ID: str = Field("passportAuthorizationForm", alias="@type")
id: int
required_elements: list[PassportRequiredElement]
privacy_policy_url: str
@staticmethod
def read(q: dict) -> PassportAuthorizationForm:
return PassportAuthorizationForm.construct(**q)
```
#### File: api/types/passport_element.py
```python
from __future__ import annotations
from pydantic import Field
from .address import Address
from .identity_document import IdentityDocument
from .personal_details import PersonalDetails
from .personal_document import PersonalDocument
from ..base_object import BaseObject
class PassportElement(BaseObject):
"""
Contains information about a Telegram Passport element
"""
ID: str = Field("passportElement", alias="@type")
class PassportElementAddress(PassportElement):
"""
A Telegram Passport element containing the user's address
:param address: Address
:type address: :class:`Address`
"""
ID: str = Field("passportElementAddress", alias="@type")
address: Address
@staticmethod
def read(q: dict) -> PassportElementAddress:
return PassportElementAddress.construct(**q)
class PassportElementBankStatement(PassportElement):
"""
A Telegram Passport element containing the user's bank statement
:param bank_statement: Bank statement
:type bank_statement: :class:`PersonalDocument`
"""
ID: str = Field("passportElementBankStatement", alias="@type")
bank_statement: PersonalDocument
@staticmethod
def read(q: dict) -> PassportElementBankStatement:
return PassportElementBankStatement.construct(**q)
class PassportElementDriverLicense(PassportElement):
"""
A Telegram Passport element containing the user's driver license
:param driver_license: Driver license
:type driver_license: :class:`IdentityDocument`
"""
ID: str = Field("passportElementDriverLicense", alias="@type")
driver_license: IdentityDocument
@staticmethod
def read(q: dict) -> PassportElementDriverLicense:
return PassportElementDriverLicense.construct(**q)
class PassportElementEmailAddress(PassportElement):
"""
A Telegram Passport element containing the user's email address
:param email_address: Email address
:type email_address: :class:`str`
"""
ID: str = Field("passportElementEmailAddress", alias="@type")
email_address: str
@staticmethod
def read(q: dict) -> PassportElementEmailAddress:
return PassportElementEmailAddress.construct(**q)
class PassportElementIdentityCard(PassportElement):
"""
A Telegram Passport element containing the user's identity card
:param identity_card: Identity card
:type identity_card: :class:`IdentityDocument`
"""
ID: str = Field("passportElementIdentityCard", alias="@type")
identity_card: IdentityDocument
@staticmethod
def read(q: dict) -> PassportElementIdentityCard:
return PassportElementIdentityCard.construct(**q)
class PassportElementInternalPassport(PassportElement):
"""
A Telegram Passport element containing the user's internal passport
:param internal_passport: Internal passport
:type internal_passport: :class:`IdentityDocument`
"""
ID: str = Field("passportElementInternalPassport", alias="@type")
internal_passport: IdentityDocument
@staticmethod
def read(q: dict) -> PassportElementInternalPassport:
return PassportElementInternalPassport.construct(**q)
class PassportElementPassport(PassportElement):
"""
A Telegram Passport element containing the user's passport
:param passport: Passport
:type passport: :class:`IdentityDocument`
"""
ID: str = Field("passportElementPassport", alias="@type")
passport: IdentityDocument
@staticmethod
def read(q: dict) -> PassportElementPassport:
return PassportElementPassport.construct(**q)
class PassportElementPassportRegistration(PassportElement):
"""
A Telegram Passport element containing the user's passport registration pages
:param passport_registration: Passport registration pages
:type passport_registration: :class:`PersonalDocument`
"""
ID: str = Field("passportElementPassportRegistration", alias="@type")
passport_registration: PersonalDocument
@staticmethod
def read(q: dict) -> PassportElementPassportRegistration:
return PassportElementPassportRegistration.construct(**q)
class PassportElementPersonalDetails(PassportElement):
"""
A Telegram Passport element containing the user's personal details
:param personal_details: Personal details of the user
:type personal_details: :class:`PersonalDetails`
"""
ID: str = Field("passportElementPersonalDetails", alias="@type")
personal_details: PersonalDetails
@staticmethod
def read(q: dict) -> PassportElementPersonalDetails:
return PassportElementPersonalDetails.construct(**q)
class PassportElementPhoneNumber(PassportElement):
"""
A Telegram Passport element containing the user's phone number
:param phone_number: Phone number
:type phone_number: :class:`str`
"""
ID: str = Field("passportElementPhoneNumber", alias="@type")
phone_number: str
@staticmethod
def read(q: dict) -> PassportElementPhoneNumber:
return PassportElementPhoneNumber.construct(**q)
class PassportElementRentalAgreement(PassportElement):
"""
A Telegram Passport element containing the user's rental agreement
:param rental_agreement: Rental agreement
:type rental_agreement: :class:`PersonalDocument`
"""
ID: str = Field("passportElementRentalAgreement", alias="@type")
rental_agreement: PersonalDocument
@staticmethod
def read(q: dict) -> PassportElementRentalAgreement:
return PassportElementRentalAgreement.construct(**q)
class PassportElementTemporaryRegistration(PassportElement):
"""
A Telegram Passport element containing the user's temporary registration
:param temporary_registration: Temporary registration
:type temporary_registration: :class:`PersonalDocument`
"""
ID: str = Field("passportElementTemporaryRegistration", alias="@type")
temporary_registration: PersonalDocument
@staticmethod
def read(q: dict) -> PassportElementTemporaryRegistration:
return PassportElementTemporaryRegistration.construct(**q)
class PassportElementUtilityBill(PassportElement):
"""
A Telegram Passport element containing the user's utility bill
:param utility_bill: Utility bill
:type utility_bill: :class:`PersonalDocument`
"""
ID: str = Field("passportElementUtilityBill", alias="@type")
utility_bill: PersonalDocument
@staticmethod
def read(q: dict) -> PassportElementUtilityBill:
return PassportElementUtilityBill.construct(**q)
```
#### File: api/types/passport_required_element.py
```python
from __future__ import annotations
from pydantic import Field
from .passport_suitable_element import PassportSuitableElement
from ..base_object import BaseObject
class PassportRequiredElement(BaseObject):
"""
Contains a description of the required Telegram Passport element that was requested by a service
:param suitable_elements: List of Telegram Passport elements any of which is enough to provide
:type suitable_elements: :class:`list[PassportSuitableElement]`
"""
ID: str = Field("passportRequiredElement", alias="@type")
suitable_elements: list[PassportSuitableElement]
@staticmethod
def read(q: dict) -> PassportRequiredElement:
return PassportRequiredElement.construct(**q)
```
#### File: api/types/passport_suitable_element.py
```python
from __future__ import annotations
from pydantic import Field
from .passport_element_type import PassportElementType
from ..base_object import BaseObject
class PassportSuitableElement(BaseObject):
"""
Contains information about a Telegram Passport element that was requested by a service
:param type_: Type of the element
:type type_: :class:`PassportElementType`
:param is_selfie_required: True, if a selfie is required with the identity document
:type is_selfie_required: :class:`bool`
:param is_translation_required: True, if a certified English translation is required with the document
:type is_translation_required: :class:`bool`
:param is_native_name_required: True, if personal details must include the user's name in the language of their country of residence
:type is_native_name_required: :class:`bool`
"""
ID: str = Field("passportSuitableElement", alias="@type")
type_: PassportElementType = Field(..., alias='type')
is_selfie_required: bool
is_translation_required: bool
is_native_name_required: bool
@staticmethod
def read(q: dict) -> PassportSuitableElement:
return PassportSuitableElement.construct(**q)
```
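type would shadow the Python builtin, so the generator stores the field as type_ and keeps the original wire key through alias='type'; the same trailing-underscore convention appears below in Proxy, RecommendedChatFilter and TMeUrl. A minimal sketch, assuming the class above is in scope; construct() stores attributes under their Python field names, and None stands in for a real PassportElementType instance.
```python
element = PassportSuitableElement.construct(
    type_=None,  # placeholder for a PassportElementType value
    is_selfie_required=True,
    is_translation_required=False,
    is_native_name_required=False,
)
print(element.type_, element.is_selfie_required)
```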
#### File: api/types/payments_provider_stripe.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class PaymentsProviderStripe(BaseObject):
"""
Stripe payment provider
:param publishable_key: Stripe API publishable key
:type publishable_key: :class:`str`
:param need_country: True, if the user country must be provided
:type need_country: :class:`bool`
:param need_postal_code: True, if the user ZIP/postal code must be provided
:type need_postal_code: :class:`bool`
:param need_cardholder_name: True, if the cardholder name must be provided
:type need_cardholder_name: :class:`bool`
"""
ID: str = Field("paymentsProviderStripe", alias="@type")
publishable_key: str
need_country: bool
need_postal_code: bool
need_cardholder_name: bool
@staticmethod
def read(q: dict) -> PaymentsProviderStripe:
return PaymentsProviderStripe.construct(**q)
```
#### File: api/types/point.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class Point(BaseObject):
"""
A point on a Cartesian plane
:param x: The point's first coordinate
:type x: :class:`float`
:param y: The point's second coordinate
:type y: :class:`float`
"""
ID: str = Field("point", alias="@type")
x: float
y: float
@staticmethod
def read(q: dict) -> Point:
return Point.construct(**q)
```
#### File: api/types/proxy.py
```python
from __future__ import annotations
from pydantic import Field
from .proxy_type import ProxyType
from ..base_object import BaseObject
class Proxy(BaseObject):
"""
Contains information about a proxy server
:param id: Unique identifier of the proxy
:type id: :class:`int`
:param server: Proxy server IP address
:type server: :class:`str`
:param port: Proxy server port
:type port: :class:`int`
:param last_used_date: Point in time (Unix timestamp) when the proxy was last used; 0 if never
:type last_used_date: :class:`int`
:param is_enabled: True, if the proxy is enabled now
:type is_enabled: :class:`bool`
:param type_: Type of the proxy
:type type_: :class:`ProxyType`
"""
ID: str = Field("proxy", alias="@type")
id: int
server: str
port: int
last_used_date: int
is_enabled: bool
type_: ProxyType = Field(..., alias='type')
@staticmethod
def read(q: dict) -> Proxy:
return Proxy.construct(**q)
```
#### File: api/types/proxy_type.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ProxyType(BaseObject):
"""
Describes the type of a proxy server
"""
ID: str = Field("proxyType", alias="@type")
class ProxyTypeHttp(ProxyType):
"""
    An HTTP transparent proxy server
:param username: Username for logging in; may be empty
:type username: :class:`str`
:param password: Password for logging in; may be empty
:type password: :class:`str`
:param http_only: Pass true if the proxy supports only HTTP requests and doesn't support transparent TCP connections via HTTP CONNECT method
:type http_only: :class:`bool`
"""
ID: str = Field("proxyTypeHttp", alias="@type")
username: str
password: str
http_only: bool
@staticmethod
def read(q: dict) -> ProxyTypeHttp:
return ProxyTypeHttp.construct(**q)
class ProxyTypeMtproto(ProxyType):
"""
An MTProto proxy server
:param secret: The proxy's secret in hexadecimal encoding
:type secret: :class:`str`
"""
ID: str = Field("proxyTypeMtproto", alias="@type")
secret: str
@staticmethod
def read(q: dict) -> ProxyTypeMtproto:
return ProxyTypeMtproto.construct(**q)
class ProxyTypeSocks5(ProxyType):
"""
A SOCKS5 proxy server
:param username: Username for logging in; may be empty
:type username: :class:`str`
:param password: Password for logging in; may be empty
:type password: :class:`str`
"""
ID: str = Field("proxyTypeSocks5", alias="@type")
username: str
password: str
@staticmethod
def read(q: dict) -> ProxyTypeSocks5:
return ProxyTypeSocks5.construct(**q)
```
#### File: api/types/recommended_chat_filter.py
```python
from __future__ import annotations
from pydantic import Field
from .chat_filter import ChatFilter
from ..base_object import BaseObject
class RecommendedChatFilter(BaseObject):
"""
Describes a recommended chat filter
:param filter_: The chat filter
:type filter_: :class:`ChatFilter`
:param param_description: Chat filter description
:type param_description: :class:`str`
"""
ID: str = Field("recommendedChatFilter", alias="@type")
filter_: ChatFilter = Field(..., alias='filter')
param_description: str
@staticmethod
def read(q: dict) -> RecommendedChatFilter:
return RecommendedChatFilter.construct(**q)
```
#### File: api/types/reply_markup.py
```python
from __future__ import annotations
import typing
from pydantic import Field
from .inline_keyboard_button import InlineKeyboardButton
from .keyboard_button import KeyboardButton
from ..base_object import BaseObject
class ReplyMarkup(BaseObject):
"""
Contains a description of a custom keyboard and actions that can be done with it to quickly reply to bots
"""
ID: str = Field("replyMarkup", alias="@type")
class ReplyMarkupForceReply(ReplyMarkup):
"""
    Instructs the application to force a reply to this message
:param is_personal: True, if a forced reply must automatically be shown to the current user. For outgoing messages, specify true to show the forced reply only for the mentioned users and for the target user of a reply
:type is_personal: :class:`bool`
:param input_field_placeholder: If non-empty, the placeholder to be shown in the input field when the reply is active; 0-64 characters, defaults to None
:type input_field_placeholder: :class:`str`, optional
"""
ID: str = Field("replyMarkupForceReply", alias="@type")
is_personal: bool
input_field_placeholder: typing.Optional[str] = Field(None, max_length=64)
@staticmethod
def read(q: dict) -> ReplyMarkupForceReply:
return ReplyMarkupForceReply.construct(**q)
class ReplyMarkupInlineKeyboard(ReplyMarkup):
"""
Contains an inline keyboard layout
:param rows: A list of rows of inline keyboard buttons
:type rows: :class:`list[list[InlineKeyboardButton]]`
"""
ID: str = Field("replyMarkupInlineKeyboard", alias="@type")
rows: list[list[InlineKeyboardButton]]
@staticmethod
def read(q: dict) -> ReplyMarkupInlineKeyboard:
return ReplyMarkupInlineKeyboard.construct(**q)
class ReplyMarkupRemoveKeyboard(ReplyMarkup):
"""
    Instructs the application to remove the keyboard once this message has been received. This kind of keyboard can't be received in an incoming message; instead, UpdateChatReplyMarkup with message_id == 0 will be sent
:param is_personal: True, if the keyboard is removed only for the mentioned users or the target user of a reply
:type is_personal: :class:`bool`
"""
ID: str = Field("replyMarkupRemoveKeyboard", alias="@type")
is_personal: bool
@staticmethod
def read(q: dict) -> ReplyMarkupRemoveKeyboard:
return ReplyMarkupRemoveKeyboard.construct(**q)
class ReplyMarkupShowKeyboard(ReplyMarkup):
"""
Contains a custom keyboard layout to quickly reply to bots
:param rows: A list of rows of bot keyboard buttons
:type rows: :class:`list[list[KeyboardButton]]`
:param resize_keyboard: True, if the application needs to resize the keyboard vertically
:type resize_keyboard: :class:`bool`
:param one_time: True, if the application needs to hide the keyboard after use
:type one_time: :class:`bool`
:param is_personal: True, if the keyboard must automatically be shown to the current user. For outgoing messages, specify true to show the keyboard only for the mentioned users and for the target user of a reply
:type is_personal: :class:`bool`
:param input_field_placeholder: If non-empty, the placeholder to be shown in the input field when the keyboard is active; 0-64 characters, defaults to None
:type input_field_placeholder: :class:`str`, optional
"""
ID: str = Field("replyMarkupShowKeyboard", alias="@type")
rows: list[list[KeyboardButton]]
resize_keyboard: bool
one_time: bool
is_personal: bool
input_field_placeholder: typing.Optional[str] = Field(None, max_length=64)
@staticmethod
def read(q: dict) -> ReplyMarkupShowKeyboard:
return ReplyMarkupShowKeyboard.construct(**q)
```
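input_field_placeholder is the only constrained field here: optional, but capped at 64 characters via Field(None, max_length=64). The limit is enforced only when the model is built through pydantic's validating constructor; read()/construct() accept any length. A minimal sketch, assuming the classes above are in scope and that BaseObject keeps pydantic's default validation behaviour.
```python
from pydantic import ValidationError
markup = ReplyMarkupForceReply(is_personal=True, input_field_placeholder="Reply here")
print(markup.input_field_placeholder)
try:
    ReplyMarkupForceReply(is_personal=True, input_field_placeholder="x" * 65)
except ValidationError as err:
    # the 64-character limit declared above rejects the oversized placeholder
    print("placeholder rejected:", err.errors()[0]["msg"])
```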
#### File: api/types/sponsored_message.py
```python
from __future__ import annotations
import typing
from pydantic import Field
from .internal_link_type import InternalLinkType
from .message_content import MessageContent
from ..base_object import BaseObject
class SponsoredMessage(BaseObject):
"""
Describes a sponsored message
:param id: Unique sponsored message identifier
:type id: :class:`int`
:param sponsor_chat_id: Chat identifier
:type sponsor_chat_id: :class:`int`
:param link: An internal link to be opened when the sponsored message is clicked; may be null. If null, the sponsor chat needs to be opened instead, defaults to None
:type link: :class:`InternalLinkType`, optional
:param content: Content of the message
:type content: :class:`MessageContent`
"""
ID: str = Field("sponsoredMessage", alias="@type")
id: int
sponsor_chat_id: int
link: typing.Optional[InternalLinkType] = None
content: MessageContent
@staticmethod
def read(q: dict) -> SponsoredMessage:
return SponsoredMessage.construct(**q)
```
#### File: api/types/storage_statistics_by_file_type.py
```python
from __future__ import annotations
from pydantic import Field
from .file_type import FileType
from ..base_object import BaseObject
class StorageStatisticsByFileType(BaseObject):
"""
Contains the storage usage statistics for a specific file type
:param file_type: File type
:type file_type: :class:`FileType`
:param size: Total size of the files, in bytes
:type size: :class:`int`
:param count: Total number of files
:type count: :class:`int`
"""
ID: str = Field("storageStatisticsByFileType", alias="@type")
file_type: FileType
size: int
count: int
@staticmethod
def read(q: dict) -> StorageStatisticsByFileType:
return StorageStatisticsByFileType.construct(**q)
```
#### File: api/types/storage_statistics.py
```python
from __future__ import annotations
from pydantic import Field
from .storage_statistics_by_chat import StorageStatisticsByChat
from ..base_object import BaseObject
class StorageStatistics(BaseObject):
"""
Contains the exact storage usage statistics split by chats and file type
:param size: Total size of files, in bytes
:type size: :class:`int`
:param count: Total number of files
:type count: :class:`int`
:param by_chat: Statistics split by chats
:type by_chat: :class:`list[StorageStatisticsByChat]`
"""
ID: str = Field("storageStatistics", alias="@type")
size: int
count: int
by_chat: list[StorageStatisticsByChat]
@staticmethod
def read(q: dict) -> StorageStatistics:
return StorageStatistics.construct(**q)
```
#### File: api/types/supergroup_members_filter.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class SupergroupMembersFilter(BaseObject):
"""
Specifies the kind of chat members to return in getSupergroupMembers
"""
ID: str = Field("supergroupMembersFilter", alias="@type")
class SupergroupMembersFilterAdministrators(SupergroupMembersFilter):
"""
Returns the owner and administrators
"""
ID: str = Field("supergroupMembersFilterAdministrators", alias="@type")
@staticmethod
def read(q: dict) -> SupergroupMembersFilterAdministrators:
return SupergroupMembersFilterAdministrators.construct(**q)
class SupergroupMembersFilterBanned(SupergroupMembersFilter):
"""
Returns users banned from the supergroup or channel; can be used only by administrators
:param query: Query to search for
:type query: :class:`str`
"""
ID: str = Field("supergroupMembersFilterBanned", alias="@type")
query: str
@staticmethod
def read(q: dict) -> SupergroupMembersFilterBanned:
return SupergroupMembersFilterBanned.construct(**q)
class SupergroupMembersFilterBots(SupergroupMembersFilter):
"""
Returns bot members of the supergroup or channel
"""
ID: str = Field("supergroupMembersFilterBots", alias="@type")
@staticmethod
def read(q: dict) -> SupergroupMembersFilterBots:
return SupergroupMembersFilterBots.construct(**q)
class SupergroupMembersFilterContacts(SupergroupMembersFilter):
"""
Returns contacts of the user, which are members of the supergroup or channel
:param query: Query to search for
:type query: :class:`str`
"""
ID: str = Field("supergroupMembersFilterContacts", alias="@type")
query: str
@staticmethod
def read(q: dict) -> SupergroupMembersFilterContacts:
return SupergroupMembersFilterContacts.construct(**q)
class SupergroupMembersFilterMention(SupergroupMembersFilter):
"""
    Returns users who can be mentioned in the supergroup
:param query: Query to search for
:type query: :class:`str`
:param message_thread_id: If non-zero, the identifier of the current message thread
:type message_thread_id: :class:`int`
"""
ID: str = Field("supergroupMembersFilterMention", alias="@type")
query: str
message_thread_id: int
@staticmethod
def read(q: dict) -> SupergroupMembersFilterMention:
return SupergroupMembersFilterMention.construct(**q)
class SupergroupMembersFilterRecent(SupergroupMembersFilter):
"""
Returns recently active users in reverse chronological order
"""
ID: str = Field("supergroupMembersFilterRecent", alias="@type")
@staticmethod
def read(q: dict) -> SupergroupMembersFilterRecent:
return SupergroupMembersFilterRecent.construct(**q)
class SupergroupMembersFilterRestricted(SupergroupMembersFilter):
"""
Returns restricted supergroup members; can be used only by administrators
:param query: Query to search for
:type query: :class:`str`
"""
ID: str = Field("supergroupMembersFilterRestricted", alias="@type")
query: str
@staticmethod
def read(q: dict) -> SupergroupMembersFilterRestricted:
return SupergroupMembersFilterRestricted.construct(**q)
class SupergroupMembersFilterSearch(SupergroupMembersFilter):
"""
Used to search for supergroup or channel members via a (string) query
:param query: Query to search for
:type query: :class:`str`
"""
ID: str = Field("supergroupMembersFilterSearch", alias="@type")
query: str
@staticmethod
def read(q: dict) -> SupergroupMembersFilterSearch:
return SupergroupMembersFilterSearch.construct(**q)
```
#### File: api/types/test_vector_string.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class TestVectorString(BaseObject):
"""
A simple object containing a vector of strings; for testing only
:param value: Vector of strings
:type value: :class:`list[str]`
"""
ID: str = Field("testVectorString", alias="@type")
value: list[str]
@staticmethod
def read(q: dict) -> TestVectorString:
return TestVectorString.construct(**q)
```
#### File: api/types/text_entities.py
```python
from __future__ import annotations
from pydantic import Field
from .text_entity import TextEntity
from ..base_object import BaseObject
class TextEntities(BaseObject):
"""
Contains a list of text entities
:param entities: List of text entities
:type entities: :class:`list[TextEntity]`
"""
ID: str = Field("textEntities", alias="@type")
entities: list[TextEntity]
@staticmethod
def read(q: dict) -> TextEntities:
return TextEntities.construct(**q)
```
#### File: api/types/text_parse_mode.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class TextParseMode(BaseObject):
"""
Describes the way the text needs to be parsed for TextEntities
"""
ID: str = Field("textParseMode", alias="@type")
class TextParseModeHTML(TextParseMode):
"""
The text uses HTML-style formatting. The same as Telegram Bot API "HTML" parse mode
"""
ID: str = Field("textParseModeHTML", alias="@type")
@staticmethod
def read(q: dict) -> TextParseModeHTML:
return TextParseModeHTML.construct(**q)
class TextParseModeMarkdown(TextParseMode):
"""
The text uses Markdown-style formatting
:param version: Version of the parser: 0 or 1 - Telegram Bot API "Markdown" parse mode, 2 - Telegram Bot API "MarkdownV2" parse mode
:type version: :class:`int`
"""
ID: str = Field("textParseModeMarkdown", alias="@type")
version: int
@staticmethod
def read(q: dict) -> TextParseModeMarkdown:
return TextParseModeMarkdown.construct(**q)
```
#### File: api/types/thumbnail.py
```python
from __future__ import annotations
from pydantic import Field
from .file import File
from .thumbnail_format import ThumbnailFormat
from ..base_object import BaseObject
class Thumbnail(BaseObject):
"""
Represents a thumbnail
:param format: Thumbnail format
:type format: :class:`ThumbnailFormat`
:param width: Thumbnail width
:type width: :class:`int`
:param height: Thumbnail height
:type height: :class:`int`
:param file: The thumbnail
:type file: :class:`File`
"""
ID: str = Field("thumbnail", alias="@type")
format: ThumbnailFormat
width: int
height: int
file: File
@staticmethod
def read(q: dict) -> Thumbnail:
return Thumbnail.construct(**q)
```
#### File: api/types/t_me_url.py
```python
from __future__ import annotations
from pydantic import Field
from .t_me_url_type import TMeUrlType
from ..base_object import BaseObject
class TMeUrl(BaseObject):
"""
Represents a URL linking to an internal Telegram entity
:param url: URL
:type url: :class:`str`
:param type_: Type of the URL
:type type_: :class:`TMeUrlType`
"""
ID: str = Field("tMeUrl", alias="@type")
url: str
type_: TMeUrlType = Field(..., alias='type')
@staticmethod
def read(q: dict) -> TMeUrl:
return TMeUrl.construct(**q)
```
#### File: api/types/updates.py
```python
from __future__ import annotations
from pydantic import Field
from .update import Update
from ..base_object import BaseObject
class Updates(BaseObject):
"""
Contains a list of updates
:param updates: List of updates
:type updates: :class:`list[Update]`
"""
ID: str = Field("updates", alias="@type")
updates: list[Update]
@staticmethod
def read(q: dict) -> Updates:
return Updates.construct(**q)
```
#### File: api/types/user_privacy_setting.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class UserPrivacySetting(BaseObject):
"""
Describes available user privacy settings
"""
ID: str = Field("userPrivacySetting", alias="@type")
class UserPrivacySettingAllowCalls(UserPrivacySetting):
"""
A privacy setting for managing whether the user can be called
"""
ID: str = Field("userPrivacySettingAllowCalls", alias="@type")
@staticmethod
def read(q: dict) -> UserPrivacySettingAllowCalls:
return UserPrivacySettingAllowCalls.construct(**q)
class UserPrivacySettingAllowChatInvites(UserPrivacySetting):
"""
A privacy setting for managing whether the user can be invited to chats
"""
ID: str = Field("userPrivacySettingAllowChatInvites", alias="@type")
@staticmethod
def read(q: dict) -> UserPrivacySettingAllowChatInvites:
return UserPrivacySettingAllowChatInvites.construct(**q)
class UserPrivacySettingAllowFindingByPhoneNumber(UserPrivacySetting):
"""
A privacy setting for managing whether the user can be found by their phone number. Checked only if the phone number is not known to the other user. Can be set only to "Allow contacts" or "Allow all"
"""
ID: str = Field("userPrivacySettingAllowFindingByPhoneNumber", alias="@type")
@staticmethod
def read(q: dict) -> UserPrivacySettingAllowFindingByPhoneNumber:
return UserPrivacySettingAllowFindingByPhoneNumber.construct(**q)
class UserPrivacySettingAllowPeerToPeerCalls(UserPrivacySetting):
"""
A privacy setting for managing whether peer-to-peer connections can be used for calls
"""
ID: str = Field("userPrivacySettingAllowPeerToPeerCalls", alias="@type")
@staticmethod
def read(q: dict) -> UserPrivacySettingAllowPeerToPeerCalls:
return UserPrivacySettingAllowPeerToPeerCalls.construct(**q)
class UserPrivacySettingShowLinkInForwardedMessages(UserPrivacySetting):
"""
A privacy setting for managing whether a link to the user's account is included in forwarded messages
"""
ID: str = Field("userPrivacySettingShowLinkInForwardedMessages", alias="@type")
@staticmethod
def read(q: dict) -> UserPrivacySettingShowLinkInForwardedMessages:
return UserPrivacySettingShowLinkInForwardedMessages.construct(**q)
class UserPrivacySettingShowPhoneNumber(UserPrivacySetting):
"""
A privacy setting for managing whether the user's phone number is visible
"""
ID: str = Field("userPrivacySettingShowPhoneNumber", alias="@type")
@staticmethod
def read(q: dict) -> UserPrivacySettingShowPhoneNumber:
return UserPrivacySettingShowPhoneNumber.construct(**q)
class UserPrivacySettingShowProfilePhoto(UserPrivacySetting):
"""
A privacy setting for managing whether the user's profile photo is visible
"""
ID: str = Field("userPrivacySettingShowProfilePhoto", alias="@type")
@staticmethod
def read(q: dict) -> UserPrivacySettingShowProfilePhoto:
return UserPrivacySettingShowProfilePhoto.construct(**q)
class UserPrivacySettingShowStatus(UserPrivacySetting):
"""
A privacy setting for managing whether the user's online status is visible
"""
ID: str = Field("userPrivacySettingShowStatus", alias="@type")
@staticmethod
def read(q: dict) -> UserPrivacySettingShowStatus:
return UserPrivacySettingShowStatus.construct(**q)
```
#### File: api/types/user_type.py
```python
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class UserType(BaseObject):
"""
Represents the type of a user. The following types are possible: regular users, deleted users and bots
"""
ID: str = Field("userType", alias="@type")
class UserTypeBot(UserType):
"""
A bot (see https://core.telegram.org/bots)
:param can_join_groups: True, if the bot can be invited to basic group and supergroup chats
:type can_join_groups: :class:`bool`
:param can_read_all_group_messages: True, if the bot can read all messages in basic group or supergroup chats and not just those addressed to the bot. In private and channel chats a bot can always read all messages
:type can_read_all_group_messages: :class:`bool`
:param is_inline: True, if the bot supports inline queries
:type is_inline: :class:`bool`
:param inline_query_placeholder: Placeholder for inline queries (displayed on the application input field)
:type inline_query_placeholder: :class:`str`
:param need_location: True, if the location of the user is expected to be sent with every inline query to this bot
:type need_location: :class:`bool`
"""
ID: str = Field("userTypeBot", alias="@type")
can_join_groups: bool
can_read_all_group_messages: bool
is_inline: bool
inline_query_placeholder: str
need_location: bool
@staticmethod
def read(q: dict) -> UserTypeBot:
return UserTypeBot.construct(**q)
class UserTypeDeleted(UserType):
"""
A deleted user or deleted bot. No information on the user besides the user identifier is available. It is not possible to perform any active actions on this type of user
"""
ID: str = Field("userTypeDeleted", alias="@type")
@staticmethod
def read(q: dict) -> UserTypeDeleted:
return UserTypeDeleted.construct(**q)
class UserTypeRegular(UserType):
"""
A regular user
"""
ID: str = Field("userTypeRegular", alias="@type")
@staticmethod
def read(q: dict) -> UserTypeRegular:
return UserTypeRegular.construct(**q)
class UserTypeUnknown(UserType):
"""
No information on the user besides the user identifier is available, yet this user has not been deleted. This object is extremely rare and must be handled like a deleted user. It is not possible to perform any actions on users of this type
"""
ID: str = Field("userTypeUnknown", alias="@type")
@staticmethod
def read(q: dict) -> UserTypeUnknown:
return UserTypeUnknown.construct(**q)
```
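The concrete user types carry no shared payload beyond their ID value, so client code usually just branches on the subclass. The helper below is a hypothetical sketch, assuming the classes above are in scope.
```python
def describe_user_type(user_type: UserType) -> str:
    # summarise a UserType instance, e.g. for logging
    if isinstance(user_type, UserTypeBot):
        return f"bot (supports inline queries: {user_type.is_inline})"
    if isinstance(user_type, UserTypeDeleted):
        return "deleted account"
    if isinstance(user_type, UserTypeRegular):
        return "regular user"
    return "unknown user type"
print(describe_user_type(UserTypeRegular()))
```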
#### File: aiotdlib_generator/parser/parser.py
```python
import re
import typing
import urllib.request
from .entities import (
Constructor,
ConstructorShort,
Method,
Parameter,
)
from .utils import upper_first
DEFAULT_TD_API_SCHEME_URL = "https://raw.githubusercontent.com/pylakey/td/master/td/generate/scheme/td_api.tl"
class TDApiParser:
@staticmethod
def parse() -> list[typing.Union[Constructor, Method]]:
abstract_class_docs_regex = re.compile(r"^//@class (?P<name>[^@]*) @description (?P<description>.*)$")
description_regex = re.compile(r"^//@description (?P<description>.*)$")
parameter_description_regex = re.compile(r"^//@(?P<name>.*?) (?P<description>.*)$")
entity_regex = re.compile(r'^(?P<name>\w+)\s(?P<args>.*)=\s(?P<return_type>\w+);$')
args_regex = re.compile(r"(?P<name>\w+):(?P<type>[\w<>]+)")
param_length_constraint = re.compile(r"(?P<min_length>\d+)-(?P<max_length>\d+) characters")
nullability_constraint = re.compile(r".*may be null.*")
scheme = urllib.request.urlopen(DEFAULT_TD_API_SCHEME_URL).read().decode('utf-8')
# Some cleaning for better parsing
inline_parameter_regex = re.compile(r'(?!(@description|@class))(@\w+)')
empty_slashes_regex = re.compile(r'\n//\s*$', re.MULTILINE)
scheme = scheme.replace('\n//-', ' ')
scheme = inline_parameter_regex.sub(r'\n//\2', scheme)
scheme = scheme.replace('////', '//')
scheme = empty_slashes_regex.sub('', scheme)
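# After this cleanup every @param description sits on its own //@name line,
# multi-line '//-' continuations are joined into a single line, and empty
# comment lines are dropped, so the line-by-line regexes below can match one
# entity or documentation line at a time.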
methods = {}
constructors = {}
is_functions_section = False
current_entity_description: str = ""
current_entity_params_descriptions: dict[str, str] = {}
for line in [line.strip() for line in scheme.splitlines()[14:] if len(line) > 0]:
# After line '---functions---' only methods are described
if line == '---functions---':
is_functions_section = True
continue
abstract_class_match = abstract_class_docs_regex.match(line)
# Abstract classes presented as
# //@class <ClassName> @description <description>
if bool(abstract_class_match):
class_name = upper_first(abstract_class_match.group('name'))
class_description = abstract_class_match.group('description')
constructors[class_name] = Constructor(
name=class_name,
doc=class_description,
is_abstract=True
)
continue
description_match = description_regex.match(line)
if bool(description_match):
current_entity_description = description_match.group('description')
continue
parameter_description_match = parameter_description_regex.match(line)
if bool(parameter_description_match):
param_name = parameter_description_match.group('name')
param_description = parameter_description_match.group('description')
current_entity_params_descriptions[param_name] = param_description
continue
entity_match = entity_regex.match(line)
if bool(entity_match):
entity_name = upper_first(entity_match.group('name'))
entity_return_type = upper_first(entity_match.group('return_type'))
entity_uf_return_type = upper_first(entity_return_type)
entity_parameters: list[Parameter] = []
for arg_name, arg_type in args_regex.findall(entity_match.group('args')):
if arg_name in ['description', 'class']:
arg_name = f'param_{arg_name}'
arg_description = current_entity_params_descriptions.get(arg_name)
arg_nullable = False
arg_min_length = None
arg_max_length = None
if ";" in arg_description:
# Parsing parameter constraints
# https://github.com/tdlib/td/issues/1016#issuecomment-618959102
for c in arg_description.split(";"):
c = c.strip()
if nullability_constraint.match(c):
arg_nullable = True
param_length_constraint_match = param_length_constraint.match(c)
if bool(param_length_constraint_match):
arg_min_length = int(param_length_constraint_match.group('min_length'))
if arg_min_length == 0:
arg_nullable = True
arg_max_length = int(param_length_constraint_match.group('max_length'))
entity_parameters.append(
Parameter(
name=arg_name,
type=arg_type,
doc=arg_description,
nullable=arg_nullable,
min_length=arg_min_length,
max_length=arg_max_length,
)
)
if is_functions_section:
methods[entity_name] = Method(
name=entity_name,
doc=current_entity_description,
parameters=entity_parameters,
return_type=(
constructors[entity_uf_return_type]
if entity_uf_return_type in constructors
else entity_return_type
)
)
else:
if entity_uf_return_type in constructors:
parent = constructors[entity_uf_return_type]
entity = Constructor(
name=entity_name,
doc=current_entity_description,
parameters=entity_parameters,
parent_class=ConstructorShort(
name=parent.name,
doc=parent.doc,
parameters=parent.parameters,
is_abstract=parent.is_abstract
),
)
parent.subclasses.append(entity)
else:
constructors[entity_name] = Constructor(
name=entity_name,
doc=current_entity_description,
parameters=entity_parameters,
)
current_entity_params_descriptions = {}
current_entity_description = ""
continue
raise RuntimeError(f'Unable to parse scheme line: {line!r}')
# Separately process cross-dependencies to avoid circular import error
cross_deps: dict[str, set[str]] = {}
for constructor in sorted(constructors.values(), key=lambda x: x.name):
for parameter in constructor.parameters:
parameter_constructor = constructors.get(parameter.import_type)
# Skip core types
if not bool(parameter_constructor):
continue
# Skip subclasses as they will be checked above in parent class processing
if bool(parameter_constructor.parent_class):
continue
# Check parameters of constructor parameters
for p in parameter_constructor.parameters:
if p.import_type == constructor.name:
if parameter.import_type not in cross_deps:
cross_deps[parameter.import_type] = set()
cross_deps[parameter.import_type].add(constructor.name)
# Check parameter constructor subclasses' parameters
for subclass in parameter_constructor.subclasses:
for subclass_parameter in subclass.parameters:
if subclass_parameter.import_type == constructor.name:
if parameter.import_type not in cross_deps:
cross_deps[parameter.import_type] = set()
cross_deps[parameter.import_type].add(constructor.name)
for constructor_name, cross_deps_constructors_names in cross_deps.items():
for name in cross_deps_constructors_names:
dep_constructor = constructors.pop(name)
constructors[constructor_name].cross_deps.append(dep_constructor)
methods_list: list[Method] = list(methods.values())
constructors_list: list[Constructor] = list(constructors.values())
# Sorting subclasses and cross_deps lists
for constructor in constructors_list:
constructor.subclasses.sort(key=lambda x: x.name)
constructor.cross_deps.sort(key=lambda x: x.name)
return list(sorted([*methods_list, *constructors_list], key=lambda x: x.name))
```
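A hypothetical invocation of the parser; the import path simply mirrors the file path above, and network access to the TDLib scheme URL is assumed:
```python
from aiotdlib_generator.parser.parser import TDApiParser

# parse() downloads td_api.tl, resolves cross-dependent constructors, and
# returns constructors and methods sorted by name.
entities = TDApiParser.parse()
print(len(entities))
print([e.name for e in entities[:5]])
```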