| prompt (large_string, lengths 72 – 9.34k) | completion (large_string, lengths 0 – 7.61k) |
|---|---|
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
<|fim_middle|>
<|fim▁end|> | args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented") |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
<|fim_middle|>
else:
print("not implemented")
<|fim▁end|> | pymol_processing(ensamble_output) |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
<|fim_middle|>
<|fim▁end|> | print("not implemented") |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def <|fim_middle|>():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | argument |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def <|fim_middle|>(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | rmsd_pymol |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def <|fim_middle|>():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | searching_pdb |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def <|fim_middle|>(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | argument_processing |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def <|fim_middle|>():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | using_adderror |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def <|fim_middle|>(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
# ensamble_output = ensamble-fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | find_index |
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for home:
out_pymol = subprocess.check_output(" pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
print('Parametrs ')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def <|fim_middle|>():
ensable_output=[None]*args.k_number_of_options
for i in range(k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
        # ensamble_output = ensamble_fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | ensamble_fit |
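A note on the record above: the ensamble-fit command is assembled by string concatenation and run with `shell=True`. A minimal sketch of the same invocation built as an argument list (the `-L`/`-p`/`-n`/`-m` flags are taken from the command string above; the binary location and paths are caller-supplied and purely illustrative):

```python
import subprocess

def run_ensamble_fit(binary, model_dir, n_models, measurement_file):
    """Run a single ensamble-fit call; all arguments are caller-supplied paths."""
    cmd = [
        binary,                  # e.g. the saxs-ensamble-fit core executable
        "-L",
        "-p", model_dir,         # directory holding the candidate models
        "-n", str(n_models),     # number of selected structures
        "-m", measurement_file,  # simulated measurement produced by adderror
    ]
    # check=True surfaces a non-zero exit status instead of continuing silently
    subprocess.run(cmd, check=True)
```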
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
    # part for home:
    out_pymol = subprocess.check_output("pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
    print('Parameters')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
    for i in range(args.k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def <|fim_middle|>():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def pymol_processing(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
        # ensamble_output = ensamble_fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | result_rmsd |
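`rmsd_pymol` above recovers the RMSD by slicing the grepped `Executive:` line between `=` and `(`. A regex-based parse of that same line is a slightly more defensive alternative; the sample byte string below only mimics the shape of a typical PyMOL alignment message and is not taken from real output:

```python
import re

def parse_pymol_rmsd(line: bytes) -> float:
    """Extract the value from a PyMOL 'Executive: RMSD = x.xxx (...)' line."""
    match = re.search(rb"RMSD\s*=\s*([0-9]*\.?[0-9]+)", line)
    if match is None:
        raise ValueError("no RMSD value found in: %r" % line)
    return float(match.group(1).decode())

# illustrative input shaped like the line grepped in rmsd_pymol
print(parse_pymol_rmsd(b" Executive: RMSD =    0.438 (1437 to 1437 atoms)"))
```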
<|file_name|>test_caxs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from os import listdir
import os
import re
import sys
from argparse import ArgumentParser
import random
import subprocess
from math import sqrt
import ast
from adderror import adderror
"""ENSAMBLE, -d directory -n number of models """
"""-k number of selected structure"""
"""-r repet of program"""
files = []
pdb_files = []
exp_file = []
list_of_random_items_modified = []
list_of_random_items = []
selected_files_for_ensamble = []
def argument():
parser = ArgumentParser()
parser.add_argument("-d", "--dir", dest="myDirVariable",
help="Choose dir", metavar="DIR", required=True)
parser.add_argument("-n", metavar='N', type=int,
dest="number_of_selected_files",
help="Number of selected structure",
required=True)
parser.add_argument("-k", metavar='K', type=int,
dest="k_number_of_options",
help="Number of possibility structure, less then selected files",
required=True)
parser.add_argument("-q", metavar='Q', type=int,
dest="mixing_koeficient", help="Mixing koeficient",
default=1)
parser.add_argument("-r", metavar='R', type=int,
dest="repeat", help="Number of repetitions",
default=1)
parser.add_argument("--verbose", help="increase output verbosity",
action="store_true")
args = parser.parse_args()
global files
global list_of_random_items_modified
files = listdir(args.myDirVariable)
list_of_random_items_modified = [None]*args.k_number_of_options
return(args)
def rmsd_pymol(structure_1, structure_2):
with open("file_for_pymol.pml", "w") as file_for_pymol:
file_for_pymol.write("""
load {s1}
load {s2}
align {s3}, {s4}
quit
""".format(s1=structure_1, s2=structure_2,
s3=os.path.splitext(structure_1)[0],
s4=os.path.splitext(structure_2)[0]))
    # part for home:
    out_pymol = subprocess.check_output("pymol -c file_for_pymol.pml | grep Executive:", shell=True)
#part for META:out_pymol = subprocess.check_output("module add pymol-1.8.2.1-gcc; pymol -c file_for_pymol.pml | grep Executive:;module rm pymol-1.8.2.1-gcc", shell=True)
rmsd = float(out_pymol[out_pymol.index(b'=')+1:out_pymol.index(b'(')-1])
print('RMSD ', structure_1, ' and ', structure_2, ' = ', rmsd)
return rmsd
def searching_pdb():
for line in files:
line = line.rstrip()
if re.search('.pdb$', line):
#if re.search('.pdb.dat', line):
pdb_files.append(line)
#if re.search('exp.dat', line):
#print('experimental file', line)
# exp_file.append(line)
total_number_of_pdb_files = len(pdb_files)
return(total_number_of_pdb_files)
def argument_processing(args, total_number_of_pdb_files):
#print(args)
    print('Parameters')
print('Total number of pdb files', total_number_of_pdb_files)
if total_number_of_pdb_files < args.number_of_selected_files:
print("Number od pdb files is ", total_number_of_pdb_files)
sys.exit(0)
if args.k_number_of_options > args.number_of_selected_files:
print("Number of selected structure is only", args.number_of_selected_files)
sys.exit(0)
if args.mixing_koeficient != 1:
print ("For q>1 is not implemented now \n")
sys.exit(0)
print('Files from directory', args.myDirVariable)
print('The number of the selected files',
args.number_of_selected_files)
print('The number of selected options', args.k_number_of_options)
print('All pdb.dat files \n', pdb_files)
global selected_files_for_ensamble
selected_files_for_ensamble = random.sample(pdb_files,
args.number_of_selected_files)
print('Randomly selected files: \n', selected_files_for_ensamble)
global list_of_random_items
list_of_random_items = random.sample(selected_files_for_ensamble,
args.k_number_of_options)
print('Randomly selected files: \n', list_of_random_items)
def using_adderror():
for i in range(args.k_number_of_options):
list_of_random_items_modified[i] = adderror("exp.dat",list_of_random_items[i]+'.dat')
str1 = ''.join(str(e)+"\n" for e in list_of_random_items_modified)
str2 = ''.join(str(e)+"\n" for e in list_of_random_items)
print(str1)
print(str2)
return(str1, str2)
def find_index(strings):
for e in list_of_random_items:
value_of_index[e] = selected_files_for_ensamble.index(e)
print(selected_files_for_ensamble.index(e))
with open("input_for_ensamble_fit", "w") as f:
f.write(strings[0])
def ensamble_fit():
ensable_output=[None]*args.k_number_of_options
    for i in range(args.k_number_of_options):
command = "/storage/brno3-cerit/home/krab1k/saxs-ensamble-fit/core/ensamble-fit -L -p /storage/brno2/home/petrahrozkova/SAXS/mod -n " + str(args.number_of_selected_files) + " -m /storage/brno2/home/petrahrozkova/SAXS/" +list_of_random_items_modified[i]+".dat"
subprocess.call(command,shell=True)
ensable_output[i] = result_rmsd()
return(ensable_output)
def result_rmsd():
with open('result', 'r') as f:
(f.readline())
result = f.readline()
values_of_index_result = result.split(',')[4:]
return(values_of_index_result)
def <|fim_middle|>(ensable_output):
sum_rmsd = 0
values_of_index_result = ensable_output[0]
dictionary_index_and_structure = dict()
for i, j in enumerate(selected_files_for_ensamble):
dictionary_index_and_structure[i] = j
for i, j in enumerate(values_of_index_result):
f = float(j)
if f != 0:
computed_rmsd = rmsd_pymol(selected_files_for_ensamble[i],
list_of_random_items[0])
print('Adjusted rmsd ', f*computed_rmsd, '\n')
sum_rmsd += f*computed_rmsd
print('Sum of RMSD', sum_rmsd)
if __name__ == '__main__':
args = argument()
total_number_of_pdb_files = searching_pdb()
for i in range(args.repeat):
argument_processing(args, total_number_of_pdb_files)
strings = using_adderror()
#find_index(strings)
        # ensamble_output = ensamble_fit()
ensamble_output=[None]*2
ensamble_output[0] = result_rmsd()
if args.k_number_of_options ==1:
pymol_processing(ensamble_output)
else:
print("not implemented")
<|fim▁end|> | pymol_processing |
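`pymol_processing` above accumulates `weight * RMSD` over the weights read back from the `result` file. The same bookkeeping as a small standalone function, with the PyMOL call replaced by an injected callable so the arithmetic can be checked without PyMOL (names here are illustrative, not taken from the script):

```python
def weighted_rmsd(weights, structures, reference, rmsd_fn):
    """Sum weight * rmsd_fn(structure, reference) over the non-zero weights."""
    total = 0.0
    for weight, structure in zip(weights, structures):
        w = float(weight)
        if w != 0:
            total += w * rmsd_fn(structure, reference)
    return total

# toy check with a constant stand-in for the PyMOL alignment
fake_rmsd = lambda a, b: 2.0
print(weighted_rmsd(["0.7", "0", "0.3"],
                    ["m1.pdb", "m2.pdb", "m3.pdb"],
                    "ref.pdb", fake_rmsd))  # 0.7*2.0 + 0.3*2.0 = 2.0
```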
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Initialize Flask app
"""
from flask import Flask
import os
from flask_debugtoolbar import DebugToolbarExtension
from werkzeug.debug import DebuggedApplication
app = Flask('application')
if os.getenv('FLASK_CONF') == 'DEV':<|fim▁hole|>
# Google app engine mini profiler
# https://github.com/kamens/gae_mini_profiler
app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)
from gae_mini_profiler import profiler, templatetags
@app.context_processor
def inject_profiler():
return dict(profiler_includes=templatetags.profiler_includes())
app.wsgi_app = profiler.ProfilerWSGIMiddleware(app.wsgi_app)
elif os.getenv('FLASK_CONF') == 'TEST':
app.config.from_object('application.settings.Testing')
else:
app.config.from_object('application.settings.Production')
# Enable jinja2 loop controls extension
app.jinja_env.add_extension('jinja2.ext.loopcontrols')
# Pull in URL dispatch routes
import urls<|fim▁end|> | # Development settings
app.config.from_object('application.settings.Development')
# Flask-DebugToolbar
toolbar = DebugToolbarExtension(app) |
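The `__init__.py` record above selects a settings class from the `FLASK_CONF` environment variable with an if/elif/else chain. The same dispatch can be written as a dictionary lookup with a production fallback; the settings module paths are the ones used above, everything else is a sketch rather than the app's actual code:

```python
import os
from flask import Flask

app = Flask('application')

# map FLASK_CONF values to settings objects, defaulting to Production
_CONFIGS = {
    'DEV': 'application.settings.Development',
    'TEST': 'application.settings.Testing',
}
app.config.from_object(
    _CONFIGS.get(os.getenv('FLASK_CONF'), 'application.settings.Production'))
```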
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Initialize Flask app
"""
from flask import Flask
import os
from flask_debugtoolbar import DebugToolbarExtension
from werkzeug.debug import DebuggedApplication
app = Flask('application')
if os.getenv('FLASK_CONF') == 'DEV':
# Development settings
app.config.from_object('application.settings.Development')
# Flask-DebugToolbar
toolbar = DebugToolbarExtension(app)
# Google app engine mini profiler
# https://github.com/kamens/gae_mini_profiler
app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)
from gae_mini_profiler import profiler, templatetags
@app.context_processor
def inject_profiler():
<|fim_middle|>
app.wsgi_app = profiler.ProfilerWSGIMiddleware(app.wsgi_app)
elif os.getenv('FLASK_CONF') == 'TEST':
app.config.from_object('application.settings.Testing')
else:
app.config.from_object('application.settings.Production')
# Enable jinja2 loop controls extension
app.jinja_env.add_extension('jinja2.ext.loopcontrols')
# Pull in URL dispatch routes
import urls
<|fim▁end|> | return dict(profiler_includes=templatetags.profiler_includes()) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Initialize Flask app
"""
from flask import Flask
import os
from flask_debugtoolbar import DebugToolbarExtension
from werkzeug.debug import DebuggedApplication
app = Flask('application')
if os.getenv('FLASK_CONF') == 'DEV':
# Development settings
<|fim_middle|>
elif os.getenv('FLASK_CONF') == 'TEST':
app.config.from_object('application.settings.Testing')
else:
app.config.from_object('application.settings.Production')
# Enable jinja2 loop controls extension
app.jinja_env.add_extension('jinja2.ext.loopcontrols')
# Pull in URL dispatch routes
import urls
<|fim▁end|> | app.config.from_object('application.settings.Development')
# Flask-DebugToolbar
toolbar = DebugToolbarExtension(app)
# Google app engine mini profiler
# https://github.com/kamens/gae_mini_profiler
app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)
from gae_mini_profiler import profiler, templatetags
@app.context_processor
def inject_profiler():
return dict(profiler_includes=templatetags.profiler_includes())
app.wsgi_app = profiler.ProfilerWSGIMiddleware(app.wsgi_app) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Initialize Flask app
"""
from flask import Flask
import os
from flask_debugtoolbar import DebugToolbarExtension
from werkzeug.debug import DebuggedApplication
app = Flask('application')
if os.getenv('FLASK_CONF') == 'DEV':
# Development settings
app.config.from_object('application.settings.Development')
# Flask-DebugToolbar
toolbar = DebugToolbarExtension(app)
# Google app engine mini profiler
# https://github.com/kamens/gae_mini_profiler
app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)
from gae_mini_profiler import profiler, templatetags
@app.context_processor
def inject_profiler():
return dict(profiler_includes=templatetags.profiler_includes())
app.wsgi_app = profiler.ProfilerWSGIMiddleware(app.wsgi_app)
elif os.getenv('FLASK_CONF') == 'TEST':
<|fim_middle|>
else:
app.config.from_object('application.settings.Production')
# Enable jinja2 loop controls extension
app.jinja_env.add_extension('jinja2.ext.loopcontrols')
# Pull in URL dispatch routes
import urls
<|fim▁end|> | app.config.from_object('application.settings.Testing') |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Initialize Flask app
"""
from flask import Flask
import os
from flask_debugtoolbar import DebugToolbarExtension
from werkzeug.debug import DebuggedApplication
app = Flask('application')
if os.getenv('FLASK_CONF') == 'DEV':
# Development settings
app.config.from_object('application.settings.Development')
# Flask-DebugToolbar
toolbar = DebugToolbarExtension(app)
# Google app engine mini profiler
# https://github.com/kamens/gae_mini_profiler
app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)
from gae_mini_profiler import profiler, templatetags
@app.context_processor
def inject_profiler():
return dict(profiler_includes=templatetags.profiler_includes())
app.wsgi_app = profiler.ProfilerWSGIMiddleware(app.wsgi_app)
elif os.getenv('FLASK_CONF') == 'TEST':
app.config.from_object('application.settings.Testing')
else:
<|fim_middle|>
# Enable jinja2 loop controls extension
app.jinja_env.add_extension('jinja2.ext.loopcontrols')
# Pull in URL dispatch routes
import urls
<|fim▁end|> | app.config.from_object('application.settings.Production') |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Initialize Flask app
"""
from flask import Flask
import os
from flask_debugtoolbar import DebugToolbarExtension
from werkzeug.debug import DebuggedApplication
app = Flask('application')
if os.getenv('FLASK_CONF') == 'DEV':
# Development settings
app.config.from_object('application.settings.Development')
# Flask-DebugToolbar
toolbar = DebugToolbarExtension(app)
# Google app engine mini profiler
# https://github.com/kamens/gae_mini_profiler
app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)
from gae_mini_profiler import profiler, templatetags
@app.context_processor
def <|fim_middle|>():
return dict(profiler_includes=templatetags.profiler_includes())
app.wsgi_app = profiler.ProfilerWSGIMiddleware(app.wsgi_app)
elif os.getenv('FLASK_CONF') == 'TEST':
app.config.from_object('application.settings.Testing')
else:
app.config.from_object('application.settings.Production')
# Enable jinja2 loop controls extension
app.jinja_env.add_extension('jinja2.ext.loopcontrols')
# Pull in URL dispatch routes
import urls
<|fim▁end|> | inject_profiler |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()<|fim▁hole|> if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()<|fim▁end|> | |
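The `Imeji` class docstring above sketches the intended call sequence. Written out as a self-contained snippet (the module path `pyimeji.api` is inferred from the imports above, and the demo service URL comes straight from the docstring; neither is verified here):

```python
from pyimeji.api import Imeji

api = Imeji(service_url='http://demo.imeji.org/imeji/')

# listing returns an OrderedDict keyed by collection id
collections = api.collections()
first_id = next(iter(collections))

# fetch one collection, create a new one, attach an item, then clean up
collection = api.collection(first_id)
new_collection = api.create('collection', title='the new collection')
item = new_collection.add_item(fetchUrl='http://example.org')
item.delete()
```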
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
<|fim_middle|>
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
<|fim_middle|>
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
<|fim_middle|>
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | """Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res]) |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
<|fim_middle|>
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | """Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's' |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
<|fim_middle|>
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | """Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res]) |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
<|fim_middle|>
<|fim▁end|> | """The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save() |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
<|fim_middle|>
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password) |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
<|fim_middle|>
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | """Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res |
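`_req` above raises `ImejiError` whenever the response status differs from the expected one, attaching the decoded body. A sketch of catching that at the call site (the resource id is a placeholder and the module path is assumed from the imports above):

```python
import logging

from pyimeji.api import Imeji, ImejiError

log = logging.getLogger(__name__)

api = Imeji(service_url='http://demo.imeji.org/imeji/')
try:
    item = api.item('no-such-id')  # placeholder id, expected to fail
except ImejiError as exc:
    # exc.error holds the server's 'error' payload when the body was JSON
    log.warning('request rejected: %s', exc.error)
```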
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
<|fim_middle|>
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | """Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name) |
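Editorial note on the completion above: `__getattr__` turns attribute access into `GET` handlers, so a singular resource name retrieves one object while a plural name retrieves an id-to-metadata mapping. The sketch below illustrates that pattern and is not part of the dataset row; the id and query values are made-up placeholders and a reachable imeji instance is assumed.

# Hedged illustration of the dynamic-attribute lookup; 'abc123' and the query
# string are placeholders, not real identifiers.
from pyimeji.api import Imeji

api = Imeji(service_url='http://demo.imeji.org/imeji/')
item = api.item(id='abc123')    # singular name -> GET /rest/items/abc123 -> Resource instance
items = api.items(q='title')    # plural name   -> GET /rest/items?q=title -> OrderedDict of id -> metadata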
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
<|fim_middle|>
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save() |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
<|fim_middle|>
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | return rsc.delete() |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
<|fim_middle|>
<|fim▁end|> | for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save() |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
<|fim_middle|>
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | self.path += 's' |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
<|fim_middle|>
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | raise ValueError('no id given') |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
<|fim_middle|>
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | id = '/' + id |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
<|fim_middle|>
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | return self.rsc(res, self.api) |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
<|fim_middle|>
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | self.session.auth = (user, password) |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
<|fim_middle|>
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise |
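Editorial note on the completion above: when `json` is true the response body is decoded, and the raw text is logged before re-raising if decoding fails; with `json=False` the untouched `requests.Response` object is returned instead. The sketch below is illustrative only (it exercises the private `_req` helper directly); the path and URL are placeholder assumptions.

# Hedged sketch of the json flag; requires a reachable imeji instance.
from pyimeji.api import Imeji

api = Imeji(service_url='http://demo.imeji.org/imeji/')
raw = api._req('/collections', json=False)           # raw requests.Response
print(raw.status_code, raw.headers.get('Content-Type'))
data = api._req('/collections')                      # decoded JSON (default behaviour)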
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
<|fim_middle|>
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res) |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
<|fim_middle|>
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res) |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
<|fim_middle|>
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self) |
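Editorial note on the completion above: `create` accepts either a resource instance or a resource-class name, builds the instance from the keyword arguments, and persists it via `save()`; `update` and `delete` delegate to the same resource object. The round trip below is a hedged sketch and not part of the dataset row; the title values are placeholders, and `title` is assumed to be a valid collection attribute on a live imeji instance with write access.

# Hedged sketch of the create/update/delete round trip; values are placeholders.
from pyimeji.api import Imeji

api = Imeji(service_url='http://demo.imeji.org/imeji/')
collection = api.create('collection', title='draft collection')  # string name -> resource.Collection
collection = api.update(collection, title='renamed collection')  # setattr(...) then save()
api.delete(collection)                                            # delegates to collection.delete()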
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def <|fim_middle|>(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | __init__ |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def <|fim_middle|>(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | __init__ |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def <|fim_middle|>(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | __call__ |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def <|fim_middle|>(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | __init__ |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def <|fim_middle|>(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | _req |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def <|fim_middle|>(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | __getattr__ |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def <|fim_middle|>(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | create |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def <|fim_middle|>(self, rsc):
return rsc.delete()
def update(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | delete |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""A client for the REST API of imeji instances."""
import logging
from collections import OrderedDict
import requests
from six import string_types
from pyimeji import resource
from pyimeji.config import Config
log = logging.getLogger(__name__)
class ImejiError(Exception):
def __init__(self, message, error):
super(ImejiError, self).__init__(message)
self.error = error.get('error') if isinstance(error, dict) else error
class GET(object):
"""Handle GET requests.
This includes requests
- to retrieve single objects,
- to fetch lists of object references (which are returned as `OrderedDict` mapping
object `id` to additional metadata present in the response).
"""
def __init__(self, api, name):
"""Initialize a handler.
:param api: An Imeji API instance.
:param name: Name specifying the kind of object(s) to retrieve. We check whether\
this name has a plural "s" to determine if a list is to be retrieved.
"""
self._list = name.endswith('s')
self.rsc = getattr(resource, (name[:-1] if self._list else name).capitalize())
self.api = api
self.name = name
self.path = name
if not self._list:
self.path += 's'
def __call__(self, id='', **kw):
"""Calling the handler initiates an HTTP request to the imeji server.
:param id: If a single object is to be retrieved it must be specified by id.
:return: An OrderedDict mapping id to additional metadata for lists, a \
:py:class:`pyimeji.resource.Resource` instance for single objects.
"""
if not self._list and not id:
raise ValueError('no id given')
if id:
id = '/' + id
res = self.api._req('/%s%s' % (self.path, id), params=kw)
if not self._list:
return self.rsc(res, self.api)
return OrderedDict([(d['id'], d) for d in res])
class Imeji(object):
"""The client.
>>> api = Imeji(service_url='http://demo.imeji.org/imeji/')
>>> collection_id = list(api.collections().keys())[0]
>>> collection = api.collection(collection_id)
>>> collection = api.create('collection', title='the new collection')
>>> item = collection.add_item(fetchUrl='http://example.org')
>>> item.delete()
"""
def __init__(self, cfg=None, service_url=None):
self.cfg = cfg or Config()
self.service_url = service_url or self.cfg.get('service', 'url')
user = self.cfg.get('service', 'user', default=None)
password = self.cfg.get('service', 'password', default=None)
self.session = requests.Session()
if user and password:
self.session.auth = (user, password)
def _req(self, path, method='get', json=True, assert_status=200, **kw):
"""Make a request to the API of an imeji instance.
:param path: HTTP path.
:param method: HTTP method.
:param json: Flag signalling whether the response should be treated as JSON.
:param assert_status: Expected HTTP response status of a successful request.
:param kw: Additional keyword parameters will be handed through to the \
appropriate function of the requests library.
:return: The return value of the function of the requests library or a decoded \
JSON object/array.
"""
method = getattr(self.session, method.lower())
res = method(self.service_url + '/rest' + path, **kw)
status_code = res.status_code
if json:
try:
res = res.json()
except ValueError: # pragma: no cover
log.error(res.text[:1000])
raise
if assert_status:
if status_code != assert_status:
log.error(
'got HTTP %s, expected HTTP %s' % (status_code, assert_status))
log.error(res.text[:1000] if hasattr(res, 'text') else res)
raise ImejiError('Unexpected HTTP status code', res)
return res
def __getattr__(self, name):
"""Names of resource classes are accepted and resolved as dynamic attribute names.
This allows convenient retrieval of resources as api.<resource-class>(id=<id>),
or api.<resource-class>s(q='x').
"""
return GET(self, name)
def create(self, rsc, **kw):
if isinstance(rsc, string_types):
cls = getattr(resource, rsc.capitalize())
rsc = cls(kw, self)
return rsc.save()
def delete(self, rsc):
return rsc.delete()
def <|fim_middle|>(self, rsc, **kw):
for k, v in kw.items():
setattr(rsc, k, v)
return rsc.save()
<|fim▁end|> | update |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()<|fim▁hole|>
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]<|fim▁end|> | |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
<|fim_middle|>
<|fim▁end|> | def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:] |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
<|fim_middle|>
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
<|fim_middle|>
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon() |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
<|fim_middle|>
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon() |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
<|fim_middle|>
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | self._stop_daemon()
return self._start_daemon() |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
<|fim_middle|>
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches |
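The completion above derives one CIDR per logical switch by stepping start_cidr forward with netaddr. A minimal sketch of that sequencing, assuming netaddr's IPNetwork.next(step) returns the adjacent network of the same prefix length shifted forward by step subnets; the 192.168.0.0/24 value is only an illustration, not taken from the dataset:

import netaddr

start_cidr = netaddr.IPNetwork("192.168.0.0/24")   # illustrative start_cidr value
for i in range(3):
    cidr = start_cidr.next(i)       # i-th adjacent /24; i=0 is start_cidr itself
    print("lswitch_%s" % cidr)      # lswitch_192.168.0.0/24, lswitch_192.168.1.0/24, ...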
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
<|fim_middle|>
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters |
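Both _create_lswitches and _create_routers use the same countdown-and-flush pattern so that at most `batch` queued nbctl commands are sent per round trip. A condensed sketch of that pattern, where `client.queue` and `client.flush` are hypothetical stand-ins for the batched ovn-nbctl calls made while batch mode is enabled:

def run_batched(client, commands, batch):
    # client.queue / client.flush are assumed stand-ins, not real ovn-nbctl API.
    flush_count = batch
    for cmd in commands:
        client.queue(cmd)
        flush_count -= 1
        if flush_count < 1:
            client.flush()          # push the queued commands in one round trip
            flush_count = batch
    client.flush()                  # flush whatever is left over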
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
<|fim_middle|>
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush() |
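The completion above seeds the router-port MAC from the Rally task UUID: the first three octets come from the UUID groups (with the multicast bit of the first octet cleared), and the last three are zeroed before utils.get_random_mac fills them in. A small illustration of that derivation with a made-up UUID (the UUID value is hypothetical; get_random_mac itself is the rally-ovs helper and is not reproduced here):

task_uuid = "fb1c2d3e-4f50-6172-8394-a5b6c7d8e9f0"    # hypothetical task UUID
base_mac = [i[:2] for i in task_uuid.split('-')]       # ['fb', '4f', '61', '83', 'a5']
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))     # clear the multicast bit -> '0xfa'
base_mac[3:] = ['00'] * 3                              # zero the octets to be randomised
print(base_mac)                                        # ['0xfa', '4f', '61', '00', '00', '00']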
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
<|fim_middle|>
<|fim▁end|> | for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:] |
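The completion above hands each router the next networks_per_router entries from the front of the network list and then drops them, so the networks are partitioned across routers in order. A pure-Python sketch of that slicing pattern with made-up names:

lnetworks = ["net-%d" % i for i in range(6)]    # hypothetical network names
lrouters = ["router-a", "router-b", "router-c"]
networks_per_router = 2
for lrouter in lrouters:
    for lnetwork in lnetworks[:networks_per_router]:
        print(lrouter, "<-", lnetwork)           # router-a <- net-0, net-1, ...
    lnetworks = lnetworks[networks_per_router:]  # drop the networks just consumed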
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
<|fim_middle|>
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | num_switches = lswitch_create_args.get("amount", 1) |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
<|fim_middle|>
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | start_cidr = netaddr.IPNetwork(start_cidr) |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
<|fim_middle|>
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
<|fim_middle|>
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | name = self.generate_random_name() |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
<|fim_middle|>
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | lswitch["cidr"] = cidr |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
<|fim_middle|>
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | ovn_nbctl.flush()
flush_count = batch |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if (num_switches == -1):
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
<|fim_middle|>
ovn_nbctl.flush() # ensure all commands be run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('address', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | ovn_nbctl.flush()
flush_count = batch |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def <|fim_middle|>(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | _get_ovn_controller |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def <|fim_middle|>(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | _start_daemon |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def <|fim_middle|>(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | _stop_daemon |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def <|fim_middle|>(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | _restart_daemon |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def <|fim_middle|>(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | _create_lswitches |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def <|fim_middle|>(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | _create_routers |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def <|fim_middle|>(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush()
def _connect_networks_to_routers(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | _connect_network_to_router |
<|file_name|>ovnclients.py<|end_file_name|><|fim▁begin|># Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from rally.common import logging
from rally.common.utils import RandomNameGeneratorMixin
from rally_ovs.plugins.ovs import ovsclients
from rally_ovs.plugins.ovs import utils
LOG = logging.getLogger(__name__)
class OvnClientMixin(ovsclients.ClientsMixin, RandomNameGeneratorMixin):
def _get_ovn_controller(self, install_method="sandbox"):
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", install_method,
self.context['controller']['host_container'])
ovn_nbctl.set_daemon_socket(self.context.get("daemon_socket", None))
return ovn_nbctl
def _start_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
return ovn_nbctl.start_daemon()
def _stop_daemon(self):
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.stop_daemon()
def _restart_daemon(self):
self._stop_daemon()
return self._start_daemon()
def _create_lswitches(self, lswitch_create_args, num_switches=-1):
self.RESOURCE_NAME_FORMAT = "lswitch_XXXXXX_XXXXXX"
if num_switches == -1:
num_switches = lswitch_create_args.get("amount", 1)
batch = lswitch_create_args.get("batch", num_switches)
start_cidr = lswitch_create_args.get("start_cidr", "")
if start_cidr:
start_cidr = netaddr.IPNetwork(start_cidr)
mcast_snoop = lswitch_create_args.get("mcast_snoop", "true")
mcast_idle = lswitch_create_args.get("mcast_idle_timeout", 300)
mcast_table_size = lswitch_create_args.get("mcast_table_size", 2048)
LOG.info("Create lswitches method: %s" % self.install_method)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lswitches = []
for i in range(num_switches):
name = self.generate_random_name()
if start_cidr:
cidr = start_cidr.next(i)
name = "lswitch_%s" % cidr
else:
name = self.generate_random_name()
other_cfg = {
'mcast_snoop': mcast_snoop,
'mcast_idle_timeout': mcast_idle,
'mcast_table_size': mcast_table_size
}
lswitch = ovn_nbctl.lswitch_add(name, other_cfg)
if start_cidr:
lswitch["cidr"] = cidr
LOG.info("create %(name)s %(cidr)s" % \
{"name": name, "cidr": lswitch.get("cidr", "")})
lswitches.append(lswitch)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lswitches
def _create_routers(self, router_create_args):
self.RESOURCE_NAME_FORMAT = "lrouter_XXXXXX_XXXXXX"
amount = router_create_args.get("amount", 1)
batch = router_create_args.get("batch", 1)
ovn_nbctl = self._get_ovn_controller(self.install_method)
ovn_nbctl.enable_batch_mode()
flush_count = batch
lrouters = []
for i in range(amount):
name = self.generate_random_name()
lrouter = ovn_nbctl.lrouter_add(name)
lrouters.append(lrouter)
flush_count -= 1
if flush_count < 1:
ovn_nbctl.flush()
flush_count = batch
ovn_nbctl.flush() # ensure all commands are run
ovn_nbctl.enable_batch_mode(False)
return lrouters
def _connect_network_to_router(self, router, network):
LOG.info("Connect network %s to router %s" % (network["name"], router["name"]))
ovn_nbctl = self.controller_client("ovn-nbctl")
ovn_nbctl.set_sandbox("controller-sandbox", self.install_method,
self.context['controller']['host_container'])
ovn_nbctl.enable_batch_mode(False)
base_mac = [i[:2] for i in self.task["uuid"].split('-')]
base_mac[0] = str(hex(int(base_mac[0], 16) & 254))
base_mac[3:] = ['00']*3
mac = utils.get_random_mac(base_mac)
lrouter_port = ovn_nbctl.lrouter_port_add(router["name"], network["name"], mac,
str(network["cidr"]))
ovn_nbctl.flush()
switch_router_port = "rp-" + network["name"]
lport = ovn_nbctl.lswitch_port_add(network["name"], switch_router_port)
ovn_nbctl.db_set('Logical_Switch_Port', switch_router_port,
('options', {"router-port":network["name"]}),
('type', 'router'),
('addresses', 'router'))
ovn_nbctl.flush()
def <|fim_middle|>(self, lnetworks, lrouters, networks_per_router):
for lrouter in lrouters:
LOG.info("Connect %s networks to router %s" % (networks_per_router, lrouter["name"]))
for lnetwork in lnetworks[:networks_per_router]:
LOG.info("connect networks %s cidr %s" % (lnetwork["name"], lnetwork["cidr"]))
self._connect_network_to_router(lrouter, lnetwork)
lnetworks = lnetworks[networks_per_router:]
<|fim▁end|> | _connect_networks_to_routers |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id)
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
<|fim▁hole|> if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
<|fim_middle|>
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id)
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId'] |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
<|fim_middle|>
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id)
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
<|fim_middle|>
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id)
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id)
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
<|fim_middle|>
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id)
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
<|fim_middle|>
def getPremades( summonerName, lolAPIKey, past10 ):
w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id)
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
<|fim_middle|>
<|fim▁end|> | w = riotwatcher.RiotWatcher(lolAPIKey, default_region=riotwatcher.EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames |
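# Each row in this dump pairs a prompt (friendship.py with one span cut out and
# replaced by the <|fim_middle|> marker, wrapped in <|file_name|> / <|fim▁begin|> /
# <|fim▁end|> tokens) with a completion holding the missing span. A small sketch of
# how such a pair can be spliced back into plain source text; the marker strings are
# taken verbatim from the rows above, the demo snippet is made up.
def reassemble_fim_row(prompt, completion):
    # drop the file-name header and the begin/end wrapper tokens
    body = prompt.split('<|fim▁begin|>', 1)[-1]
    body = body.replace('<|fim▁end|>', '')
    # splice the held-out completion back where the hole marker sits
    return body.replace('<|fim_middle|>', completion, 1)

if __name__ == '__main__':
    demo_prompt = ('<|file_name|>demo.py<|end_file_name|><|fim▁begin|>'
                   'def add(a, b):\n    <|fim_middle|>\n<|fim▁end|>')
    print(reassemble_fim_row(demo_prompt, 'return a + b'))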
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
<|fim_middle|>
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | return p['teamId'] |
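# computeFriendship (shown in full in the surrounding rows) scores every pair of
# summoners by how many past game ids they have in common. A simplified, self-
# contained equivalent built on set intersections and toy data; note that the
# original code initialises a pair at 1 and then increments, so its first shared
# game scores 2, while this sketch reports the raw number of shared games.
from itertools import combinations

def shared_game_counts(id_sets):
    counts = {}
    for a, b in combinations(id_sets, 2):
        shared = len(id_sets[a] & id_sets[b])
        if shared:
            counts.setdefault(a, {})[b] = shared
            counts.setdefault(b, {})[a] = shared
    return counts

if __name__ == '__main__':
    toy_id_sets = {10: {100, 101, 102}, 11: {101, 102, 200}, 12: {300}}
    print(shared_game_counts(toy_id_sets))   # -> {10: {11: 2}, 11: {10: 2}}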
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
<|fim_middle|>
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | summoners.append(p['summonerId']) |
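# The hole completed above sits inside getSummonerIdsOfOpponentTeam: look up the
# summoner's own teamId, then collect every participant on the other team. The same
# selection applied to a hand-written current-game payload; the field names mirror
# the ones used in the file, the data itself is made up.
def opponent_ids(own_summoner_id, game):
    own_team = None
    for p in game['participants']:
        if p['summonerId'] == own_summoner_id:
            own_team = p['teamId']
    return [p['summonerId'] for p in game['participants'] if p['teamId'] != own_team]

if __name__ == '__main__':
    demo_game = {'participants': [
        {'summonerId': 1, 'teamId': 100},
        {'summonerId': 2, 'teamId': 100},
        {'summonerId': 3, 'teamId': 200},
        {'summonerId': 4, 'teamId': 200},
    ]}
    print(opponent_ids(1, demo_game))   # -> [3, 4]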
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
<|fim_middle|>
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | matchlist = response['matches'] |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
<|fim_middle|>
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0 |
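# The span filled in above is the rate-limit guard of queryPastGameIdSets: after
# roughly 8 API requests it sleeps for 10 seconds before continuing. The same
# pattern pulled out into a small reusable helper; the 8-request / 10-second budget
# comes from the code above, everything else is an illustrative sketch.
import time

class RequestThrottle(object):
    def __init__(self, max_requests=8, pause_seconds=10):
        self.max_requests = max_requests
        self.pause_seconds = pause_seconds
        self._used = 0

    def tick(self, cost=1):
        # record `cost` issued requests and pause once the budget is spent
        self._used += cost
        if self._used >= self.max_requests:
            time.sleep(self.pause_seconds)
            self._used = 0

if __name__ == '__main__':
    throttle = RequestThrottle(max_requests=4, pause_seconds=0.1)
    for batch in range(10):
        # stand-in for the get_match_list / get_recent_games calls made per summoner
        throttle.tick(cost=2)
        print('issued request batch', batch)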
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
<|fim_middle|>
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | sleep(10)
rqs = 0 |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
<|fim_middle|>
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1 |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
<|fim_middle|>
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1 |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
<|fim_middle|>
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | friendships[id][id2] = 1 |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
<|fim_middle|>
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | friendships[id2][id] = 1 |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
<|fim_middle|>
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | premades.append(group) |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
<|fim_middle|>
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | finGroup = finGroup | group2 |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
<|fim_middle|>
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | finPremades.append(finGroup) |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
<|fim_middle|>
return premadesNames<|fim▁end|> | for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames) |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def <|fim_middle|>( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | getTeamOfSummoner |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def <|fim_middle|>( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | getSummonerIdsOfOpponentTeam |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def <|fim_middle|>( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | queryPastGameIdSets |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def <|fim_middle|>( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | computeFriendship |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def <|fim_middle|>( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def getPremades( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | computePremades |
<|file_name|>friendship.py<|end_file_name|><|fim▁begin|>from riotwatcher import *
from time import sleep
import logging
log = logging.getLogger('log')
def getTeamOfSummoner( summonerId, game ):
for p in game['participants']:
if p['summonerId'] == summonerId:
return p['teamId']
def getSummonerIdsOfOpponentTeam( summonerId, game ):
teamId = getTeamOfSummoner(summonerId, game)
summoners = []
for p in game['participants']:
if p['teamId'] != teamId:
summoners.append(p['summonerId'])
return summoners
def queryPastGameIdSets( w, summonerIds, past10 ):
sets = {}
rqs = 0
for id in summonerIds:
response = w.get_match_list(id);
matchlist = []
if 'matches' in response:
matchlist = response['matches']
gamelist = []
if past10:
gamelist = w.get_recent_games(id)['games']
rqs += 2
if rqs >= 8:
sleep(10)
rqs = 0
log.debug('matches of summoner '+str(id)+': '+str(len(matchlist)))
s = set()
for match in matchlist:
s.add(match['matchId'])
for game in gamelist:
s.add(game['gameId'])
sets[id] = s
return sets
def computeFriendship( IdSets ):
searchedSets = set()
friendships = {}
for id in IdSets:
friendships[id] = {}
for id in IdSets:
searchedSets.add(id)
for gameId in IdSets[id]:
for id2 in IdSets:
if not id2 in searchedSets:
if gameId in IdSets[id2]:
if not id2 in friendships[id]:
friendships[id][id2] = 1
if not id in friendships[id2]:
friendships[id2][id] = 1
friendships[id][id2] += 1
friendships[id2][id] += 1
return friendships
def computePremades( friendshipRelations ):
premades = []
for id in friendshipRelations:
group = set(friendshipRelations[id].keys())
group.add(id)
if group not in premades:
premades.append(group)
finPremades = []
for group1 in premades:
finGroup = group1
for group2 in premades:
if group1 != group2 and len(group1 & group2) > 0:
finGroup = finGroup | group2
if finGroup not in finPremades:
finPremades.append(finGroup)
return finPremades
def <|fim_middle|>( summonerName, lolAPIKey, past10 ):
w = RiotWatcher(lolAPIKey, default_region=EUROPE_WEST)
id = w.get_summoner(name=summonerName)['id']
game = w.get_current_game(id)
participants = game['participants']
idToParticipantsMap = {}
for p in participants:
log.info(p['summonerName'].encode('utf8')+' '+str(p['summonerId'])+' '+str(p['teamId']))
idToParticipantsMap[p['summonerId']] = p
log.debug(getSummonerIdsOfOpponentTeam(id,game))
gameIdSets = queryPastGameIdSets( w, getSummonerIdsOfOpponentTeam(id,game), past10 )
friendshipRelations = computeFriendship(gameIdSets)
log.debug(friendshipRelations)
premades = computePremades(friendshipRelations)
premadesNames = []
for group in premades:
groupNames = []
if len(group) > 1:
for summonerId in group:
groupNames.append(idToParticipantsMap[summonerId]['summonerName'])
premadesNames.append(groupNames)
return premadesNames<|fim▁end|> | getPremades |
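# getPremades, completed in the row above, chains the pieces together: take the
# opposing team's summoner ids, collect each one's past game ids, score the
# co-occurrences with computeFriendship and merge the groups with computePremades.
# A sketch of the same flow with the Riot API calls replaced by hand-written
# game-id sets, so the grouping logic can be exercised without a network key; the
# co-occurrence scoring is simplified to raw shared-game counts.
def detect_premades_offline(past_game_ids_by_summoner):
    friendships = {}
    ids = list(past_game_ids_by_summoner)
    for i, a in enumerate(ids):
        for b in ids[i + 1:]:
            shared = len(past_game_ids_by_summoner[a] & past_game_ids_by_summoner[b])
            if shared:
                friendships.setdefault(a, {})[b] = shared
                friendships.setdefault(b, {})[a] = shared
    groups = []
    for summoner, friends in friendships.items():
        group = set(friends) | {summoner}
        if group not in groups:
            groups.append(group)
    merged = []
    for g1 in groups:
        fin = g1
        for g2 in groups:
            if g1 != g2 and g1 & g2:
                fin = fin | g2
        if fin not in merged:
            merged.append(fin)
    return [g for g in merged if len(g) > 1]

if __name__ == '__main__':
    stub = {'A': {1, 2, 3}, 'B': {2, 3, 9}, 'C': {3, 9}, 'D': {42}, 'E': {77}}
    print(detect_premades_offline(stub))   # -> [{'A', 'B', 'C'}]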