Columns: query: string (5 to 1.23k chars) | positive: string (53 to 15.2k chars) | id_: int64 (0 to 252k) | task_name: string (87 to 242 chars) | negative: sequence (20 to 553 items)
Performs a Q iteration and updates Vm.
def _q_iteration(self, Q, Bpp_solver, Vm, Va, pq):
    dVm = -Bpp_solver.solve(Q)
    # Update voltage.
    Vm[pq] = Vm[pq] + dVm
    V = Vm * exp(1j * Va)
    return V, Vm, Va
800
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/ac_pf.py#L494-L503
[ "def", "_handle_eio_message", "(", "self", ",", "sid", ",", "data", ")", ":", "if", "sid", "in", "self", ".", "_binary_packet", ":", "pkt", "=", "self", ".", "_binary_packet", "[", "sid", "]", "if", "pkt", ".", "add_attachment", "(", "data", ")", ":", "del", "self", ".", "_binary_packet", "[", "sid", "]", "if", "pkt", ".", "packet_type", "==", "packet", ".", "BINARY_EVENT", ":", "self", ".", "_handle_event", "(", "sid", ",", "pkt", ".", "namespace", ",", "pkt", ".", "id", ",", "pkt", ".", "data", ")", "else", ":", "self", ".", "_handle_ack", "(", "sid", ",", "pkt", ".", "namespace", ",", "pkt", ".", "id", ",", "pkt", ".", "data", ")", "else", ":", "pkt", "=", "packet", ".", "Packet", "(", "encoded_packet", "=", "data", ")", "if", "pkt", ".", "packet_type", "==", "packet", ".", "CONNECT", ":", "self", ".", "_handle_connect", "(", "sid", ",", "pkt", ".", "namespace", ")", "elif", "pkt", ".", "packet_type", "==", "packet", ".", "DISCONNECT", ":", "self", ".", "_handle_disconnect", "(", "sid", ",", "pkt", ".", "namespace", ")", "elif", "pkt", ".", "packet_type", "==", "packet", ".", "EVENT", ":", "self", ".", "_handle_event", "(", "sid", ",", "pkt", ".", "namespace", ",", "pkt", ".", "id", ",", "pkt", ".", "data", ")", "elif", "pkt", ".", "packet_type", "==", "packet", ".", "ACK", ":", "self", ".", "_handle_ack", "(", "sid", ",", "pkt", ".", "namespace", ",", "pkt", ".", "id", ",", "pkt", ".", "data", ")", "elif", "pkt", ".", "packet_type", "==", "packet", ".", "BINARY_EVENT", "or", "pkt", ".", "packet_type", "==", "packet", ".", "BINARY_ACK", ":", "self", ".", "_binary_packet", "[", "sid", "]", "=", "pkt", "elif", "pkt", ".", "packet_type", "==", "packet", ".", "ERROR", ":", "raise", "ValueError", "(", "'Unexpected ERROR packet.'", ")", "else", ":", "raise", "ValueError", "(", "'Unknown packet type.'", ")" ]
Signal with sinusoidal frequency modulation.
def fmsin(N, fnormin=0.05, fnormax=0.45, period=None, t0=None,
          fnorm0=0.25, pm1=1):
    if period is None:
        period = N
    if t0 is None:
        t0 = N / 2
    pm1 = nx.sign(pm1)

    fnormid = 0.5 * (fnormax + fnormin)
    delta = 0.5 * (fnormax - fnormin)
    phi = -pm1 * nx.arccos((fnorm0 - fnormid) / delta)
    time = nx.arange(1, N) - t0
    phase = 2 * nx.pi * fnormid * time + delta * period * \
        (nx.sin(2 * nx.pi * time / period + phi) - nx.sin(phi))
    y = nx.exp(1j * phase)
    iflaw = fnormid + delta * nx.cos(2 * nx.pi * time / period + phi)
    return y, iflaw
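A minimal usage sketch, assuming `nx` is NumPy (the example module imports it under that alias); the sample count and band edges below are illustrative:
import numpy as nx

# 256-point analytic signal sweeping between 0.05 and 0.45 of the
# sampling rate; iflaw is the instantaneous-frequency law.
y, iflaw = fmsin(256, fnormin=0.05, fnormax=0.45, period=128)
print(y.shape, iflaw.min(), iflaw.max())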
801
https://github.com/melizalab/libtfr/blob/9f7e7705793d258a0b205f185b20e3bbcda473da/examples/tfr_tm.py#L14-L56
[ "def", "on_failure", "(", "self", ",", "entity", ")", ":", "logger", ".", "error", "(", "\"Login failed, reason: %s\"", "%", "entity", ".", "getReason", "(", ")", ")", "self", ".", "connected", "=", "False" ]
Returns a case from the given file.
def _parse_rdf(self, file):
    store = Graph()
    store.parse(file)
    print len(store)
802
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/rdf.py#L72-L78
[ "def", "calculate_pore_diameter", "(", "self", ")", ":", "self", ".", "pore_diameter", ",", "self", ".", "pore_closest_atom", "=", "pore_diameter", "(", "self", ".", "elements", ",", "self", ".", "coordinates", ")", "self", ".", "properties", "[", "'pore_diameter'", "]", "=", "{", "'diameter'", ":", "self", ".", "pore_diameter", ",", "'atom'", ":", "int", "(", "self", ".", "pore_closest_atom", ")", ",", "}", "return", "self", ".", "pore_diameter" ]
Loads installed metrics plugins.
def load_plugins(group='metrics.plugin.10'):
    # On using entry points:
    # http://stackoverflow.com/questions/774824/explain-python-entry-points
    file_processors = []
    build_processors = []
    for ep in pkg_resources.iter_entry_points(group, name=None):
        log.debug('loading \'%s\'', ep)
        plugin = ep.load()  # load the plugin
        if hasattr(plugin, 'get_file_processors'):
            file_processors.extend(plugin.get_file_processors())
        if hasattr(plugin, 'get_build_processors'):
            build_processors.extend(plugin.get_build_processors())
    return file_processors, build_processors
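For context, plugins are discovered through setuptools entry points. A hypothetical plugin package would register itself under the same group (package and module names below are illustrative); the loaded module is then expected to expose get_file_processors() and/or get_build_processors():
# Hypothetical setup.py for a plugin package.
from setuptools import setup

setup(
    name='metrics-myplugin',          # illustrative package name
    version='0.1',
    py_modules=['myplugin'],
    entry_points={
        'metrics.plugin.10': [        # group scanned by load_plugins()
            'myplugin = myplugin',    # ep.load() returns this module
        ],
    },
)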
803
https://github.com/finklabs/metrics/blob/fd9974af498831664b9ae8e8f3834e1ec2e8a699/metrics/plugins.py#L11-L25
[ "def", "_bsecurate_cli_compare_basis_files", "(", "args", ")", ":", "ret", "=", "curate", ".", "compare_basis_files", "(", "args", ".", "file1", ",", "args", ".", "file2", ",", "args", ".", "readfmt1", ",", "args", ".", "readfmt2", ",", "args", ".", "uncontract_general", ")", "if", "ret", ":", "return", "\"No difference found\"", "else", ":", "return", "\"DIFFERENCES FOUND. SEE ABOVE\"" ]
Returns a case object from the given input file object. The data format may be optionally specified.
def read_case(input, format=None):
    # Map of data file types to readers.
    format_map = {"matpower": MATPOWERReader,
                  "psse": PSSEReader,
                  "pickle": PickleReader}

    # Read case data.
    if format in format_map:
        reader_klass = format_map[format]
        reader = reader_klass()
        case = reader.read(input)
    else:
        # Try each of the readers in turn.
        for reader_klass in format_map.values():
            reader = reader_klass()
            try:
                case = reader.read(input)
                if case is not None:
                    break
            except:
                pass
        else:
            case = None

    return case
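A brief usage sketch with an illustrative file name; if format is omitted or unknown, each reader is tried in turn:
with open('case30.m') as input_file:   # illustrative MATPOWER case file
    case = read_case(input_file, format='matpower')
if case is None:
    print('No reader could parse the input.')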
804
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/main.py#L48-L74
[ "def", "_sync", "(", "self", ")", ":", "if", "(", "self", ".", "_opcount", ">", "self", ".", "checkpoint_operations", "or", "datetime", ".", "now", "(", ")", ">", "self", ".", "_last_sync", "+", "self", ".", "checkpoint_timeout", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Synchronizing queue metadata.\"", ")", "self", ".", "queue_metadata", ".", "sync", "(", ")", "self", ".", "_last_sync", "=", "datetime", ".", "now", "(", ")", "self", ".", "_opcount", "=", "0", "else", ":", "self", ".", "log", ".", "debug", "(", "\"NOT synchronizing queue metadata.\"", ")" ]
Detects the format of a network data file according to the file extension and the header.
def detect_data_file(input, file_name=""):
    _, ext = os.path.splitext(file_name)

    if ext == ".m":
        line = input.readline()  # first line
        if line.startswith("function"):
            type = "matpower"
            logger.info("Recognised MATPOWER data file.")
        elif line.startswith("Bus.con") or line.startswith("%"):
            type = "psat"
            logger.info("Recognised PSAT data file.")
        else:
            type = "unrecognised"
        input.seek(0)  # reset buffer for parsing
    elif (ext == ".raw") or (ext == ".psse"):
        type = "psse"
        logger.info("Recognised PSS/E data file.")
    elif (ext == ".pkl") or (ext == ".pickle"):
        type = "pickle"
        logger.info("Recognised pickled case.")
    else:
        type = None

    return type
805
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/main.py#L80-L109
[ "async", "def", "_wait", "(", "self", ",", "entity_type", ",", "entity_id", ",", "action", ",", "predicate", "=", "None", ")", ":", "q", "=", "asyncio", ".", "Queue", "(", "loop", "=", "self", ".", "_connector", ".", "loop", ")", "async", "def", "callback", "(", "delta", ",", "old", ",", "new", ",", "model", ")", ":", "await", "q", ".", "put", "(", "delta", ".", "get_id", "(", ")", ")", "self", ".", "add_observer", "(", "callback", ",", "entity_type", ",", "action", ",", "entity_id", ",", "predicate", ")", "entity_id", "=", "await", "q", ".", "get", "(", ")", "# object might not be in the entity_map if we were waiting for a", "# 'remove' action", "return", "self", ".", "state", ".", "_live_entity_map", "(", "entity_type", ")", ".", "get", "(", "entity_id", ")" ]
Writes the case data in Graphviz DOT language.
def write(self, file_or_filename, prog=None, format='xdot'):
    if prog is None:
        file = super(DotWriter, self).write(file_or_filename)
    else:
        buf = StringIO.StringIO()
        super(DotWriter, self).write(buf)
        buf.seek(0)
        data = self.create(buf.getvalue(), prog, format)

        if isinstance(file_or_filename, basestring):
            file = None
            try:
                file = open(file_or_filename, "wb")
            except:
                logger.error("Error opening %s." % file_or_filename)
            finally:
                if file is not None:
                    file.write(data)
                    file.close()
        else:
            file = file_or_filename
            file.write(data)

    return file
806
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/dot.py#L71-L100
[ "def", "unchunk", "(", "self", ")", ":", "if", "self", ".", "padding", "!=", "len", "(", "self", ".", "shape", ")", "*", "(", "0", ",", ")", ":", "shape", "=", "self", ".", "values", ".", "shape", "arr", "=", "empty", "(", "shape", ",", "dtype", "=", "object", ")", "for", "inds", "in", "product", "(", "*", "[", "arange", "(", "s", ")", "for", "s", "in", "shape", "]", ")", ":", "slices", "=", "[", "]", "for", "i", ",", "p", ",", "n", "in", "zip", "(", "inds", ",", "self", ".", "padding", ",", "shape", ")", ":", "start", "=", "None", "if", "(", "i", "==", "0", "or", "p", "==", "0", ")", "else", "p", "stop", "=", "None", "if", "(", "i", "==", "n", "-", "1", "or", "p", "==", "0", ")", "else", "-", "p", "slices", ".", "append", "(", "slice", "(", "start", ",", "stop", ",", "None", ")", ")", "arr", "[", "inds", "]", "=", "self", ".", "values", "[", "inds", "]", "[", "tuple", "(", "slices", ")", "]", "else", ":", "arr", "=", "self", ".", "values", "return", "allstack", "(", "arr", ".", "tolist", "(", ")", ")" ]
Writes bus data to file.
def write_bus_data(self, file, padding=" "):
    for bus in self.case.buses:
        attrs = ['%s="%s"' % (k, v) for k, v in self.bus_attr.iteritems()]
        # attrs.insert(0, 'label="%s"' % bus.name)
        attr_str = ", ".join(attrs)

        file.write("%s%s [%s];\n" % (padding, bus.name, attr_str))
807
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/dot.py#L114-L122
[ "def", "_unwrap_result", "(", "action", ",", "result", ")", ":", "if", "not", "result", ":", "return", "elif", "action", "in", "{", "'DeleteItem'", ",", "'PutItem'", ",", "'UpdateItem'", "}", ":", "return", "_unwrap_delete_put_update_item", "(", "result", ")", "elif", "action", "==", "'GetItem'", ":", "return", "_unwrap_get_item", "(", "result", ")", "elif", "action", "==", "'Query'", "or", "action", "==", "'Scan'", ":", "return", "_unwrap_query_scan", "(", "result", ")", "elif", "action", "==", "'CreateTable'", ":", "return", "_unwrap_create_table", "(", "result", ")", "elif", "action", "==", "'DescribeTable'", ":", "return", "_unwrap_describe_table", "(", "result", ")", "return", "result" ]
Writes branch data in Graphviz DOT language.
def write_branch_data(self, file, padding=" "):
    attrs = ['%s="%s"' % (k, v) for k, v in self.branch_attr.iteritems()]
    attr_str = ", ".join(attrs)

    for br in self.case.branches:
        file.write("%s%s -> %s [%s];\n" %
                   (padding, br.from_bus.name, br.to_bus.name, attr_str))
808
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/dot.py#L125-L133
[ "def", "_batch_norm_new_params", "(", "input_shape", ",", "rng", ",", "axis", "=", "(", "0", ",", "1", ",", "2", ")", ",", "center", "=", "True", ",", "scale", "=", "True", ",", "*", "*", "kwargs", ")", ":", "del", "rng", ",", "kwargs", "axis", "=", "(", "axis", ",", ")", "if", "np", ".", "isscalar", "(", "axis", ")", "else", "axis", "shape", "=", "tuple", "(", "d", "for", "i", ",", "d", "in", "enumerate", "(", "input_shape", ")", "if", "i", "not", "in", "axis", ")", "beta", "=", "np", ".", "zeros", "(", "shape", ",", "dtype", "=", "'float32'", ")", "if", "center", "else", "(", ")", "gamma", "=", "np", ".", "ones", "(", "shape", ",", "dtype", "=", "'float32'", ")", "if", "scale", "else", "(", ")", "return", "(", "beta", ",", "gamma", ")" ]
Writes generator data in Graphviz DOT language.
def write_generator_data(self, file, padding=" "):
    attrs = ['%s="%s"' % (k, v) for k, v in self.gen_attr.iteritems()]
    attr_str = ", ".join(attrs)

    edge_attrs = ['%s="%s"' % (k, v) for k, v in {}.iteritems()]  # no edge attributes by default
    edge_attr_str = ", ".join(edge_attrs)

    for g in self.case.generators:
        # Generator node.
        file.write("%s%s [%s];\n" % (padding, g.name, attr_str))
        # Edge connecting generator and bus.
        file.write("%s%s -> %s [%s];\n" %
                   (padding, g.name, g.bus.name, edge_attr_str))
809
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/dot.py#L136-L151
[ "def", "_batch_norm_new_params", "(", "input_shape", ",", "rng", ",", "axis", "=", "(", "0", ",", "1", ",", "2", ")", ",", "center", "=", "True", ",", "scale", "=", "True", ",", "*", "*", "kwargs", ")", ":", "del", "rng", ",", "kwargs", "axis", "=", "(", "axis", ",", ")", "if", "np", ".", "isscalar", "(", "axis", ")", "else", "axis", "shape", "=", "tuple", "(", "d", "for", "i", ",", "d", "in", "enumerate", "(", "input_shape", ")", "if", "i", "not", "in", "axis", ")", "beta", "=", "np", ".", "zeros", "(", "shape", ",", "dtype", "=", "'float32'", ")", "if", "center", "else", "(", ")", "gamma", "=", "np", ".", "ones", "(", "shape", ",", "dtype", "=", "'float32'", ")", "if", "scale", "else", "(", ")", "return", "(", "beta", ",", "gamma", ")" ]
Creates and returns a representation of the graph, using the Graphviz layout program given by prog, according to the given format.
def create(self, dotdata, prog="dot", format="xdot"):
    import os, tempfile
    from dot2tex.dotparsing import find_graphviz

    # Map Graphviz executable names to their paths.
    progs = find_graphviz()
    if progs is None:
        logger.warning("GraphViz executables not found.")
        return None
    if not prog in progs:
        logger.warning('Invalid program [%s]. Available programs are: %s' %
                       (prog, progs.keys()))
        return None

    # Make a temporary file ...
    tmp_fd, tmp_name = tempfile.mkstemp()
    os.close(tmp_fd)
    # ... and save the graph to it.
    dot_fd = file(tmp_name, "w+b")
    dot_fd.write(dotdata)  # DOT language.
    dot_fd.close()

    # Get the temporary file directory name.
    tmp_dir = os.path.dirname(tmp_name)

    # Process the file using the layout program, specifying the format.
    p = subprocess.Popen((progs[prog], '-T' + format, tmp_name),
                         cwd=tmp_dir,
                         stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    stderr = p.stderr
    stdout = p.stdout

    # Make sense of the standard output from the process.
    stdout_output = list()
    while True:
        data = stdout.read()
        if not data:
            break
        stdout_output.append(data)
    stdout.close()
    if stdout_output:
        stdout_output = ''.join(stdout_output)

    # Similarly so for any standard error.
    if not stderr.closed:
        stderr_output = list()
        while True:
            data = stderr.read()
            if not data:
                break
            stderr_output.append(data)
        stderr.close()
        if stderr_output:
            stderr_output = ''.join(stderr_output)

    status = p.wait()

    if status != 0:
        logger.error("Program [%s] terminated with status: %d. stderr "
                     "follows: %s" % (prog, status, stderr_output))
    elif stderr_output:
        logger.error("%s", stderr_output)

    # Remove the temporary file.
    os.unlink(tmp_name)

    return stdout_output
810
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/dot.py#L157-L235
[ "def", "cleanup", "(", "self", ")", ":", "self", ".", "_processing_stop", "=", "True", "self", ".", "_wakeup_processing_thread", "(", ")", "self", ".", "_processing_stopped_event", ".", "wait", "(", "3", ")" ]
Computes output in XML format.
def format(file_metrics, build_metrics):
    def indent(elem, level=0):
        i = "\n" + level * "  "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + "  "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for elem in elem:
                indent(elem, level + 1)
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i

    root = ET.Element('metrics')

    # file_metrics
    files = ET.Element('files')
    root.append(files)
    for key in file_metrics.keys():
        tmp_file = ET.SubElement(files, "file",
                                 {'name': key,
                                  'language': file_metrics[key]['language']})
        for name in file_metrics[key].keys():
            if name == 'language':
                continue
            tmp_metric = ET.SubElement(tmp_file, "metric",
                                       {'name': name,
                                        'value': str(file_metrics[key][name])})

    # build_metrics
    if build_metrics:
        build = ET.Element('build')
        root.append(build)
        # TODO

    indent(root)
    if PY3:
        body = ET.tostring(root, encoding='unicode')
    else:
        body = ET.tostring(root)
    return body
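A usage sketch with illustrative metrics, relying on the module-level ET import; each per-file dict carries a 'language' key plus arbitrary metric name/value pairs, which is exactly what the formatter reads:
# Illustrative input: one file with two metrics.
file_metrics = {
    'metrics/plugins.py': {'language': 'Python', 'sloc': 42, 'mccabe': 3},
}
print(format(file_metrics, build_metrics=None))
# -> <metrics><files><file name="metrics/plugins.py" language="Python">...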
811
https://github.com/finklabs/metrics/blob/fd9974af498831664b9ae8e8f3834e1ec2e8a699/metrics/outputformat_xml.py#L16-L59
[ "def", "defBoundary", "(", "self", ")", ":", "self", ".", "BoroCnstNatAll", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "# Find the natural borrowing constraint conditional on next period's state", "for", "j", "in", "range", "(", "self", ".", "StateCount", ")", ":", "PermShkMinNext", "=", "np", ".", "min", "(", "self", ".", "IncomeDstn_list", "[", "j", "]", "[", "1", "]", ")", "TranShkMinNext", "=", "np", ".", "min", "(", "self", ".", "IncomeDstn_list", "[", "j", "]", "[", "2", "]", ")", "self", ".", "BoroCnstNatAll", "[", "j", "]", "=", "(", "self", ".", "solution_next", ".", "mNrmMin", "[", "j", "]", "-", "TranShkMinNext", ")", "*", "(", "self", ".", "PermGroFac_list", "[", "j", "]", "*", "PermShkMinNext", ")", "/", "self", ".", "Rfree_list", "[", "j", "]", "self", ".", "BoroCnstNat_list", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "self", ".", "mNrmMin_list", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "self", ".", "BoroCnstDependency", "=", "np", ".", "zeros", "(", "(", "self", ".", "StateCount", ",", "self", ".", "StateCount", ")", ")", "+", "np", ".", "nan", "# The natural borrowing constraint in each current state is the *highest*", "# among next-state-conditional natural borrowing constraints that could", "# occur from this current state.", "for", "i", "in", "range", "(", "self", ".", "StateCount", ")", ":", "possible_next_states", "=", "self", ".", "MrkvArray", "[", "i", ",", ":", "]", ">", "0", "self", ".", "BoroCnstNat_list", "[", "i", "]", "=", "np", ".", "max", "(", "self", ".", "BoroCnstNatAll", "[", "possible_next_states", "]", ")", "# Explicitly handle the \"None\" case: ", "if", "self", ".", "BoroCnstArt", "is", "None", ":", "self", ".", "mNrmMin_list", "[", "i", "]", "=", "self", ".", "BoroCnstNat_list", "[", "i", "]", "else", ":", "self", ".", "mNrmMin_list", "[", "i", "]", "=", "np", ".", "max", "(", "[", "self", ".", "BoroCnstNat_list", "[", "i", "]", ",", "self", ".", "BoroCnstArt", "]", ")", "self", ".", "BoroCnstDependency", "[", "i", ",", ":", "]", "=", "self", ".", "BoroCnstNat_list", "[", "i", "]", "==", "self", ".", "BoroCnstNatAll" ]
Asks the user for confirmation.
def ask(message='Are you sure? [y/N]'):
    agree = False
    answer = raw_input(message).lower()
    if answer.startswith('y'):
        agree = True
    return agree
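A one-line usage sketch (Python 2, matching the module's raw_input); the prompt is illustrative:
if not ask('Overwrite existing results? [y/N] '):
    raise SystemExit('Aborted by user.')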
812
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L45-L51
[ "def", "synchronizeLayout", "(", "primary", ",", "secondary", ",", "surface_size", ")", ":", "primary", ".", "configure_bound", "(", "surface_size", ")", "secondary", ".", "configure_bound", "(", "surface_size", ")", "# Check for key size.", "if", "(", "primary", ".", "key_size", "<", "secondary", ".", "key_size", ")", ":", "logging", ".", "warning", "(", "'Normalizing key size from secondary to primary'", ")", "secondary", ".", "key_size", "=", "primary", ".", "key_size", "elif", "(", "primary", ".", "key_size", ">", "secondary", ".", "key_size", ")", ":", "logging", ".", "warning", "(", "'Normalizing key size from primary to secondary'", ")", "primary", ".", "key_size", "=", "secondary", ".", "key_size", "if", "(", "primary", ".", "size", "[", "1", "]", ">", "secondary", ".", "size", "[", "1", "]", ")", ":", "logging", ".", "warning", "(", "'Normalizing layout size from secondary to primary'", ")", "secondary", ".", "set_size", "(", "primary", ".", "size", ",", "surface_size", ")", "elif", "(", "primary", ".", "size", "[", "1", "]", "<", "secondary", ".", "size", "[", "1", "]", ")", ":", "logging", ".", "warning", "(", "'Normalizing layout size from primary to secondary'", ")", "primary", ".", "set_size", "(", "secondary", ".", "size", ",", "surface_size", ")" ]
Parses command-line options and runs the watcher daemon.
def main(prog_args=None):
    if prog_args is None:
        prog_args = sys.argv

    parser = optparse.OptionParser()
    parser.usage = """Usage: %prog [options] [<path>]"""
    parser.add_option("-t", "--test-program", dest="test_program",
                      default="nose",
                      help="specifies the test-program to use. Valid values"
                           " include `nose` (or `nosetests`), `django`, `py`"
                           " (for `py.test`), `symfony`, `jelix`, `phpunit`"
                           " and `tox`")
    parser.add_option("-d", "--debug", dest="debug", action="store_true",
                      default=False)
    parser.add_option('-s', '--size-max', dest='size_max', default=25,
                      type="int",
                      help="Sets the maximum size (in MB) of files.")
    parser.add_option('--custom-args', dest='custom_args', default='',
                      type="str",
                      help="Defines custom arguments to pass after the test"
                           " program command")
    parser.add_option('--ignore-dirs', dest='ignore_dirs', default='',
                      type="str",
                      help="Defines directories to ignore. Use a"
                           " comma-separated list.")
    parser.add_option('-y', '--quiet', dest='quiet', action="store_true",
                      default=False, help="Don't ask for any input.")

    opt, args = parser.parse_args(prog_args)

    if args[1:]:
        path = args[1]
    else:
        path = '.'

    try:
        watcher = Watcher(path, opt.test_program, opt.debug,
                          opt.custom_args, opt.ignore_dirs, opt.quiet)
        watcher_file_size = watcher.file_sizes()
        if watcher_file_size > opt.size_max:
            message = ("It looks like the total file size (%dMb) is larger"
                       " than the `max size` option (%dMb).\nThis may slow"
                       " down the file comparison process, and thus the"
                       " daemon performances.\nDo you wish to continue?"
                       " [y/N] " % (watcher_file_size, opt.size_max))
            if not opt.quiet and not ask(message):
                raise CancelDueToUserRequest('Ok, thx, bye...')

        print "Ready to watch file changes..."
        watcher.loop()
    except (KeyboardInterrupt, SystemExit):
        # Ignore when you exit via Ctrl-C
        pass
    except Exception, msg:
        print msg
        print "Bye"
813
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L235-L288
[ "def", "write_tables", "(", "fname", ",", "table_names", "=", "None", ",", "prefix", "=", "None", ",", "compress", "=", "False", ",", "local", "=", "False", ")", ":", "if", "table_names", "is", "None", ":", "table_names", "=", "list_tables", "(", ")", "tables", "=", "(", "get_table", "(", "t", ")", "for", "t", "in", "table_names", ")", "key_template", "=", "'{}/{{}}'", ".", "format", "(", "prefix", ")", "if", "prefix", "is", "not", "None", "else", "'{}'", "# set compression options to zlib level-1 if compress arg is True", "complib", "=", "compress", "and", "'zlib'", "or", "None", "complevel", "=", "compress", "and", "1", "or", "0", "with", "pd", ".", "HDFStore", "(", "fname", ",", "mode", "=", "'a'", ",", "complib", "=", "complib", ",", "complevel", "=", "complevel", ")", "as", "store", ":", "for", "t", "in", "tables", ":", "# if local arg is True, store only local columns", "columns", "=", "None", "if", "local", "is", "True", ":", "columns", "=", "t", ".", "local_columns", "store", "[", "key_template", ".", "format", "(", "t", ".", "name", ")", "]", "=", "t", ".", "to_frame", "(", "columns", "=", "columns", ")" ]
Checks that the configuration is valid.
def check_configuration(self, file_path, test_program, custom_args):
    # Checking file path
    if not os.path.isdir(file_path):
        raise InvalidFilePath(
            "INVALID CONFIGURATION: file path %s is not a directory" %
            os.path.abspath(file_path))

    if not test_program in IMPLEMENTED_TEST_PROGRAMS:
        raise InvalidTestProgram(
            'The `%s` is unknown, or not yet implemented. Please choose'
            ' another one.' % test_program)

    if custom_args:
        if not self.quiet and not ask(
                "WARNING!!!\nYou are about to run the following command"
                "\n\n $ %s\n\nAre you sure you still want to proceed"
                " [y/N]? " % self.get_cmd()):
            raise CancelDueToUserRequest('Test cancelled...')
814
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L88-L101
[ "def", "fetch", "(", "self", ")", ":", "if", "self", ".", "_file_path", "is", "not", "None", ":", "return", "self", ".", "_file_path", "temp_path", "=", "self", ".", "context", ".", "work_path", "if", "self", ".", "_content_hash", "is", "not", "None", ":", "self", ".", "_file_path", "=", "storage", ".", "load_file", "(", "self", ".", "_content_hash", ",", "temp_path", "=", "temp_path", ")", "return", "self", ".", "_file_path", "if", "self", ".", "response", "is", "not", "None", ":", "self", ".", "_file_path", "=", "random_filename", "(", "temp_path", ")", "content_hash", "=", "sha1", "(", ")", "with", "open", "(", "self", ".", "_file_path", ",", "'wb'", ")", "as", "fh", ":", "for", "chunk", "in", "self", ".", "response", ".", "iter_content", "(", "chunk_size", "=", "8192", ")", ":", "content_hash", ".", "update", "(", "chunk", ")", "fh", ".", "write", "(", "chunk", ")", "self", ".", "_remove_file", "=", "True", "chash", "=", "content_hash", ".", "hexdigest", "(", ")", "self", ".", "_content_hash", "=", "storage", ".", "archive_file", "(", "self", ".", "_file_path", ",", "content_hash", "=", "chash", ")", "if", "self", ".", "http", ".", "cache", "and", "self", ".", "ok", ":", "self", ".", "context", ".", "set_tag", "(", "self", ".", "request_id", ",", "self", ".", "serialize", "(", ")", ")", "self", ".", "retrieved_at", "=", "datetime", ".", "utcnow", "(", ")", ".", "isoformat", "(", ")", "return", "self", ".", "_file_path" ]
Checks if the test program is available in the Python environment.
def check_dependencies(self):
    if self.test_program == 'nose':
        try:
            import nose
        except ImportError:
            sys.exit('Nosetests is not available on your system. Please install it and try to run it again')
    if self.test_program == 'py':
        try:
            import py
        except ImportError:
            sys.exit('py.test is not available on your system. Please install it and try to run it again')
    if self.test_program == 'django':
        try:
            import django
        except ImportError:
            sys.exit('django is not available on your system. Please install it and try to run it again')
    if self.test_program == 'phpunit':
        try:
            process = subprocess.check_call(['phpunit', '--version'])
        except (OSError, subprocess.CalledProcessError):
            sys.exit('phpunit is not available on your system. Please install it and try to run it again')
    if self.test_program == 'tox':
        try:
            import tox
        except ImportError:
            sys.exit('tox is not available on your system. Please install it and try to run it again')
815
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L103-L129
[ "def", "unindex_layers_with_issues", "(", "self", ",", "use_cache", "=", "False", ")", ":", "from", "hypermap", ".", "aggregator", ".", "models", "import", "Issue", ",", "Layer", ",", "Service", "from", "django", ".", "contrib", ".", "contenttypes", ".", "models", "import", "ContentType", "layer_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Layer", ")", "service_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Service", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "layer_type", ".", "id", ")", ":", "unindex_layer", "(", "issue", ".", "content_object", ".", "id", ",", "use_cache", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "service_type", ".", "id", ")", ":", "for", "layer", "in", "issue", ".", "content_object", ".", "layer_set", ".", "all", "(", ")", ":", "unindex_layer", "(", "layer", ".", "id", ",", "use_cache", ")" ]
Returns the full command to be executed at runtime.
def get_cmd(self):
    cmd = None
    if self.test_program in ('nose', 'nosetests'):
        cmd = "nosetests %s" % self.file_path
    elif self.test_program == 'django':
        executable = "%s/manage.py" % self.file_path
        if os.path.exists(executable):
            cmd = "python %s/manage.py test" % self.file_path
        else:
            cmd = "django-admin.py test"
    elif self.test_program == 'py':
        cmd = 'py.test %s' % self.file_path
    elif self.test_program == 'symfony':
        cmd = 'symfony test-all'
    elif self.test_program == 'jelix':
        # as seen on http://jelix.org/articles/fr/manuel-1.1/tests_unitaires
        cmd = 'php tests.php'
    elif self.test_program == 'phpunit':
        cmd = 'phpunit'
    elif self.test_program == 'sphinx':
        cmd = 'make html'
    elif self.test_program == 'tox':
        cmd = 'tox'

    if not cmd:
        raise InvalidTestProgram("The test program %s is unknown. Valid options are: `nose`, `django` and `py`" % self.test_program)

    # Adding custom args
    if self.custom_args:
        cmd = '%s %s' % (cmd, self.custom_args)
    return cmd
816
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L132-L164
[ "def", "deletecols", "(", "X", ",", "cols", ")", ":", "if", "isinstance", "(", "cols", ",", "str", ")", ":", "cols", "=", "cols", ".", "split", "(", "','", ")", "retain", "=", "[", "n", "for", "n", "in", "X", ".", "dtype", ".", "names", "if", "n", "not", "in", "cols", "]", "if", "len", "(", "retain", ")", ">", "0", ":", "return", "X", "[", "retain", "]", "else", ":", "return", "None" ]
Returns True if the file is not ignored.
def include(self, path):
    for extension in IGNORE_EXTENSIONS:
        if path.endswith(extension):
            return False

    parts = path.split(os.path.sep)
    for part in parts:
        if part in self.ignore_dirs:
            return False

    return True
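An illustrative sketch of the filter in use, assuming a configured Watcher instance named watcher with IGNORE_EXTENSIONS and ignore_dirs set as in the module; the paths are made up:
paths = ['src/app.py', 'src/.git/config', 'build/app.pyc']
watched = [p for p in paths if watcher.include(p)]  # drops ignored dirs and extensions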
817
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L167-L176
[ "def", "_conn_string_adodbapi", "(", "self", ",", "db_key", ",", "instance", "=", "None", ",", "conn_key", "=", "None", ",", "db_name", "=", "None", ")", ":", "if", "instance", ":", "_", ",", "host", ",", "username", ",", "password", ",", "database", ",", "_", "=", "self", ".", "_get_access_info", "(", "instance", ",", "db_key", ",", "db_name", ")", "elif", "conn_key", ":", "_", ",", "host", ",", "username", ",", "password", ",", "database", ",", "_", "=", "conn_key", ".", "split", "(", "\":\"", ")", "p", "=", "self", ".", "_get_adoprovider", "(", "instance", ")", "conn_str", "=", "'Provider={};Data Source={};Initial Catalog={};'", ".", "format", "(", "p", ",", "host", ",", "database", ")", "if", "username", ":", "conn_str", "+=", "'User ID={};'", ".", "format", "(", "username", ")", "if", "password", ":", "conn_str", "+=", "'Password={};'", ".", "format", "(", "password", ")", "if", "not", "username", "and", "not", "password", ":", "conn_str", "+=", "'Integrated Security=SSPI;'", "return", "conn_str" ]
Prints the differences between two mappings. For debugging purposes.
def diff_list(self, list1, list2):
    for key in list1:
        if key in list2 and list2[key] != list1[key]:
            print key
        elif key not in list2:
            print key
818
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L205-L211
[ "def", "schedule", "(", "self", ",", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# if the func is already a job object, just schedule that directly.", "if", "isinstance", "(", "func", ",", "Job", ")", ":", "job", "=", "func", "# else, turn it into a job first.", "else", ":", "job", "=", "Job", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", "job", ".", "track_progress", "=", "kwargs", ".", "pop", "(", "'track_progress'", ",", "False", ")", "job", ".", "cancellable", "=", "kwargs", ".", "pop", "(", "'cancellable'", ",", "False", ")", "job", ".", "extra_metadata", "=", "kwargs", ".", "pop", "(", "'extra_metadata'", ",", "{", "}", ")", "job_id", "=", "self", ".", "storage", ".", "schedule_job", "(", "job", ")", "return", "job_id" ]
Runs the appropriate command.
def run(self, cmd):
    print datetime.datetime.now()
    output = subprocess.Popen(cmd, shell=True)
    output = output.communicate()[0]
    print output
819
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L213-L218
[ "def", "start", "(", "st_reg_number", ")", ":", "#st_reg_number = str(st_reg_number)", "weights", "=", "[", "4", ",", "3", ",", "2", ",", "9", ",", "8", ",", "7", ",", "6", ",", "5", ",", "4", ",", "3", ",", "2", "]", "digits", "=", "st_reg_number", "[", ":", "len", "(", "st_reg_number", ")", "-", "2", "]", "check_digits", "=", "st_reg_number", "[", "-", "2", ":", "]", "divisor", "=", "11", "if", "len", "(", "st_reg_number", ")", ">", "13", ":", "return", "False", "sum_total", "=", "0", "for", "i", "in", "range", "(", "len", "(", "digits", ")", ")", ":", "sum_total", "=", "sum_total", "+", "int", "(", "digits", "[", "i", "]", ")", "*", "weights", "[", "i", "]", "rest_division", "=", "sum_total", "%", "divisor", "first_digit", "=", "divisor", "-", "rest_division", "if", "first_digit", "==", "10", "or", "first_digit", "==", "11", ":", "first_digit", "=", "0", "if", "str", "(", "first_digit", ")", "!=", "check_digits", "[", "0", "]", ":", "return", "False", "digits", "=", "digits", "+", "str", "(", "first_digit", ")", "weights", "=", "[", "5", "]", "+", "weights", "sum_total", "=", "0", "for", "i", "in", "range", "(", "len", "(", "digits", ")", ")", ":", "sum_total", "=", "sum_total", "+", "int", "(", "digits", "[", "i", "]", ")", "*", "weights", "[", "i", "]", "rest_division", "=", "sum_total", "%", "divisor", "second_digit", "=", "divisor", "-", "rest_division", "if", "second_digit", "==", "10", "or", "second_digit", "==", "11", ":", "second_digit", "=", "0", "return", "str", "(", "first_digit", ")", "+", "str", "(", "second_digit", ")", "==", "check_digits" ]
Main daemon loop.
def loop(self):
    while True:
        sleep(1)
        new_file_list = self.walk(self.file_path, {})
        if new_file_list != self.file_list:
            if self.debug:
                self.diff_list(new_file_list, self.file_list)
            self.run_tests()
            self.file_list = new_file_list
820
https://github.com/brunobord/tdaemon/blob/733b5bddb4b12bc3db326a192ce5606f28768307/tdaemon.py#L224-L233
[ "def", "set_attrs", "(", "self", ")", ":", "self", ".", "attrs", ".", "table_type", "=", "str", "(", "self", ".", "table_type", ")", "self", ".", "attrs", ".", "index_cols", "=", "self", ".", "index_cols", "(", ")", "self", ".", "attrs", ".", "values_cols", "=", "self", ".", "values_cols", "(", ")", "self", ".", "attrs", ".", "non_index_axes", "=", "self", ".", "non_index_axes", "self", ".", "attrs", ".", "data_columns", "=", "self", ".", "data_columns", "self", ".", "attrs", ".", "nan_rep", "=", "self", ".", "nan_rep", "self", ".", "attrs", ".", "encoding", "=", "self", ".", "encoding", "self", ".", "attrs", ".", "errors", "=", "self", ".", "errors", "self", ".", "attrs", ".", "levels", "=", "self", ".", "levels", "self", ".", "attrs", ".", "metadata", "=", "self", ".", "metadata", "self", ".", "set_info", "(", ")" ]
Computes output in JSON format.
def format(file_metrics, build_metrics):
    metrics = {'files': file_metrics}
    if build_metrics:
        metrics['build'] = build_metrics
    body = json.dumps(metrics, sort_keys=True, indent=4) + '\n'
    return body
821
https://github.com/finklabs/metrics/blob/fd9974af498831664b9ae8e8f3834e1ec2e8a699/metrics/outputformat_json.py#L9-L15
[ "def", "aux", "(", "self", ",", "aux", ")", ":", "if", "aux", "==", "self", ".", "_aux", ":", "return", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "if", "aux", "is", "not", "None", ":", "self", ".", "_aux", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "aux", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: aux port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "aux", ")", ")" ]
Returns the linear equality and inequality constraints.
def split_linear_constraints(A, l, u):
    ieq = []
    igt = []
    ilt = []
    ibx = []
    for i in range(len(l)):
        if abs(u[i] - l[i]) <= EPS:
            ieq.append(i)
        elif (u[i] > 1e10) and (l[i] > -1e10):
            igt.append(i)
        elif (l[i] <= -1e10) and (u[i] < 1e10):
            ilt.append(i)
        elif (abs(u[i] - l[i]) > EPS) and (u[i] < 1e10) and (l[i] > -1e10):
            ibx.append(i)
        else:
            raise ValueError

    Ae = A[ieq, :]
    Ai = sparse([A[ilt, :], -A[igt, :], A[ibx, :], -A[ibx, :]])
    be = u[ieq, :]
    bi = matrix([u[ilt], -l[igt], u[ibx], -l[ibx]])

    return Ae, be, Ai, bi
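In matrix form, the splitting above rewrites the doubly-bounded system \(l \le Ax \le u\) as one equality block and one stacked one-sided inequality block:
\[
A_e x = b_e = u_{eq}, \qquad A_i x \le b_i, \quad
A_i = \begin{bmatrix} A_{lt} \\ -A_{gt} \\ A_{bx} \\ -A_{bx} \end{bmatrix}, \quad
b_i = \begin{bmatrix} u_{lt} \\ -l_{gt} \\ u_{bx} \\ -l_{bx} \end{bmatrix},
\]
so a lower bound \(l_i \le a_i^T x\) becomes \(-a_i^T x \le -l_i\), and each boxed row contributes two inequalities.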
822
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L472-L496
[ "def", "create_stream_subscription", "(", "self", ",", "stream", ",", "on_data", ",", "timeout", "=", "60", ")", ":", "options", "=", "rest_pb2", ".", "StreamSubscribeRequest", "(", ")", "options", ".", "stream", "=", "stream", "manager", "=", "WebSocketSubscriptionManager", "(", "self", ".", "_client", ",", "resource", "=", "'stream'", ",", "options", "=", "options", ")", "# Represent subscription as a future", "subscription", "=", "WebSocketSubscriptionFuture", "(", "manager", ")", "wrapped_callback", "=", "functools", ".", "partial", "(", "_wrap_callback_parse_stream_data", ",", "subscription", ",", "on_data", ")", "manager", ".", "open", "(", "wrapped_callback", ",", "instance", "=", "self", ".", "_instance", ")", "# Wait until a reply or exception is received", "subscription", ".", "reply", "(", "timeout", "=", "timeout", ")", "return", "subscription" ]
Computes the partial derivative of power injection w.r.t. voltage.
def dSbus_dV(Y, V):
    I = Y * V

    diagV = spdiag(V)
    diagIbus = spdiag(I)
    diagVnorm = spdiag(div(V, abs(V)))  # Element-wise division.

    dS_dVm = diagV * conj(Y * diagVnorm) + conj(diagIbus) * diagVnorm
    dS_dVa = 1j * diagV * conj(diagIbus - Y * diagV)

    return dS_dVm, dS_dVa
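These are the standard complex-injection derivatives: with \(S = \operatorname{diag}(V)\,\bar I\) and \(I = YV\), the code evaluates
\[
\frac{\partial S}{\partial V_m} = \operatorname{diag}(V)\,\overline{Y \operatorname{diag}(V_{\mathrm{norm}})} + \overline{\operatorname{diag}(I)}\,\operatorname{diag}(V_{\mathrm{norm}}), \qquad
\frac{\partial S}{\partial V_a} = j\,\operatorname{diag}(V)\,\overline{\operatorname{diag}(I) - Y \operatorname{diag}(V)},
\]
where \(V_{\mathrm{norm}} = V/|V|\) elementwise.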
823
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L502-L518
[ "def", "json_options_to_metadata", "(", "options", ",", "add_brackets", "=", "True", ")", ":", "try", ":", "options", "=", "loads", "(", "'{'", "+", "options", "+", "'}'", "if", "add_brackets", "else", "options", ")", "return", "options", "except", "ValueError", ":", "return", "{", "}" ]
Computes partial derivatives of branch currents w.r.t. voltage.
def dIbr_dV(Yf, Yt, V):
    # nb = len(V)
    Vnorm = div(V, abs(V))
    diagV = spdiag(V)
    diagVnorm = spdiag(Vnorm)

    dIf_dVa = Yf * 1j * diagV
    dIf_dVm = Yf * diagVnorm
    dIt_dVa = Yt * 1j * diagV
    dIt_dVm = Yt * diagVnorm

    # Compute currents.
    If = Yf * V
    It = Yt * V

    return dIf_dVa, dIf_dVm, dIt_dVa, dIt_dVm, If, It
824
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L524-L544
[ "def", "read_json", "(", "cls", ",", "filename", ")", ":", "proxy", "=", "UnitySArrayProxy", "(", ")", "proxy", ".", "load_from_json_record_files", "(", "_make_internal_url", "(", "filename", ")", ")", "return", "cls", "(", "_proxy", "=", "proxy", ")" ]
Computes the branch power flow vector and the partial derivative of branch power flow w.r.t. voltage.
def dSbr_dV(Yf, Yt, V, buses, branches):
    nl = len(branches)
    nb = len(V)

    f = matrix([l.from_bus._i for l in branches])
    t = matrix([l.to_bus._i for l in branches])

    # Compute currents.
    If = Yf * V
    It = Yt * V

    Vnorm = div(V, abs(V))

    diagVf = spdiag(V[f])
    diagIf = spdiag(If)
    diagVt = spdiag(V[t])
    diagIt = spdiag(It)
    diagV = spdiag(V)
    diagVnorm = spdiag(Vnorm)

    ibr = range(nl)
    size = (nl, nb)

    # Partial derivative of S w.r.t voltage phase angle.
    dSf_dVa = 1j * (conj(diagIf) * spmatrix(V[f], ibr, f, size) -
                    diagVf * conj(Yf * diagV))
    dSt_dVa = 1j * (conj(diagIt) * spmatrix(V[t], ibr, t, size) -
                    diagVt * conj(Yt * diagV))

    # Partial derivative of S w.r.t. voltage amplitude.
    dSf_dVm = diagVf * conj(Yf * diagVnorm) + \
        conj(diagIf) * spmatrix(Vnorm[f], ibr, f, size)
    dSt_dVm = diagVt * conj(Yt * diagVnorm) + \
        conj(diagIt) * spmatrix(Vnorm[t], ibr, t, size)

    # Compute power flow vectors.
    Sf = mul(V[f], conj(If))
    St = mul(V[t], conj(It))

    return dSf_dVa, dSf_dVm, dSt_dVa, dSt_dVm, Sf, St
825
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L550-L593
[ "def", "stop_listener", "(", "self", ")", ":", "if", "self", ".", "sock", "is", "not", "None", ":", "self", ".", "sock", ".", "close", "(", ")", "self", ".", "sock", "=", "None", "self", ".", "tracks", "=", "{", "}" ]
Partial derivatives of squared flow magnitudes w.r.t. voltage.
def dAbr_dV(dSf_dVa, dSf_dVm, dSt_dVa, dSt_dVm, Sf, St):
    dAf_dPf = spdiag(2 * Sf.real())
    dAf_dQf = spdiag(2 * Sf.imag())
    dAt_dPt = spdiag(2 * St.real())
    dAt_dQt = spdiag(2 * St.imag())

    # Partial derivative of apparent power magnitude w.r.t voltage
    # phase angle.
    dAf_dVa = dAf_dPf * dSf_dVa.real() + dAf_dQf * dSf_dVa.imag()
    dAt_dVa = dAt_dPt * dSt_dVa.real() + dAt_dQt * dSt_dVa.imag()
    # Partial derivative of apparent power magnitude w.r.t. voltage
    # amplitude.
    dAf_dVm = dAf_dPf * dSf_dVm.real() + dAf_dQf * dSf_dVm.imag()
    dAt_dVm = dAt_dPt * dSt_dVm.real() + dAt_dQt * dSt_dVm.imag()

    return dAf_dVa, dAf_dVm, dAt_dVa, dAt_dVm
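The construction is the chain rule applied to the squared flow magnitude \(A = |S|^2 = P^2 + Q^2\):
\[
\frac{\partial A}{\partial x} = 2P\,\frac{\partial P}{\partial x} + 2Q\,\frac{\partial Q}{\partial x},
\]
which is why each returned sensitivity is the real and imaginary parts of the complex flow derivatives scaled by the diagonal matrices \(2\,\Re(S)\) and \(2\,\Im(S)\).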
826
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L599-L621
[ "def", "save_key_file", "(", "self", ")", ":", "if", "self", ".", "client_key", "is", "None", ":", "return", "if", "self", ".", "key_file_path", ":", "key_file_path", "=", "self", ".", "key_file_path", "else", ":", "key_file_path", "=", "self", ".", "_get_key_file_path", "(", ")", "logger", ".", "debug", "(", "'save keyfile to %s'", ",", "key_file_path", ")", "with", "open", "(", "key_file_path", ",", "'w+'", ")", "as", "f", ":", "raw_data", "=", "f", ".", "read", "(", ")", "key_dict", "=", "{", "}", "if", "raw_data", ":", "key_dict", "=", "json", ".", "loads", "(", "raw_data", ")", "key_dict", "[", "self", ".", "ip", "]", "=", "self", ".", "client_key", "f", ".", "write", "(", "json", ".", "dumps", "(", "key_dict", ")", ")" ]
Computes 2nd derivatives of power injection w.r.t. voltage.
def d2Sbus_dV2(Ybus, V, lam):
    n = len(V)
    Ibus = Ybus * V
    diaglam = spdiag(lam)
    diagV = spdiag(V)

    A = spmatrix(mul(lam, V), range(n), range(n))
    B = Ybus * diagV
    C = A * conj(B)
    D = Ybus.H * diagV
    E = conj(diagV) * (D * diaglam - spmatrix(D * lam, range(n), range(n)))
    F = C - A * spmatrix(conj(Ibus), range(n), range(n))
    G = spmatrix(div(matrix(1.0, (n, 1)), abs(V)), range(n), range(n))

    Gaa = E + F
    Gva = 1j * G * (E - F)
    Gav = Gva.T
    Gvv = G * (C + C.T) * G

    return Gaa, Gav, Gva, Gvv
827
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L627-L648
[ "def", "json_options_to_metadata", "(", "options", ",", "add_brackets", "=", "True", ")", ":", "try", ":", "options", "=", "loads", "(", "'{'", "+", "options", "+", "'}'", "if", "add_brackets", "else", "options", ")", "return", "options", "except", "ValueError", ":", "return", "{", "}" ]
Computes 2nd derivatives of complex branch current w.r.t. voltage.
def d2Ibr_dV2(Ybr, V, lam):
    nb = len(V)
    diaginvVm = spdiag(div(matrix(1.0, (nb, 1)), abs(V)))

    Haa = spdiag(mul(-(Ybr.T * lam), V))
    Hva = -1j * Haa * diaginvVm
    Hav = Hva
    Hvv = spmatrix([], [], [], (nb, nb))

    return Haa, Hav, Hva, Hvv
828
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L654-L665
[ "def", "to_json", "(", "self", ",", "value", ",", "preserve_ro", ")", ":", "if", "hasattr", "(", "value", ",", "'to_json_dict'", ")", ":", "return", "value", ".", "to_json_dict", "(", "preserve_ro", ")", "elif", "isinstance", "(", "value", ",", "dict", ")", "and", "self", ".", "_atype", "==", "ApiConfig", ":", "return", "config_to_api_list", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "return", "value", ".", "strftime", "(", "self", ".", "DATE_FMT", ")", "elif", "isinstance", "(", "value", ",", "list", ")", "or", "isinstance", "(", "value", ",", "tuple", ")", ":", "if", "self", ".", "_is_api_list", ":", "return", "ApiList", "(", "value", ")", ".", "to_json_dict", "(", ")", "else", ":", "return", "[", "self", ".", "to_json", "(", "x", ",", "preserve_ro", ")", "for", "x", "in", "value", "]", "else", ":", "return", "value" ]
Computes 2nd derivatives of complex power flow w.r.t. voltage.
def d2Sbr_dV2(Cbr, Ybr, V, lam):
    nb = len(V)

    diaglam = spdiag(lam)
    diagV = spdiag(V)

    A = Ybr.H * diaglam * Cbr
    B = conj(diagV) * A * diagV
    D = spdiag(mul((A * V), conj(V)))
    E = spdiag(mul((A.T * conj(V)), V))
    F = B + B.T
    G = spdiag(div(matrix(1.0, (nb, 1)), abs(V)))

    Haa = F - D - E
    Hva = 1j * G * (B - B.T - D + E)
    Hav = Hva.T
    Hvv = G * F * G

    return Haa, Hav, Hva, Hvv
829
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L671-L691
[ "def", "copy_uri_options", "(", "hosts", ",", "mongodb_uri", ")", ":", "if", "\"?\"", "in", "mongodb_uri", ":", "options", "=", "mongodb_uri", ".", "split", "(", "\"?\"", ",", "1", ")", "[", "1", "]", "else", ":", "options", "=", "None", "uri", "=", "\"mongodb://\"", "+", "hosts", "if", "options", ":", "uri", "+=", "\"/?\"", "+", "options", "return", "uri" ]
Converts a sparse SciPy matrix into a sparse CVXOPT matrix.
def tocvx(B):
    Bcoo = B.tocoo()
    return spmatrix(Bcoo.data, Bcoo.row.tolist(), Bcoo.col.tolist())
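A usage sketch with an illustrative SciPy matrix; the COO triplet (data, row, col) maps directly onto CVXOPT's spmatrix constructor:
import scipy.sparse as sp

B = sp.csr_matrix([[1.0, 0.0], [0.0, 2.0]])  # illustrative 2x2 matrix
Bcvx = tocvx(B)  # CVXOPT spmatrix with the same nonzero entries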
830
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/cvxopf.py#L741-L745
[ "def", "delete_datapoints_in_time_range", "(", "self", ",", "start_dt", "=", "None", ",", "end_dt", "=", "None", ")", ":", "start_dt", "=", "to_none_or_dt", "(", "validate_type", "(", "start_dt", ",", "datetime", ".", "datetime", ",", "type", "(", "None", ")", ")", ")", "end_dt", "=", "to_none_or_dt", "(", "validate_type", "(", "end_dt", ",", "datetime", ".", "datetime", ",", "type", "(", "None", ")", ")", ")", "params", "=", "{", "}", "if", "start_dt", "is", "not", "None", ":", "params", "[", "'startTime'", "]", "=", "isoformat", "(", "start_dt", ")", "if", "end_dt", "is", "not", "None", ":", "params", "[", "'endTime'", "]", "=", "isoformat", "(", "end_dt", ")", "self", ".", "_conn", ".", "delete", "(", "\"/ws/DataPoint/{stream_id}{querystring}\"", ".", "format", "(", "stream_id", "=", "self", ".", "get_stream_id", "(", ")", ",", "querystring", "=", "\"?\"", "+", "urllib", ".", "parse", ".", "urlencode", "(", "params", ")", "if", "params", "else", "\"\"", ",", ")", ")" ]
Directly maps the agents and the tasks.
def doInteractions(self, number=1):
    t0 = time.time()

    for _ in range(number):
        self._oneInteraction()

    elapsed = time.time() - t0
    logger.info("%d interactions executed in %.3fs." % (number, elapsed))

    return self.stepid
831
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/discrete/experiment.py#L72-L83
[ "def", "parse_token_response", "(", "body", ",", "scope", "=", "None", ")", ":", "try", ":", "params", "=", "json", ".", "loads", "(", "body", ")", "except", "ValueError", ":", "# Fall back to URL-encoded string, to support old implementations,", "# including (at time of writing) Facebook. See:", "# https://github.com/oauthlib/oauthlib/issues/267", "params", "=", "dict", "(", "urlparse", ".", "parse_qsl", "(", "body", ")", ")", "for", "key", "in", "(", "'expires_in'", ",", ")", ":", "if", "key", "in", "params", ":", "# cast things to int", "params", "[", "key", "]", "=", "int", "(", "params", "[", "key", "]", ")", "if", "'scope'", "in", "params", ":", "params", "[", "'scope'", "]", "=", "scope_to_list", "(", "params", "[", "'scope'", "]", ")", "if", "'expires_in'", "in", "params", ":", "params", "[", "'expires_at'", "]", "=", "time", ".", "time", "(", ")", "+", "int", "(", "params", "[", "'expires_in'", "]", ")", "params", "=", "OAuth2Token", "(", "params", ",", "old_scope", "=", "scope", ")", "validate_token_parameters", "(", "params", ")", "return", "params" ]
Exciter model.
def exciter(self, Xexc, Pexc, Vexc):
    exciters = self.exciters
    F = zeros(Xexc.shape)

    typ1 = [e.generator._i for e in exciters if e.model == CONST_EXCITATION]
    typ2 = [e.generator._i for e in exciters if e.model == IEEE_DC1A]

    # Exciter type 1: constant excitation
    F[typ1, :] = 0.0

    # Exciter type 2: IEEE DC1A
    Efd = Xexc[typ2, 0]
    Uf = Xexc[typ2, 1]
    Ur = Xexc[typ2, 2]
    Ka = Pexc[typ2, 0]
    Ta = Pexc[typ2, 1]
    Ke = Pexc[typ2, 2]
    Te = Pexc[typ2, 3]
    Kf = Pexc[typ2, 4]
    Tf = Pexc[typ2, 5]
    Aex = Pexc[typ2, 6]
    Bex = Pexc[typ2, 7]
    Ur_min = Pexc[typ2, 8]
    Ur_max = Pexc[typ2, 9]
    Uref = Pexc[typ2, 10]
    Uref2 = Pexc[typ2, 11]
    U = Vexc[typ2, 1]

    Ux = Aex * exp(Bex * Efd)
    dUr = 1 / Ta * (Ka * (Uref - U + Uref2 - Uf) - Ur)
    dUf = 1 / Tf * (Kf / Te * (Ur - Ux - Ke * Efd) - Uf)

    # Clip the regulator output to its limits.
    if sum(flatnonzero(Ur > Ur_max)) >= 1:
        Ur2 = Ur_max
    elif sum(flatnonzero(Ur < Ur_min)) >= 1:
        Ur2 = Ur_min
    else:
        Ur2 = Ur

    dEfd = 1 / Te * (Ur2 - Ux - Ke * Efd)
    F[typ2, :] = c_[dEfd, dUf, dUr]

    # Exciter type 3:
    # Exciter type 4:

    return F
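In equation form, the IEEE DC1A branch integrates, with the saturation term \(U_x = A_{ex} e^{B_{ex} E_{fd}}\):
\[
\dot U_r = \frac{1}{T_a}\big(K_a (U_{ref} - U + U_{ref2} - U_f) - U_r\big), \quad
\dot U_f = \frac{1}{T_f}\Big(\frac{K_f}{T_e}(U_r - U_x - K_e E_{fd}) - U_f\Big), \quad
\dot E_{fd} = \frac{1}{T_e}(U_r' - U_x - K_e E_{fd}),
\]
where \(U_r'\) is the regulator output clipped to \([U_{r,\min}, U_{r,\max}]\).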
832
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/dyn.py#L409-L464
[ "def", "OnAdjustVolume", "(", "self", ",", "event", ")", ":", "self", ".", "volume", "=", "self", ".", "player", ".", "audio_get_volume", "(", ")", "if", "event", ".", "GetWheelRotation", "(", ")", "<", "0", ":", "self", ".", "volume", "=", "max", "(", "0", ",", "self", ".", "volume", "-", "10", ")", "elif", "event", ".", "GetWheelRotation", "(", ")", ">", "0", ":", "self", ".", "volume", "=", "min", "(", "200", ",", "self", ".", "volume", "+", "10", ")", "self", ".", "player", ".", "audio_set_volume", "(", "self", ".", "volume", ")" ]
Governor model.
def governor(self, Xgov, Pgov, Vgov):
    governors = self.governors
    omegas = 2 * pi * self.freq

    F = zeros(Xgov.shape)

    typ1 = [g.generator._i for g in governors if g.model == CONST_POWER]
    typ2 = [g.generator._i for g in governors if g.model == GENERAL_IEEE]

    # Governor type 1: constant power
    F[typ1, 0] = 0

    # Governor type 2: IEEE general speed-governing system
    Pm = Xgov[typ2, 0]
    P = Xgov[typ2, 1]
    x = Xgov[typ2, 2]
    z = Xgov[typ2, 3]

    K = Pgov[typ2, 0]
    T1 = Pgov[typ2, 1]
    T2 = Pgov[typ2, 2]
    T3 = Pgov[typ2, 3]
    Pup = Pgov[typ2, 4]
    Pdown = Pgov[typ2, 5]
    Pmax = Pgov[typ2, 6]
    Pmin = Pgov[typ2, 7]
    P0 = Pgov[typ2, 8]

    omega = Vgov[typ2, 0]

    dx = K * (-1 / T1 * x + (1 - T2 / T1) * (omega - omegas))
    dP = 1 / T1 * x + T2 / T1 * (omega - omegas)

    y = 1 / T3 * (P0 - P - Pm)

    # Rate limits.
    y2 = y
    if sum(flatnonzero(y > Pup)) >= 1:
        y2 = (1 - flatnonzero(y > Pup)) * y2 + flatnonzero(y > Pup) * Pup
    if sum(flatnonzero(y < Pdown)) >= 1:
        y2 = (1 - flatnonzero(y < Pdown)) * y2 + flatnonzero(y < Pdown) * Pdown

    dz = y2
    dPm = y2

    # Power limits.
    if sum(flatnonzero(z > Pmax)) >= 1:
        dPm = (1 - flatnonzero(z > Pmax)) * dPm + flatnonzero(z > Pmax) * 0
    if sum(flatnonzero(z < Pmin)) >= 1:
        dPm = (1 - flatnonzero(z < Pmin)) * dPm + flatnonzero(z < Pmin) * 0

    F[typ2, :] = c_[dPm, dP, dx, dz]

    # Governor type 3:
    # Governor type 4:

    return F
833
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/dyn.py#L467-L530
[ "def", "issueViaEmail", "(", "self", ",", "issuer", ",", "email", ",", "product", ",", "templateData", ",", "domainName", ",", "httpPort", "=", "80", ")", ":", "ticket", "=", "self", ".", "createTicket", "(", "issuer", ",", "unicode", "(", "email", ",", "'ascii'", ")", ",", "product", ")", "nonce", "=", "ticket", ".", "nonce", "signupInfo", "=", "{", "'from'", ":", "'signup@'", "+", "domainName", ",", "'to'", ":", "email", ",", "'date'", ":", "rfc822", ".", "formatdate", "(", ")", ",", "'message-id'", ":", "smtp", ".", "messageid", "(", ")", ",", "'link'", ":", "self", ".", "ticketLink", "(", "domainName", ",", "httpPort", ",", "nonce", ")", "}", "msg", "=", "templateData", "%", "signupInfo", "return", "ticket", ",", "_sendEmail", "(", "signupInfo", "[", "'from'", "]", ",", "email", ",", "msg", ")" ]
Generator model.
def generator(self, Xgen, Xexc, Xgov, Vgen):
    generators = self.dyn_generators
    omegas = 2 * pi * self.freq

    F = zeros(Xgen.shape)

    typ1 = [g._i for g in generators if g.model == CLASSICAL]
    typ2 = [g._i for g in generators if g.model == FOURTH_ORDER]

    # Generator type 1: classical model
    omega = Xgen[typ1, 1]
    Pm0 = Xgov[typ1, 0]

    H = array([g.h for g in generators])[typ1]
    D = array([g.d for g in generators])[typ1]

    Pe = Vgen[typ1, 2]

    ddelta = omega - omegas
    domega = pi * self.freq / H * (-D * (omega - omegas) + Pm0 - Pe)
    dEq = zeros(len(typ1))

    F[typ1, :] = c_[ddelta, domega, dEq]

    # Generator type 2: 4th order model
    omega = Xgen[typ2, 1]
    Eq_tr = Xgen[typ2, 2]
    Ed_tr = Xgen[typ2, 3]

    H = array([g.h for g in generators])
    D = array([g.d for g in generators])
    xd = array([g.xd for g in generators])
    xq = array([g.xq for g in generators])
    xd_tr = array([g.xd_tr for g in generators])
    xq_tr = array([g.xq_tr for g in generators])
    Td0_tr = array([g.td for g in generators])
    Tq0_tr = array([g.tq for g in generators])

    Id = Vgen[typ2, 0]
    Iq = Vgen[typ2, 1]
    Pe = Vgen[typ2, 2]

    Efd = Xexc[typ2, 0]
    Pm = Xgov[typ2, 0]

    ddelta = omega - omegas
    domega = pi * self.freq / H * (-D * (omega - omegas) + Pm - Pe)
    dEq = 1 / Td0_tr * (Efd - Eq_tr + (xd - xd_tr) * Id)
    dEd = 1 / Tq0_tr * (-Ed_tr - (xq - xq_tr) * Iq)

    F[typ2, :] = c_[ddelta, domega, dEq, dEd]

    # Generator type 3:
    # Generator type 4:

    return F
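Both model types integrate the swing equation; for the classical model the states reduce to
\[
\dot\delta = \omega - \omega_s, \qquad
\dot\omega = \frac{\pi f}{H}\big(-D(\omega - \omega_s) + P_m - P_e\big),
\]
while the 4th-order model adds the transient EMF dynamics \(\dot E_q' = (E_{fd} - E_q' + (x_d - x_d') I_d)/T'_{d0}\) and \(\dot E_d' = (-E_d' - (x_q - x_q') I_q)/T'_{q0}\), matching the code above.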
834
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/dyn.py#L533-L595
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Writes case data to file in ReStructuredText format.
def _write_data(self, file):
    self.write_case_data(file)

    file.write("Bus Data\n")
    file.write("-" * 8 + "\n")
    self.write_bus_data(file)
    file.write("\n")

    file.write("Branch Data\n")
    file.write("-" * 11 + "\n")
    self.write_branch_data(file)
    file.write("\n")

    file.write("Generator Data\n")
    file.write("-" * 14 + "\n")
    self.write_generator_data(file)
    file.write("\n")
835
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/rst.py#L40-L58
[ "def", "utilization", "(", "prev", ",", "curr", ",", "counters", ")", ":", "busy_prop", ",", "idle_prop", "=", "counters", "pb", "=", "getattr", "(", "prev", ",", "busy_prop", ")", "pi", "=", "getattr", "(", "prev", ",", "idle_prop", ")", "cb", "=", "getattr", "(", "curr", ",", "busy_prop", ")", "ci", "=", "getattr", "(", "curr", ",", "idle_prop", ")", "db", "=", "minus", "(", "cb", ",", "pb", ")", "di", "=", "minus", "(", "ci", ",", "pi", ")", "return", "mul", "(", "div", "(", "db", ",", "add", "(", "db", ",", "di", ")", ")", ",", "100", ")" ]
Writes bus data to a ReST table .
def write_bus_data ( self , file ) : report = CaseReport ( self . case ) buses = self . case . buses col_width = 8 col_width_2 = col_width * 2 + 1 col1_width = 6 sep = "=" * 6 + " " + ( "=" * col_width + " " ) * 6 + "\n" file . write ( sep ) # Line one of column headers file . write ( "Name" . center ( col1_width ) + " " ) file . write ( "Voltage (pu)" . center ( col_width_2 ) + " " ) file . write ( "Generation" . center ( col_width_2 ) + " " ) file . write ( "Load" . center ( col_width_2 ) + " " ) file . write ( "\n" ) file . write ( "-" * col1_width + " " + ( "-" * col_width_2 + " " ) * 3 + "\n" ) # Line two of column header file . write ( ".." . ljust ( col1_width ) + " " ) file . write ( "Amp" . center ( col_width ) + " " ) file . write ( "Phase" . center ( col_width ) + " " ) file . write ( "P (MW)" . center ( col_width ) + " " ) file . write ( "Q (MVAr)" . center ( col_width ) + " " ) file . write ( "P (MW)" . center ( col_width ) + " " ) file . write ( "Q (MVAr)" . center ( col_width ) + " " ) file . write ( "\n" ) file . write ( sep ) # Bus rows for bus in buses : file . write ( bus . name [ : col1_width ] . ljust ( col1_width ) ) file . write ( " %8.3f" % bus . v_magnitude ) file . write ( " %8.3f" % bus . v_angle ) file . write ( " %8.2f" % self . case . s_supply ( bus ) . real ) file . write ( " %8.2f" % self . case . s_supply ( bus ) . imag ) file . write ( " %8.2f" % self . case . s_demand ( bus ) . real ) file . write ( " %8.2f" % self . case . s_demand ( bus ) . imag ) file . write ( "\n" ) # Totals # file.write("..".ljust(col1_width) + " ") # file.write(("..".ljust(col_width) + " ")*2) # file.write(("_"*col_width + " ")*4 + "\n") file . write ( ".." . ljust ( col1_width ) + " " + ".." . ljust ( col_width ) + " " ) file . write ( "*Total:*" . rjust ( col_width ) + " " ) ptot = report . actual_pgen qtot = report . actual_qgen file . write ( "%8.2f " % ptot ) file . write ( "%8.2f " % qtot ) file . write ( "%8.2f " % report . p_demand ) file . write ( "%8.2f " % report . q_demand ) file . write ( "\n" ) file . write ( sep ) del report
836
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/rst.py#L87-L146
[ "def", "__isOpenThreadWpanRunning", "(", "self", ")", ":", "print", "'call __isOpenThreadWpanRunning'", "if", "self", ".", "__stripValue", "(", "self", ".", "__sendCommand", "(", "WPANCTL_CMD", "+", "'getprop -v NCP:State'", ")", "[", "0", "]", ")", "==", "'associated'", ":", "print", "'*****OpenThreadWpan is running'", "return", "True", "else", ":", "print", "'*****Wrong OpenThreadWpan state'", "return", "False" ]
Writes component numbers to a table .
def write_how_many ( self , file ) : report = CaseReport ( self . case ) # Map component labels to attribute names components = [ ( "Bus" , "n_buses" ) , ( "Generator" , "n_generators" ) , ( "Committed Generator" , "n_online_generators" ) , ( "Load" , "n_loads" ) , ( "Fixed Load" , "n_fixed_loads" ) , ( "Despatchable Load" , "n_online_vloads" ) , ( "Shunt" , "n_shunts" ) , ( "Branch" , "n_branches" ) , ( "Transformer" , "n_transformers" ) , ( "Inter-tie" , "n_interties" ) , ( "Area" , "n_areas" ) ] # Column 1 width longest = max ( [ len ( c [ 0 ] ) for c in components ] ) col1_header = "Object" col1_width = longest col2_header = "Quantity" col2_width = len ( col2_header ) # Row separator sep = "=" * col1_width + " " + "=" * col2_width + "\n" # Row headers file . write ( sep ) file . write ( col1_header . center ( col1_width ) ) file . write ( " " ) file . write ( "%s\n" % col2_header . center ( col2_width ) ) file . write ( sep ) # Rows for label , attr in components : col2_value = str ( getattr ( report , attr ) ) file . write ( "%s %s\n" % ( label . ljust ( col1_width ) , col2_value . rjust ( col2_width ) ) ) else : file . write ( sep ) file . write ( "\n" ) del report
837
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/rst.py#L312-L355
[ "def", "get_license_assignment_manager", "(", "service_instance", ")", ":", "log", ".", "debug", "(", "'Retrieving license assignment manager'", ")", "try", ":", "lic_assignment_manager", "=", "service_instance", ".", "content", ".", "licenseManager", ".", "licenseAssignmentManager", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "if", "not", "lic_assignment_manager", ":", "raise", "salt", ".", "exceptions", ".", "VMwareObjectRetrievalError", "(", "'License assignment manager was not retrieved'", ")", "return", "lic_assignment_manager" ]
Writes minimum and maximum values to a table .
def write_min_max ( self , file ) : report = CaseReport ( self . case ) col1_header = "Attribute" col1_width = 19 col2_header = "Minimum" col3_header = "Maximum" col_width = 22 sep = "=" * col1_width + " " + "=" * col_width + " " + "=" * col_width + "\n" # Row headers file . write ( sep ) file . write ( "%s" % col1_header . center ( col1_width ) ) file . write ( " " ) file . write ( "%s" % col2_header . center ( col_width ) ) file . write ( " " ) file . write ( "%s" % col3_header . center ( col_width ) ) file . write ( "\n" ) file . write ( sep ) # Rows min_val , min_i = getattr ( report , "min_v_magnitude" ) max_val , max_i = getattr ( report , "max_v_magnitude" ) file . write ( "%s %7.3f p.u. @ bus %2d %7.3f p.u. @ bus %2d\n" % ( "Voltage Amplitude" . ljust ( col1_width ) , min_val , min_i , max_val , max_i ) ) min_val , min_i = getattr ( report , "min_v_angle" ) max_val , max_i = getattr ( report , "max_v_angle" ) file . write ( "%s %16.3f %16.3f\n" % ( "Voltage Phase Angle" . ljust ( col1_width ) , min_val , max_val ) ) file . write ( sep ) file . write ( "\n" ) del report
838
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/rst.py#L438-L478
[ "def", "stop", "(", "self", ")", ":", "for", "client", "in", "self", ".", "_snippet_clients", ".", "values", "(", ")", ":", "if", "client", ".", "is_alive", ":", "self", ".", "_device", ".", "log", ".", "debug", "(", "'Stopping SnippetClient<%s>.'", ",", "client", ".", "package", ")", "client", ".", "stop_app", "(", ")", "else", ":", "self", ".", "_device", ".", "log", ".", "debug", "(", "'Not stopping SnippetClient<%s> because it is not alive.'", ",", "client", ".", "package", ")" ]
Return a name unique within a context based on the specified name .
def make_unique_name ( base , existing = [ ] , format = "%s_%s" ) : count = 2 name = base while name in existing : name = format % ( base , count ) count += 1 return name
839
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/parsing_util.py#L167-L180
[ "def", "_merge_meta", "(", "self", ",", "encoded_meta", ",", "meta", ")", ":", "new_meta", "=", "None", "if", "meta", ":", "_meta", "=", "self", ".", "_decode_meta", "(", "encoded_meta", ")", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "meta", ")", ":", "if", "value", "is", "None", ":", "_meta", ".", "pop", "(", "key", ",", "None", ")", "else", ":", "_meta", "[", "key", "]", "=", "value", "new_meta", "=", "self", ".", "_encode_meta", "(", "_meta", ")", "return", "new_meta" ]
Calls antlr4 on the grammar file .
def call_antlr4 ( arg ) : # pylint: disable=unused-argument, unused-variable antlr_path = os . path . join ( ROOT_DIR , "java" , "antlr-4.7-complete.jar" ) classpath = os . pathsep . join ( [ "." , "{:s}" . format ( antlr_path ) , "$CLASSPATH" ] ) generated = os . path . join ( ROOT_DIR , 'src' , 'pymoca' , 'generated' ) cmd = "java -Xmx500M -cp \"{classpath:s}\" org.antlr.v4.Tool {arg:s}" " -o {generated:s} -visitor -Dlanguage=Python3" . format ( * * locals ( ) ) print ( cmd ) proc = subprocess . Popen ( cmd . split ( ) , cwd = os . path . join ( ROOT_DIR , 'src' , 'pymoca' ) ) proc . communicate ( ) with open ( os . path . join ( generated , '__init__.py' ) , 'w' ) as fid : fid . write ( '' )
840
https://github.com/pymoca/pymoca/blob/14b5eb7425e96689de6cc5c10f400895d586a978/setup.py#L74-L86
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
Setup the package .
def setup_package ( ) : with open ( 'requirements.txt' , 'r' ) as req_file : install_reqs = req_file . read ( ) . split ( '\n' ) cmdclass_ = { 'antlr' : AntlrBuildCommand } cmdclass_ . update ( versioneer . get_cmdclass ( ) ) setup ( version = versioneer . get_version ( ) , name = 'pymoca' , maintainer = "James Goppert" , maintainer_email = "[email protected]" , description = DOCLINES [ 0 ] , long_description = "\n" . join ( DOCLINES [ 2 : ] ) , url = 'https://github.com/pymoca/pymoca' , author = 'James Goppert' , author_email = '[email protected]' , download_url = 'https://github.com/pymoca/pymoca' , license = 'BSD' , classifiers = [ _f for _f in CLASSIFIERS . split ( '\n' ) if _f ] , platforms = [ "Windows" , "Linux" , "Solaris" , "Mac OS-X" , "Unix" ] , install_requires = install_reqs , tests_require = [ 'coverage >= 3.7.1' , 'nose >= 1.3.1' ] , test_suite = 'nose.collector' , python_requires = '>=3.5' , packages = find_packages ( "src" ) , package_dir = { "" : "src" } , include_package_data = True , cmdclass = cmdclass_ )
841
https://github.com/pymoca/pymoca/blob/14b5eb7425e96689de6cc5c10f400895d586a978/setup.py#L89-L121
[ "def", "apply_binding", "(", "self", ",", "binding", ",", "msg_str", ",", "destination", "=", "\"\"", ",", "relay_state", "=", "\"\"", ",", "response", "=", "False", ",", "sign", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# unless if BINDING_HTTP_ARTIFACT", "if", "response", ":", "typ", "=", "\"SAMLResponse\"", "else", ":", "typ", "=", "\"SAMLRequest\"", "if", "binding", "==", "BINDING_HTTP_POST", ":", "logger", ".", "info", "(", "\"HTTP POST\"", ")", "# if self.entity_type == 'sp':", "# info = self.use_http_post(msg_str, destination, relay_state,", "# typ)", "# info[\"url\"] = destination", "# info[\"method\"] = \"POST\"", "# else:", "info", "=", "self", ".", "use_http_form_post", "(", "msg_str", ",", "destination", ",", "relay_state", ",", "typ", ")", "info", "[", "\"url\"", "]", "=", "destination", "info", "[", "\"method\"", "]", "=", "\"POST\"", "elif", "binding", "==", "BINDING_HTTP_REDIRECT", ":", "logger", ".", "info", "(", "\"HTTP REDIRECT\"", ")", "sigalg", "=", "kwargs", ".", "get", "(", "\"sigalg\"", ")", "if", "sign", "and", "sigalg", ":", "signer", "=", "self", ".", "sec", ".", "sec_backend", ".", "get_signer", "(", "sigalg", ")", "else", ":", "signer", "=", "None", "info", "=", "self", ".", "use_http_get", "(", "msg_str", ",", "destination", ",", "relay_state", ",", "typ", ",", "signer", "=", "signer", ",", "*", "*", "kwargs", ")", "info", "[", "\"url\"", "]", "=", "str", "(", "destination", ")", "info", "[", "\"method\"", "]", "=", "\"GET\"", "elif", "binding", "==", "BINDING_SOAP", "or", "binding", "==", "BINDING_PAOS", ":", "info", "=", "self", ".", "use_soap", "(", "msg_str", ",", "destination", ",", "sign", "=", "sign", ",", "*", "*", "kwargs", ")", "elif", "binding", "==", "BINDING_URI", ":", "info", "=", "self", ".", "use_http_uri", "(", "msg_str", ",", "typ", ",", "destination", ")", "elif", "binding", "==", "BINDING_HTTP_ARTIFACT", ":", "if", "response", ":", "info", "=", "self", ".", "use_http_artifact", "(", "msg_str", ",", "destination", ",", "relay_state", ")", "info", "[", "\"method\"", "]", "=", "\"GET\"", "info", "[", "\"status\"", "]", "=", "302", "else", ":", "info", "=", "self", ".", "use_http_artifact", "(", "msg_str", ",", "destination", ",", "relay_state", ")", "else", ":", "raise", "SAMLError", "(", "\"Unknown binding type: %s\"", "%", "binding", ")", "return", "info" ]
Creates the dialog body . Returns the widget that should have initial focus .
def body ( self , frame ) : master = Frame ( self ) master . pack ( padx = 5 , pady = 0 , expand = 1 , fill = BOTH ) title = Label ( master , text = "Buses" ) title . pack ( side = TOP ) bus_lb = self . bus_lb = Listbox ( master , selectmode = SINGLE , width = 10 ) bus_lb . pack ( side = LEFT ) for bus in self . case . buses : bus_lb . insert ( END , bus . name ) bus_lb . bind ( "<<ListboxSelect>>" , self . on_bus ) self . bus_params = BusProperties ( master ) return bus_lb
842
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/pylontk.py#L538-L558
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Solves an optimal power flow and returns a results dictionary .
def solve ( self , solver_klass = None ) : # Start the clock. t0 = time ( ) # Build an OPF model with variables and constraints. om = self . _construct_opf_model ( self . case ) if om is None : return { "converged" : False , "output" : { "message" : "No Ref Bus." } } # Call the specific solver. # if self.opt["verbose"]: # print '\nPYLON Version %s, %s', "0.4.2", "April 2010" if solver_klass is not None : result = solver_klass ( om , opt = self . opt ) . solve ( ) elif self . dc : # if self.opt["verbose"]: # print ' -- DC Optimal Power Flow\n' result = DCOPFSolver ( om , opt = self . opt ) . solve ( ) else : # if self.opt["verbose"]: # print ' -- AC Optimal Power Flow\n' result = PIPSSolver ( om , opt = self . opt ) . solve ( ) result [ "elapsed" ] = time ( ) - t0 if self . opt . has_key ( "verbose" ) : if self . opt [ "verbose" ] : logger . info ( "OPF completed in %.3fs." % result [ "elapsed" ] ) return result
843
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L79-L110
[ "def", "concatenate_not_none", "(", "l", ",", "axis", "=", "0", ")", ":", "# Get the indexes of the arrays in the list", "mask", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "l", ")", ")", ":", "if", "l", "[", "i", "]", "is", "not", "None", ":", "mask", ".", "append", "(", "i", ")", "# Concatenate them", "l_stacked", "=", "np", ".", "concatenate", "(", "[", "l", "[", "i", "]", "for", "i", "in", "mask", "]", ",", "axis", "=", "axis", ")", "return", "l_stacked" ]
Returns an OPF model .
def _construct_opf_model ( self , case ) : # Zero the case result attributes. self . case . reset ( ) base_mva = case . base_mva # Check for one reference bus. oneref , refs = self . _ref_check ( case ) if not oneref : #return {"status": "error"} None # Remove isolated components. bs , ln , gn = self . _remove_isolated ( case ) # Update bus indexes. self . case . index_buses ( bs ) # Convert single-block piecewise-linear costs into linear polynomial. gn = self . _pwl1_to_poly ( gn ) # Set-up initial problem variables. Va = self . _get_voltage_angle_var ( refs , bs ) Pg = self . _get_pgen_var ( gn , base_mva ) if self . dc : # DC model. # Get the susceptance matrices and phase shift injection vectors. B , Bf , Pbusinj , Pfinj = self . case . makeBdc ( bs , ln ) # Power mismatch constraints (B*Va + Pg = Pd). Pmis = self . _power_mismatch_dc ( bs , gn , B , Pbusinj , base_mva ) # Branch flow limit constraints. Pf , Pt = self . _branch_flow_dc ( ln , Bf , Pfinj , base_mva ) else : # Set-up additional AC-OPF problem variables. Vm = self . _get_voltage_magnitude_var ( bs , gn ) Qg = self . _get_qgen_var ( gn , base_mva ) Pmis , Qmis , Sf , St = self . _nln_constraints ( len ( bs ) , len ( ln ) ) vl = self . _const_pf_constraints ( gn , base_mva ) # TODO: Generator PQ capability curve constraints. # PQh, PQl = self._pq_capability_curve_constraints(gn) # Branch voltage angle difference limits. ang = self . _voltage_angle_diff_limit ( bs , ln ) if self . dc : vars = [ Va , Pg ] constraints = [ Pmis , Pf , Pt , ang ] else : vars = [ Va , Vm , Pg , Qg ] constraints = [ Pmis , Qmis , Sf , St , #PQh, PQL, vl , ang ] # Piece-wise linear generator cost constraints. y , ycon = self . _pwl_gen_costs ( gn , base_mva ) if ycon is not None : vars . append ( y ) constraints . append ( ycon ) # Add variables and constraints to the OPF model object. opf = OPFModel ( case ) opf . add_vars ( vars ) opf . add_constraints ( constraints ) if self . dc : # user data opf . _Bf = Bf opf . _Pfinj = Pfinj return opf
844
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L116-L190
[ "def", "append_to_multiple", "(", "self", ",", "d", ",", "value", ",", "selector", ",", "data_columns", "=", "None", ",", "axes", "=", "None", ",", "dropna", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "axes", "is", "not", "None", ":", "raise", "TypeError", "(", "\"axes is currently not accepted as a parameter to\"", "\" append_to_multiple; you can create the \"", "\"tables independently instead\"", ")", "if", "not", "isinstance", "(", "d", ",", "dict", ")", ":", "raise", "ValueError", "(", "\"append_to_multiple must have a dictionary specified as the \"", "\"way to split the value\"", ")", "if", "selector", "not", "in", "d", ":", "raise", "ValueError", "(", "\"append_to_multiple requires a selector that is in passed dict\"", ")", "# figure out the splitting axis (the non_index_axis)", "axis", "=", "list", "(", "set", "(", "range", "(", "value", ".", "ndim", ")", ")", "-", "set", "(", "_AXES_MAP", "[", "type", "(", "value", ")", "]", ")", ")", "[", "0", "]", "# figure out how to split the value", "remain_key", "=", "None", "remain_values", "=", "[", "]", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", ":", "if", "v", "is", "None", ":", "if", "remain_key", "is", "not", "None", ":", "raise", "ValueError", "(", "\"append_to_multiple can only have one value in d that \"", "\"is None\"", ")", "remain_key", "=", "k", "else", ":", "remain_values", ".", "extend", "(", "v", ")", "if", "remain_key", "is", "not", "None", ":", "ordered", "=", "value", ".", "axes", "[", "axis", "]", "ordd", "=", "ordered", ".", "difference", "(", "Index", "(", "remain_values", ")", ")", "ordd", "=", "sorted", "(", "ordered", ".", "get_indexer", "(", "ordd", ")", ")", "d", "[", "remain_key", "]", "=", "ordered", ".", "take", "(", "ordd", ")", "# data_columns", "if", "data_columns", "is", "None", ":", "data_columns", "=", "d", "[", "selector", "]", "# ensure rows are synchronized across the tables", "if", "dropna", ":", "idxs", "=", "(", "value", "[", "cols", "]", ".", "dropna", "(", "how", "=", "'all'", ")", ".", "index", "for", "cols", "in", "d", ".", "values", "(", ")", ")", "valid_index", "=", "next", "(", "idxs", ")", "for", "index", "in", "idxs", ":", "valid_index", "=", "valid_index", ".", "intersection", "(", "index", ")", "value", "=", "value", ".", "loc", "[", "valid_index", "]", "# append", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", ":", "dc", "=", "data_columns", "if", "k", "==", "selector", "else", "None", "# compute the val", "val", "=", "value", ".", "reindex", "(", "v", ",", "axis", "=", "axis", ")", "self", ".", "append", "(", "k", ",", "val", ",", "data_columns", "=", "dc", ",", "*", "*", "kwargs", ")" ]
Checks that there is only one reference bus .
def _ref_check ( self , case ) : refs = [ bus . _i for bus in case . buses if bus . type == REFERENCE ] if len ( refs ) == 1 : return True , refs else : logger . error ( "OPF requires a single reference bus." ) return False , refs
845
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L193-L202
[ "def", "_initialize", "(", "self", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Start initializing data from %s\"", ",", "self", ".", "url", ")", "resp", "=", "self", ".", "get", "(", "self", ".", "url", ",", "verify", "=", "False", ",", "proxies", "=", "self", ".", "rtc_obj", ".", "proxies", ",", "headers", "=", "self", ".", "rtc_obj", ".", "headers", ")", "self", ".", "__initialize", "(", "resp", ")", "self", ".", "log", ".", "info", "(", "\"Finish the initialization for <%s %s>\"", ",", "self", ".", "__class__", ".", "__name__", ",", "self", ")" ]
Returns non - isolated case components .
def _remove_isolated ( self , case ) : # case.deactivate_isolated() buses = case . connected_buses branches = case . online_branches gens = case . online_generators return buses , branches , gens
846
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L205-L213
[ "def", "user_deletemedia", "(", "mediaids", ",", "*", "*", "kwargs", ")", ":", "conn_args", "=", "_login", "(", "*", "*", "kwargs", ")", "ret", "=", "{", "}", "try", ":", "if", "conn_args", ":", "method", "=", "'user.deletemedia'", "if", "not", "isinstance", "(", "mediaids", ",", "list", ")", ":", "mediaids", "=", "[", "mediaids", "]", "params", "=", "mediaids", "ret", "=", "_query", "(", "method", ",", "params", ",", "conn_args", "[", "'url'", "]", ",", "conn_args", "[", "'auth'", "]", ")", "return", "ret", "[", "'result'", "]", "[", "'mediaids'", "]", "else", ":", "raise", "KeyError", "except", "KeyError", ":", "return", "ret" ]
Converts single - block piecewise - linear costs into linear polynomial .
def _pwl1_to_poly ( self , generators ) : for g in generators : if ( g . pcost_model == PW_LINEAR ) and ( len ( g . p_cost ) == 2 ) : g . pwl_to_poly ( ) return generators
847
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L216-L224
[ "def", "_normalize_port", "(", "scheme", ",", "port", ")", ":", "if", "not", "scheme", ":", "return", "port", "if", "port", "and", "port", "!=", "DEFAULT_PORT", "[", "scheme", "]", ":", "return", "port" ]
Returns the voltage angle variable set .
def _get_voltage_angle_var ( self , refs , buses ) : Va = array ( [ b . v_angle * ( pi / 180.0 ) for b in buses ] ) Vau = Inf * ones ( len ( buses ) ) Val = - Vau Vau [ refs ] = Va [ refs ] Val [ refs ] = Va [ refs ] return Variable ( "Va" , len ( buses ) , Va , Val , Vau )
848
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L230-L240
[ "def", "update_custom_field_options", "(", "self", ",", "custom_field_key", ",", "new_options", ",", "keep_existing_options", ")", ":", "custom_field_key", "=", "quote", "(", "custom_field_key", ",", "''", ")", "body", "=", "{", "\"Options\"", ":", "new_options", ",", "\"KeepExistingOptions\"", ":", "keep_existing_options", "}", "response", "=", "self", ".", "_put", "(", "self", ".", "uri_for", "(", "\"customfields/%s/options\"", "%", "custom_field_key", ")", ",", "json", ".", "dumps", "(", "body", ")", ")" ]
Returns the voltage magnitude variable set .
def _get_voltage_magnitude_var ( self , buses , generators ) : Vm = array ( [ b . v_magnitude for b in buses ] ) # For buses with generators initialise Vm from gen data. for g in generators : Vm [ g . bus . _i ] = g . v_magnitude Vmin = array ( [ b . v_min for b in buses ] ) Vmax = array ( [ b . v_max for b in buses ] ) return Variable ( "Vm" , len ( buses ) , Vm , Vmin , Vmax )
849
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L243-L255
[ "def", "update_notebook_actions", "(", "self", ")", ":", "if", "self", ".", "recent_notebooks", ":", "self", ".", "clear_recent_notebooks_action", ".", "setEnabled", "(", "True", ")", "else", ":", "self", ".", "clear_recent_notebooks_action", ".", "setEnabled", "(", "False", ")", "client", "=", "self", ".", "get_current_client", "(", ")", "if", "client", ":", "if", "client", ".", "get_filename", "(", ")", "!=", "WELCOME", ":", "self", ".", "save_as_action", ".", "setEnabled", "(", "True", ")", "self", ".", "open_console_action", ".", "setEnabled", "(", "True", ")", "self", ".", "options_menu", ".", "clear", "(", ")", "add_actions", "(", "self", ".", "options_menu", ",", "self", ".", "menu_actions", ")", "return", "self", ".", "save_as_action", ".", "setEnabled", "(", "False", ")", "self", ".", "open_console_action", ".", "setEnabled", "(", "False", ")", "self", ".", "options_menu", ".", "clear", "(", ")", "add_actions", "(", "self", ".", "options_menu", ",", "self", ".", "menu_actions", ")" ]
Returns the generator active power set - point variable .
def _get_pgen_var ( self , generators , base_mva ) : Pg = array ( [ g . p / base_mva for g in generators ] ) Pmin = array ( [ g . p_min / base_mva for g in generators ] ) Pmax = array ( [ g . p_max / base_mva for g in generators ] ) return Variable ( "Pg" , len ( generators ) , Pg , Pmin , Pmax )
850
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L258-L266
[ "def", "get_mentions", "(", "self", ",", "docs", "=", "None", ",", "sort", "=", "False", ")", ":", "result", "=", "[", "]", "if", "docs", ":", "docs", "=", "docs", "if", "isinstance", "(", "docs", ",", "(", "list", ",", "tuple", ")", ")", "else", "[", "docs", "]", "# Get cands from all splits", "for", "mention_class", "in", "self", ".", "mention_classes", ":", "mentions", "=", "(", "self", ".", "session", ".", "query", "(", "mention_class", ")", ".", "filter", "(", "mention_class", ".", "document_id", ".", "in_", "(", "[", "doc", ".", "id", "for", "doc", "in", "docs", "]", ")", ")", ".", "order_by", "(", "mention_class", ".", "id", ")", ".", "all", "(", ")", ")", "if", "sort", ":", "mentions", "=", "sorted", "(", "mentions", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ".", "get_stable_id", "(", ")", ")", "result", ".", "append", "(", "mentions", ")", "else", ":", "for", "mention_class", "in", "self", ".", "mention_classes", ":", "mentions", "=", "(", "self", ".", "session", ".", "query", "(", "mention_class", ")", ".", "order_by", "(", "mention_class", ".", "id", ")", ".", "all", "(", ")", ")", "if", "sort", ":", "mentions", "=", "sorted", "(", "mentions", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ".", "get_stable_id", "(", ")", ")", "result", ".", "append", "(", "mentions", ")", "return", "result" ]
Returns the generator reactive power variable set .
def _get_qgen_var ( self , generators , base_mva ) : Qg = array ( [ g . q / base_mva for g in generators ] ) Qmin = array ( [ g . q_min / base_mva for g in generators ] ) Qmax = array ( [ g . q_max / base_mva for g in generators ] ) return Variable ( "Qg" , len ( generators ) , Qg , Qmin , Qmax )
851
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L269-L277
[ "def", "guess_extension", "(", "amimetype", ",", "normalize", "=", "False", ")", ":", "ext", "=", "_mimes", ".", "guess_extension", "(", "amimetype", ")", "if", "ext", "and", "normalize", ":", "# Normalize some common magic mis-interpreation", "ext", "=", "{", "'.asc'", ":", "'.txt'", ",", "'.obj'", ":", "'.bin'", "}", ".", "get", "(", "ext", ",", "ext", ")", "from", "invenio", ".", "legacy", ".", "bibdocfile", ".", "api_normalizer", "import", "normalize_format", "return", "normalize_format", "(", "ext", ")", "return", "ext" ]
Returns non - linear constraints for OPF .
def _nln_constraints ( self , nb , nl ) : Pmis = NonLinearConstraint ( "Pmis" , nb ) Qmis = NonLinearConstraint ( "Qmis" , nb ) Sf = NonLinearConstraint ( "Sf" , nl ) St = NonLinearConstraint ( "St" , nl ) return Pmis , Qmis , Sf , St
852
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L283-L291
[ "def", "iterstruct", "(", "self", ")", ":", "from", "rowgenerators", ".", "rowpipe", ".", "json", "import", "add_to_struct", "json_headers", "=", "self", ".", "json_headers", "for", "row", "in", "islice", "(", "self", ",", "1", ",", "None", ")", ":", "# islice skips header", "d", "=", "{", "}", "for", "pos", ",", "jh", "in", "json_headers", ":", "add_to_struct", "(", "d", ",", "jh", ",", "row", "[", "pos", "]", ")", "yield", "d" ]
Returns a linear constraint enforcing constant power factor for dispatchable loads .
def _const_pf_constraints ( self , gn , base_mva ) : ivl = array ( [ i for i , g in enumerate ( gn ) if g . is_load and ( g . q_min != 0.0 or g . q_max != 0.0 ) ] ) vl = [ gn [ i ] for i in ivl ] nvl = len ( vl ) ng = len ( gn ) Pg = array ( [ g . p for g in vl ] ) / base_mva Qg = array ( [ g . q for g in vl ] ) / base_mva Pmin = array ( [ g . p_min for g in vl ] ) / base_mva Qmin = array ( [ g . q_min for g in vl ] ) / base_mva Qmax = array ( [ g . q_max for g in vl ] ) / base_mva # At least one of the Q limits must be zero (corresponding to Pmax==0). for g in vl : if g . q_min != 0.0 and g . q_max != 0.0 : logger . error ( "Either Qmin or Qmax must be equal to zero for " "each dispatchable load." ) # Initial values of PG and QG must be consistent with specified power # factor. This is to prevent a user from unknowingly using a case file # which would have defined a different power factor constraint under a # previous version which used PG and QG to define the power factor. Qlim = ( Qmin == 0.0 ) * Qmax + ( Qmax == 0.0 ) * Qmin if any ( abs ( Qg - Pg * Qlim / Pmin ) > 1e-6 ) : logger . error ( "For a dispatchable load, PG and QG must be " "consistent with the power factor defined by " "PMIN and the Q limits." ) # Make Avl, lvl, uvl, for lvl <= Avl * r_[Pg, Qg] <= uvl if nvl > 0 : xx = Pmin yy = Qlim pftheta = arctan2 ( yy , xx ) pc = sin ( pftheta ) qc = - cos ( pftheta ) ii = array ( [ range ( nvl ) , range ( nvl ) ] ) jj = r_ [ ivl , ivl + ng ] Avl = csr_matrix ( r_ [ pc , qc ] , ( ii , jj ) , ( nvl , 2 * ng ) ) lvl = zeros ( nvl ) uvl = lvl else : Avl = zeros ( ( 0 , 2 * ng ) ) lvl = array ( [ ] ) uvl = array ( [ ] ) return LinearConstraint ( "vl" , Avl , lvl , uvl , [ "Pg" , "Qg" ] )
853
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L330-L384
[ "def", "remove", "(", "self", ",", "uids", ":", "Iterable", "[", "int", "]", ")", "->", "None", ":", "for", "uid", "in", "uids", ":", "self", ".", "_recent", ".", "discard", "(", "uid", ")", "self", ".", "_flags", ".", "pop", "(", "uid", ",", "None", ")" ]
Returns the constraint on the branch voltage angle differences .
def _voltage_angle_diff_limit ( self , buses , branches ) : nb = len ( buses ) if not self . ignore_ang_lim : iang = [ i for i , b in enumerate ( branches ) if ( b . ang_min and ( b . ang_min > - 360.0 ) ) or ( b . ang_max and ( b . ang_max < 360.0 ) ) ] iangl = array ( [ i for i , b in enumerate ( branches ) if b . ang_min is not None ] ) [ iang ] iangh = array ( [ i for i , b in enumerate ( branches ) if b . ang_max is not None ] ) [ iang ] nang = len ( iang ) if nang > 0 : ii = range ( nang ) + range ( nang ) jjf = array ( [ b . from_bus . _i for b in branches ] ) [ iang ] jjt = array ( [ b . to_bus . _i for b in branches ] ) [ iang ] jj = r_ [ jjf , jjt ] Aang = csr_matrix ( r_ [ ones ( nang ) , - ones ( nang ) ] , ( ii , jj ) ) uang = Inf * ones ( nang ) lang = - uang lang [ iangl ] = array ( [ b . ang_min * ( pi / 180.0 ) for b in branches ] ) [ iangl ] uang [ iangh ] = array ( [ b . ang_max * ( pi / 180.0 ) for b in branches ] ) [ iangh ] else : # Aang = csr_matrix((0, nb), dtype=float64) # lang = array([], dtype=float64) # uang = array([], dtype=float64) Aang = zeros ( ( 0 , nb ) ) lang = array ( [ ] ) uang = array ( [ ] ) else : # Aang = csr_matrix((0, nb), dtype=float64) # lang = array([], dtype=float64) # uang = array([], dtype=float64) # iang = array([], dtype=float64) Aang = zeros ( ( 0 , nb ) ) lang = array ( [ ] ) uang = array ( [ ] ) return LinearConstraint ( "ang" , Aang , lang , uang , [ "Va" ] )
854
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L387-L430
[ "def", "make_param_dict_from_file", "(", "self", ",", "path_to_params", ")", ":", "# then we were given a path to a parameter file", "param_list", "=", "list", "(", "csv", ".", "reader", "(", "open", "(", "path_to_params", ",", "\"rb\"", ")", ")", ")", "# delete empty elements (if any)", "param_file", "=", "[", "x", "for", "x", "in", "param_list", "if", "x", "!=", "[", "]", "]", "# make dict of [wavenames] = raw_params", "name_list", "=", "[", "]", "param_list", "=", "[", "]", "# get header names for each param (names of param_list columns)", "param_colnames", "=", "param_file", "[", "0", "]", "[", "1", ":", "]", "# 0th element is \"Name\" or \"Wavename\"", "# start from 1. (row 0 is the header)", "for", "i", "in", "np", ".", "arange", "(", "1", ",", "len", "(", "param_file", ")", ")", ":", "name_list", ".", "append", "(", "param_file", "[", "i", "]", "[", "0", "]", ")", "param_list", ".", "append", "(", "param_file", "[", "i", "]", "[", "1", ":", "]", ")", "# remove ' ' blank spaces from param_list", "param_list", "=", "[", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "y", "]", "for", "y", "in", "param_list", "]", "param_dict", "=", "{", "}", "# i loops through param_colnames, j loops thru param values per wave", "for", "i", "in", "np", ".", "arange", "(", "0", ",", "len", "(", "param_colnames", ")", ")", ":", "param_dict", "[", "param_colnames", "[", "i", "]", "]", "=", "[", "]", "for", "j", "in", "np", ".", "arange", "(", "0", ",", "len", "(", "name_list", ")", ")", ":", "param_dict", "[", "param_colnames", "[", "i", "]", "]", ".", "append", "(", "param_list", "[", "j", "]", "[", "i", "]", ")", "# now we have param_dict, and name_list", "self", ".", "_param_dict", "=", "param_dict", "self", ".", "_row_names", "=", "name_list" ]
Adds a variable to the model .
def add_var ( self , var ) : if var . name in [ v . name for v in self . vars ] : logger . error ( "Variable set named '%s' already exists." % var . name ) return var . i1 = self . var_N var . iN = self . var_N + var . N - 1 self . vars . append ( var )
855
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L731-L740
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
Returns the variable set with the given name .
def get_var ( self , name ) : for var in self . vars : if var . name == name : return var else : raise ValueError
856
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L750-L757
[ "def", "isSupportedContent", "(", "cls", ",", "fileContent", ")", ":", "magic", "=", "bytearray", "(", "fileContent", ")", "[", ":", "4", "]", "return", "magic", "==", "p", "(", "'>I'", ",", "0xfeedface", ")", "or", "magic", "==", "p", "(", "'>I'", ",", "0xfeedfacf", ")", "or", "magic", "==", "p", "(", "'<I'", ",", "0xfeedface", ")", "or", "magic", "==", "p", "(", "'<I'", ",", "0xfeedfacf", ")" ]
Returns the linear constraints .
def linear_constraints ( self ) : if self . lin_N == 0 : return None , array ( [ ] ) , array ( [ ] ) A = lil_matrix ( ( self . lin_N , self . var_N ) , dtype = float64 ) l = - Inf * ones ( self . lin_N ) u = - l for lin in self . lin_constraints : if lin . N : # non-zero number of rows to add Ak = lin . A # A for kth linear constrain set i1 = lin . i1 # starting row index iN = lin . iN # ending row index vsl = lin . vs # var set list kN = - 1 # initialize last col of Ak used Ai = lil_matrix ( ( lin . N , self . var_N ) , dtype = float64 ) for v in vsl : var = self . get_var ( v ) j1 = var . i1 # starting column in A jN = var . iN # ending column in A k1 = kN + 1 # starting column in Ak kN = kN + var . N # ending column in Ak if j1 == jN : # FIXME: Single column slicing broken in lil. for i in range ( Ai . shape [ 0 ] ) : Ai [ i , j1 ] = Ak [ i , k1 ] else : Ai [ : , j1 : jN + 1 ] = Ak [ : , k1 : kN + 1 ] A [ i1 : iN + 1 , : ] = Ai l [ i1 : iN + 1 ] = lin . l u [ i1 : iN + 1 ] = lin . u return A . tocsr ( ) , l , u
857
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L782-L818
[ "def", "ttl", "(", "self", ",", "value", ")", ":", "# get timer", "timer", "=", "getattr", "(", "self", ",", "Annotation", ".", "__TIMER", ",", "None", ")", "# if timer is running, stop the timer", "if", "timer", "is", "not", "None", ":", "timer", ".", "cancel", "(", ")", "# initialize timestamp", "timestamp", "=", "None", "# if value is None", "if", "value", "is", "None", ":", "# nonify timer", "timer", "=", "None", "else", ":", "# else, renew a timer", "# get timestamp", "timestamp", "=", "time", "(", ")", "+", "value", "# start a new timer", "timer", "=", "Timer", "(", "value", ",", "self", ".", "__del__", ")", "timer", ".", "start", "(", ")", "# set/update attributes", "setattr", "(", "self", ",", "Annotation", ".", "__TIMER", ",", "timer", ")", "setattr", "(", "self", ",", "Annotation", ".", "__TS", ",", "timestamp", ")" ]
Adds a constraint to the model .
def add_constraint ( self , con ) : if isinstance ( con , LinearConstraint ) : N , M = con . A . shape if con . name in [ c . name for c in self . lin_constraints ] : logger . error ( "Constraint set named '%s' already exists." % con . name ) return False else : con . i1 = self . lin_N # + 1 con . iN = self . lin_N + N - 1 nv = 0 for vs in con . vs : nv = nv + self . get_var_N ( vs ) if M != nv : logger . error ( "Number of columns of A does not match number" " of variables, A is %d x %d, nv = %d" , N , M , nv ) self . lin_constraints . append ( con ) elif isinstance ( con , NonLinearConstraint ) : N = con . N if con . name in [ c . name for c in self . nln_constraints ] : logger . error ( "Constraint set named '%s' already exists." % con . name ) return False else : con . i1 = self . nln_N # + 1 con . iN = self . nln_N + N self . nln_constraints . append ( con ) else : raise ValueError return True
858
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/opf.py#L821-L854
[ "def", "register_dataframe_method", "(", "method", ")", ":", "def", "inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "class", "AccessorMethod", "(", "object", ")", ":", "def", "__init__", "(", "self", ",", "pandas_obj", ")", ":", "self", ".", "_obj", "=", "pandas_obj", "@", "wraps", "(", "method", ")", "def", "__call__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "method", "(", "self", ".", "_obj", ",", "*", "args", ",", "*", "*", "kwargs", ")", "register_dataframe_accessor", "(", "method", ".", "__name__", ")", "(", "AccessorMethod", ")", "return", "method", "return", "inner", "(", ")" ]
Solves using the Interior Point OPTimizer .
def _solve ( self , x0 , A , l , u , xmin , xmax ) : # Indexes of constrained lines. il = [ i for i , ln in enumerate ( self . _ln ) if 0.0 < ln . rate_a < 1e10 ] nl2 = len ( il ) neqnln = 2 * self . _nb # no. of non-linear equality constraints niqnln = 2 * len ( il ) # no. of lines with constraints user_data = { "A" : A , "neqnln" : neqnln , "niqnln" : niqnln } self . _f ( x0 ) Jdata = self . _dg ( x0 , False , user_data ) # Hdata = self._h(x0, ones(neqnln + niqnln), None, False, user_data) lmbda = { "eqnonlin" : ones ( neqnln ) , "ineqnonlin" : ones ( niqnln ) } H = tril ( self . _hessfcn ( x0 , lmbda ) , format = "coo" ) self . _Hrow , self . _Hcol = H . row , H . col n = len ( x0 ) # the number of variables xl = xmin xu = xmax gl = r_ [ zeros ( 2 * self . _nb ) , - Inf * ones ( 2 * nl2 ) , l ] gu = r_ [ zeros ( 2 * self . _nb ) , zeros ( 2 * nl2 ) , u ] m = len ( gl ) # the number of constraints nnzj = len ( Jdata ) # the number of nonzeros in Jacobian matrix nnzh = 0 #len(H.data) # the number of non-zeros in Hessian matrix f_fcn , df_fcn , g_fcn , dg_fcn , h_fcn = self . _f , self . _df , self . _g , self . _dg , self . _h nlp = pyipopt . create ( n , xl , xu , m , gl , gu , nnzj , nnzh , f_fcn , df_fcn , g_fcn , dg_fcn ) #, h_fcn) # print dir(nlp) # nlp.str_option("print_options_documentation", "yes") # nlp.int_option("max_iter", 10) # x, zl, zu, obj = nlp.solve(x0) success = nlp . solve ( x0 , user_data ) nlp . close ( )
859
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/ipopf.py#L44-L86
[ "def", "pack_rows", "(", "rows", ",", "bitdepth", ")", ":", "assert", "bitdepth", "<", "8", "assert", "8", "%", "bitdepth", "==", "0", "# samples per byte", "spb", "=", "int", "(", "8", "/", "bitdepth", ")", "def", "make_byte", "(", "block", ")", ":", "\"\"\"Take a block of (2, 4, or 8) values,\n and pack them into a single byte.\n \"\"\"", "res", "=", "0", "for", "v", "in", "block", ":", "res", "=", "(", "res", "<<", "bitdepth", ")", "+", "v", "return", "res", "for", "row", "in", "rows", ":", "a", "=", "bytearray", "(", "row", ")", "# Adding padding bytes so we can group into a whole", "# number of spb-tuples.", "n", "=", "float", "(", "len", "(", "a", ")", ")", "extra", "=", "math", ".", "ceil", "(", "n", "/", "spb", ")", "*", "spb", "-", "n", "a", ".", "extend", "(", "[", "0", "]", "*", "int", "(", "extra", ")", ")", "# Pack into bytes.", "# Each block is the samples for one byte.", "blocks", "=", "group", "(", "a", ",", "spb", ")", "yield", "bytearray", "(", "make_byte", "(", "block", ")", "for", "block", "in", "blocks", ")" ]
Applies branch outages .
def doOutages ( self ) : assert len ( self . branchOutages ) == len ( self . market . case . branches ) weights = [ [ ( False , r ) , ( True , 1 - ( r ) ) ] for r in self . branchOutages ] for i , ln in enumerate ( self . market . case . branches ) : ln . online = weighted_choice ( weights [ i ] ) if ln . online == False : print "Branch outage [%s] in period %d." % ( ln . name , self . stepid )
860
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/continuous/experiment.py#L133-L143
[ "def", "clean", "(", "self", ")", ":", "super", "(", ")", ".", "clean", "(", ")", "# At least a poster (user) or a session key must be associated with", "# the vote instance.", "if", "self", ".", "voter", "is", "None", "and", "self", ".", "anonymous_key", "is", "None", ":", "raise", "ValidationError", "(", "_", "(", "'A user id or an anonymous key must be used.'", ")", ")", "if", "self", ".", "voter", "and", "self", ".", "anonymous_key", ":", "raise", "ValidationError", "(", "_", "(", "'A user id or an anonymous key must be used, but not both.'", ")", ")" ]
Returns the case to its original state .
def reset_case ( self ) : for bus in self . market . case . buses : bus . p_demand = self . pdemand [ bus ] for task in self . tasks : for g in task . env . generators : g . p = task . env . _g0 [ g ] [ "p" ] g . p_max = task . env . _g0 [ g ] [ "p_max" ] g . p_min = task . env . _g0 [ g ] [ "p_min" ] g . q = task . env . _g0 [ g ] [ "q" ] g . q_max = task . env . _g0 [ g ] [ "q_max" ] g . q_min = task . env . _g0 [ g ] [ "q_min" ] g . p_cost = task . env . _g0 [ g ] [ "p_cost" ] g . pcost_model = task . env . _g0 [ g ] [ "pcost_model" ] g . q_cost = task . env . _g0 [ g ] [ "q_cost" ] g . qcost_model = task . env . _g0 [ g ] [ "qcost_model" ] g . c_startup = task . env . _g0 [ g ] [ "startup" ] g . c_shutdown = task . env . _g0 [ g ] [ "shutdown" ]
861
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/continuous/experiment.py#L146-L164
[ "def", "update_thumbnail", "(", "api_key", ",", "api_secret", ",", "video_key", ",", "position", "=", "7.0", ",", "*", "*", "kwargs", ")", ":", "jwplatform_client", "=", "jwplatform", ".", "Client", "(", "api_key", ",", "api_secret", ")", "logging", ".", "info", "(", "\"Updating video thumbnail.\"", ")", "try", ":", "response", "=", "jwplatform_client", ".", "videos", ".", "thumbnails", ".", "update", "(", "video_key", "=", "video_key", ",", "position", "=", "position", ",", "# Parameter which specifies seconds into video to extract thumbnail from.", "*", "*", "kwargs", ")", "except", "jwplatform", ".", "errors", ".", "JWPlatformError", "as", "e", ":", "logging", ".", "error", "(", "\"Encountered an error updating thumbnail.\\n{}\"", ".", "format", "(", "e", ")", ")", "sys", ".", "exit", "(", "e", ".", "message", ")", "return", "response" ]
Do the given number of episodes and return the rewards of each step as a list .
def doEpisodes ( self , number = 1 ) : for episode in range ( number ) : print "Starting episode %d." % episode # Initialise the profile cycle. if len ( self . profile . shape ) == 1 : # 1D array self . _pcycle = cycle ( self . profile ) else : assert self . profile . shape [ 0 ] >= number self . _pcycle = cycle ( self . profile [ episode , : ] ) # Scale the initial load. c = self . _pcycle . next ( ) for bus in self . market . case . buses : bus . p_demand = self . pdemand [ bus ] * c # Initialise agents and their tasks. for task , agent in zip ( self . tasks , self . agents ) : agent . newEpisode ( ) task . reset ( ) while False in [ task . isFinished ( ) for task in self . tasks ] : if True in [ task . isFinished ( ) for task in self . tasks ] : raise ValueError self . _oneInteraction ( ) self . reset_case ( )
862
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/continuous/experiment.py#L170-L199
[ "def", "arcball_constrain_to_axis", "(", "point", ",", "axis", ")", ":", "v", "=", "np", ".", "array", "(", "point", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "True", ")", "a", "=", "np", ".", "array", "(", "axis", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "True", ")", "v", "-=", "a", "*", "np", ".", "dot", "(", "a", ",", "v", ")", "# on plane", "n", "=", "vector_norm", "(", "v", ")", "if", "n", ">", "_EPS", ":", "if", "v", "[", "2", "]", "<", "0.0", ":", "np", ".", "negative", "(", "v", ",", "v", ")", "v", "/=", "n", "return", "v", "if", "a", "[", "2", "]", "==", "1.0", ":", "return", "np", ".", "array", "(", "[", "1.0", ",", "0.0", ",", "0.0", "]", ")", "return", "unit_vector", "(", "[", "-", "a", "[", "1", "]", ",", "a", "[", "0", "]", ",", "0.0", "]", ")" ]
Sets initial conditions for the experiment .
def reset ( self ) : self . stepid = 0 for task , agent in zip ( self . tasks , self . agents ) : task . reset ( ) agent . module . reset ( ) agent . history . reset ( )
863
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/continuous/experiment.py#L249-L258
[ "def", "read_avro", "(", "file_path_or_buffer", ",", "schema", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "file_path_or_buffer", ",", "six", ".", "string_types", ")", ":", "with", "open", "(", "file_path_or_buffer", ",", "'rb'", ")", "as", "f", ":", "return", "__file_to_dataframe", "(", "f", ",", "schema", ",", "*", "*", "kwargs", ")", "else", ":", "return", "__file_to_dataframe", "(", "file_path_or_buffer", ",", "schema", ",", "*", "*", "kwargs", ")" ]
Update the propensities for all actions . The propensity for the last action chosen will be updated using the feedback value that resulted from performing the action .
def _updatePropensities ( self , lastState , lastAction , reward ) : phi = self . recency for action in range ( self . module . numActions ) : carryOver = ( 1 - phi ) * self . module . getValue ( lastState , action ) experience = self . _experience ( lastState , action , lastAction , reward ) self . module . updateValue ( lastState , action , carryOver + experience )
864
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/roth_erev.py#L136-L154
[ "def", "union", "(", "self", ",", "other", ")", ":", "union", "=", "Rect", "(", ")", "lib", ".", "SDL_UnionRect", "(", "self", ".", "_ptr", ",", "other", ".", "_ptr", ",", "union", ".", "_ptr", ")", "return", "union" ]
Proportional probability method .
def _forwardImplementation ( self , inbuf , outbuf ) : assert self . module propensities = self . module . getActionValues ( 0 ) summedProps = sum ( propensities ) probabilities = propensities / summedProps action = eventGenerator ( probabilities ) # action = drawIndex(probabilities) outbuf [ : ] = scipy . array ( [ action ] )
865
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/roth_erev.py#L254-L267
[ "def", "_parse_args", "(", ")", ":", "token_file", "=", "os", ".", "path", ".", "expanduser", "(", "'~/.nikeplus_access_token'", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Export NikePlus data to CSV'", ")", "parser", ".", "add_argument", "(", "'-t'", ",", "'--token'", ",", "required", "=", "False", ",", "default", "=", "None", ",", "help", "=", "(", "'Access token for API, can also store in file %s'", "' to avoid passing via command line'", "%", "(", "token_file", ")", ")", ")", "parser", ".", "add_argument", "(", "'-s'", ",", "'--since'", ",", "type", "=", "_validate_date_str", ",", "help", "=", "(", "'Only process entries starting with YYYY-MM-DD '", "'and newer'", ")", ")", "args", "=", "vars", "(", "parser", ".", "parse_args", "(", ")", ")", "if", "args", "[", "'token'", "]", "is", "None", ":", "try", ":", "with", "open", "(", "token_file", ",", "'r'", ")", "as", "_file", ":", "access_token", "=", "_file", ".", "read", "(", ")", ".", "strip", "(", ")", "except", "IOError", ":", "print", "'Must pass access token via command line or store in file %s'", "%", "(", "token_file", ")", "sys", ".", "exit", "(", "-", "1", ")", "args", "[", "'token'", "]", "=", "access_token", "return", "args" ]
Writes case data to file in Excel format .
def write ( self , file_or_filename ) : self . book = Workbook ( ) self . _write_data ( None ) self . book . save ( file_or_filename )
866
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L38-L43
[ "def", "sample", "(", "self", ",", "features", ")", ":", "logits", ",", "losses", "=", "self", "(", "features", ")", "# pylint: disable=not-callable", "if", "self", ".", "_target_modality_is_real", ":", "return", "logits", ",", "logits", ",", "losses", "# Raw numbers returned from real modality.", "if", "self", ".", "hparams", ".", "sampling_method", "==", "\"argmax\"", ":", "samples", "=", "tf", ".", "argmax", "(", "logits", ",", "axis", "=", "-", "1", ")", "else", ":", "assert", "self", ".", "hparams", ".", "sampling_method", "==", "\"random\"", "def", "multinomial_squeeze", "(", "logits", ",", "temperature", "=", "1.0", ")", ":", "logits_shape", "=", "common_layers", ".", "shape_list", "(", "logits", ")", "reshaped_logits", "=", "(", "tf", ".", "reshape", "(", "logits", ",", "[", "-", "1", ",", "logits_shape", "[", "-", "1", "]", "]", ")", "/", "temperature", ")", "choices", "=", "tf", ".", "multinomial", "(", "reshaped_logits", ",", "1", ")", "choices", "=", "tf", ".", "reshape", "(", "choices", ",", "logits_shape", "[", ":", "-", "1", "]", ")", "return", "choices", "samples", "=", "multinomial_squeeze", "(", "logits", ",", "self", ".", "hparams", ".", "sampling_temp", ")", "return", "samples", ",", "logits", ",", "losses" ]
Writes bus data to an Excel spreadsheet .
def write_bus_data ( self , file ) : bus_sheet = self . book . add_sheet ( "Buses" ) for i , bus in enumerate ( self . case . buses ) : for j , attr in enumerate ( BUS_ATTRS ) : bus_sheet . write ( i , j , getattr ( bus , attr ) )
867
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L56-L63
[ "def", "_get_stddevs", "(", "self", ",", "C", ",", "sites", ",", "pga1100", ",", "sigma_pga", ",", "stddev_types", ")", ":", "std_intra", "=", "self", ".", "_compute_intra_event_std", "(", "C", ",", "sites", ".", "vs30", ",", "pga1100", ",", "sigma_pga", ")", "std_inter", "=", "C", "[", "'t_lny'", "]", "*", "np", ".", "ones_like", "(", "sites", ".", "vs30", ")", "stddevs", "=", "[", "]", "for", "stddev_type", "in", "stddev_types", ":", "assert", "stddev_type", "in", "self", ".", "DEFINED_FOR_STANDARD_DEVIATION_TYPES", "if", "stddev_type", "==", "const", ".", "StdDev", ".", "TOTAL", ":", "stddevs", ".", "append", "(", "self", ".", "_get_total_sigma", "(", "C", ",", "std_intra", ",", "std_inter", ")", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTRA_EVENT", ":", "stddevs", ".", "append", "(", "std_intra", ")", "elif", "stddev_type", "==", "const", ".", "StdDev", ".", "INTER_EVENT", ":", "stddevs", ".", "append", "(", "std_inter", ")", "return", "stddevs" ]
Writes branch data to an Excel spreadsheet .
def write_branch_data ( self , file ) : branch_sheet = self . book . add_sheet ( "Branches" ) for i , branch in enumerate ( self . case . branches ) : for j , attr in enumerate ( BRANCH_ATTRS ) : branch_sheet . write ( i , j , getattr ( branch , attr ) )
868
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L66-L73
[ "def", "_get_observed_mmax", "(", "catalogue", ",", "config", ")", ":", "if", "config", "[", "'input_mmax'", "]", ":", "obsmax", "=", "config", "[", "'input_mmax'", "]", "if", "config", "[", "'input_mmax_uncertainty'", "]", ":", "return", "config", "[", "'input_mmax'", "]", ",", "config", "[", "'input_mmax_uncertainty'", "]", "else", ":", "raise", "ValueError", "(", "'Input mmax uncertainty must be specified!'", ")", "max_location", "=", "np", ".", "argmax", "(", "catalogue", "[", "'magnitude'", "]", ")", "obsmax", "=", "catalogue", "[", "'magnitude'", "]", "[", "max_location", "]", "cond", "=", "isinstance", "(", "catalogue", "[", "'sigmaMagnitude'", "]", ",", "np", ".", "ndarray", ")", "and", "len", "(", "catalogue", "[", "'sigmaMagnitude'", "]", ")", ">", "0", "and", "not", "np", ".", "all", "(", "np", ".", "isnan", "(", "catalogue", "[", "'sigmaMagnitude'", "]", ")", ")", "if", "cond", ":", "if", "not", "np", ".", "isnan", "(", "catalogue", "[", "'sigmaMagnitude'", "]", "[", "max_location", "]", ")", ":", "return", "obsmax", ",", "catalogue", "[", "'sigmaMagnitude'", "]", "[", "max_location", "]", "else", ":", "print", "(", "'Uncertainty not given on observed Mmax\\n'", "'Taking largest magnitude uncertainty found in catalogue'", ")", "return", "obsmax", ",", "np", ".", "nanmax", "(", "catalogue", "[", "'sigmaMagnitude'", "]", ")", "elif", "config", "[", "'input_mmax_uncertainty'", "]", ":", "return", "obsmax", ",", "config", "[", "'input_mmax_uncertainty'", "]", "else", ":", "raise", "ValueError", "(", "'Input mmax uncertainty must be specified!'", ")" ]
Write generator data to file .
def write_generator_data ( self , file ) : generator_sheet = self . book . add_sheet ( "Generators" ) for j , generator in enumerate ( self . case . generators ) : i = generator . bus . _i for k , attr in enumerate ( GENERATOR_ATTRS ) : generator_sheet . write ( j , 0 , i )
869
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L76-L84
[ "def", "defBoundary", "(", "self", ")", ":", "self", ".", "BoroCnstNatAll", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "# Find the natural borrowing constraint conditional on next period's state", "for", "j", "in", "range", "(", "self", ".", "StateCount", ")", ":", "PermShkMinNext", "=", "np", ".", "min", "(", "self", ".", "IncomeDstn_list", "[", "j", "]", "[", "1", "]", ")", "TranShkMinNext", "=", "np", ".", "min", "(", "self", ".", "IncomeDstn_list", "[", "j", "]", "[", "2", "]", ")", "self", ".", "BoroCnstNatAll", "[", "j", "]", "=", "(", "self", ".", "solution_next", ".", "mNrmMin", "[", "j", "]", "-", "TranShkMinNext", ")", "*", "(", "self", ".", "PermGroFac_list", "[", "j", "]", "*", "PermShkMinNext", ")", "/", "self", ".", "Rfree_list", "[", "j", "]", "self", ".", "BoroCnstNat_list", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "self", ".", "mNrmMin_list", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "self", ".", "BoroCnstDependency", "=", "np", ".", "zeros", "(", "(", "self", ".", "StateCount", ",", "self", ".", "StateCount", ")", ")", "+", "np", ".", "nan", "# The natural borrowing constraint in each current state is the *highest*", "# among next-state-conditional natural borrowing constraints that could", "# occur from this current state.", "for", "i", "in", "range", "(", "self", ".", "StateCount", ")", ":", "possible_next_states", "=", "self", ".", "MrkvArray", "[", "i", ",", ":", "]", ">", "0", "self", ".", "BoroCnstNat_list", "[", "i", "]", "=", "np", ".", "max", "(", "self", ".", "BoroCnstNatAll", "[", "possible_next_states", "]", ")", "# Explicitly handle the \"None\" case: ", "if", "self", ".", "BoroCnstArt", "is", "None", ":", "self", ".", "mNrmMin_list", "[", "i", "]", "=", "self", ".", "BoroCnstNat_list", "[", "i", "]", "else", ":", "self", ".", "mNrmMin_list", "[", "i", "]", "=", "np", ".", "max", "(", "[", "self", ".", "BoroCnstNat_list", "[", "i", "]", ",", "self", ".", "BoroCnstArt", "]", ")", "self", ".", "BoroCnstDependency", "[", "i", ",", ":", "]", "=", "self", ".", "BoroCnstNat_list", "[", "i", "]", "==", "self", ".", "BoroCnstNatAll" ]
Writes case data as CSV .
def write ( self , file_or_filename ) : if isinstance ( file_or_filename , basestring ) : file = open ( file_or_filename , "wb" ) else : file = file_or_filename self . writer = csv . writer ( file ) super ( CSVWriter , self ) . write ( file )
870
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L112-L122
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Writes the case data as CSV .
def write_case_data ( self , file ) : writer = self . _get_writer ( file ) writer . writerow ( [ "Name" , "base_mva" ] ) writer . writerow ( [ self . case . name , self . case . base_mva ] )
871
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L125-L130
[ "def", "dump", "(", "self", ")", ":", "assert", "self", ".", "database", "is", "not", "None", "cmd", "=", "\"SELECT count from {} WHERE rowid={}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "STATE_INFO_ROW", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "self", ".", "_from_sqlite", "(", "ret", "[", "0", "]", "[", "0", "]", ")", "+", "self", ".", "inserts", "if", "count", ">", "self", ".", "row_limit", ":", "msg", "=", "\"cleaning up state, this might take a while.\"", "logger", ".", "warning", "(", "msg", ")", "delete", "=", "count", "-", "self", ".", "row_limit", "delete", "+=", "int", "(", "self", ".", "row_limit", "*", "(", "self", ".", "row_cleanup_quota", "/", "100.0", ")", ")", "cmd", "=", "(", "\"DELETE FROM {} WHERE timestamp IN (\"", "\"SELECT timestamp FROM {} ORDER BY timestamp ASC LIMIT {});\"", ")", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ",", "self", ".", "STATE_TABLE", ",", "delete", ")", ")", "self", ".", "_vacuum", "(", ")", "cmd", "=", "\"SELECT COUNT(*) FROM {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "ret", "[", "0", "]", "[", "0", "]", "cmd", "=", "\"UPDATE {} SET count = {} WHERE rowid = {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "_to_sqlite", "(", "count", ")", ",", "self", ".", "STATE_INFO_ROW", ",", ")", ")", "self", ".", "_update_cache_directory_state", "(", ")", "self", ".", "database", ".", "commit", "(", ")", "self", ".", "cursor", ".", "close", "(", ")", "self", ".", "database", ".", "close", "(", ")", "self", ".", "database", "=", "None", "self", ".", "cursor", "=", "None", "self", ".", "inserts", "=", "0" ]
Writes bus data as CSV .
def write_bus_data ( self , file ) : writer = self . _get_writer ( file ) writer . writerow ( BUS_ATTRS ) for bus in self . case . buses : writer . writerow ( [ getattr ( bus , attr ) for attr in BUS_ATTRS ] )
872
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L133-L139
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Writes branch data as CSV .
def write_branch_data ( self , file ) : writer = self . _get_writer ( file ) writer . writerow ( BRANCH_ATTRS ) for branch in self . case . branches : writer . writerow ( [ getattr ( branch , a ) for a in BRANCH_ATTRS ] )
873
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L142-L148
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Write generator data as CSV .
def write_generator_data ( self , file ) : writer = self . _get_writer ( file ) writer . writerow ( [ "bus" ] + GENERATOR_ATTRS ) for g in self . case . generators : i = g . bus . _i writer . writerow ( [ i ] + [ getattr ( g , a ) for a in GENERATOR_ATTRS ] )
874
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/excel.py#L151-L159
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Computes cleared offers and bids .
def run ( self ) : # Start the clock. t0 = time . time ( ) # Manage reactive power offers/bids. haveQ = self . _isReactiveMarket ( ) # Withhold offers/bids outwith optional price limits. self . _withholdOffbids ( ) # Convert offers/bids to pwl functions and update limits. self . _offbidToCase ( ) # Compute dispatch points and LMPs using OPF. success = self . _runOPF ( ) if success : # Get nodal marginal prices from OPF. gteeOfferPrice , gteeBidPrice = self . _nodalPrices ( haveQ ) # Determine quantity and price for each offer/bid. self . _runAuction ( gteeOfferPrice , gteeBidPrice , haveQ ) logger . info ( "SmartMarket cleared in %.3fs" % ( time . time ( ) - t0 ) ) else : for offbid in self . offers + self . bids : offbid . clearedQuantity = 0.0 offbid . clearedPrice = 0.0 offbid . accepted = False offbid . generator . p = 0.0 logger . error ( "Non-convergent market OPF. Blackout!" ) return self . offers , self . bids
875
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/smart_market.py#L131-L166
[ "def", "_ssweek_to_gregorian", "(", "ssweek_year", ",", "ssweek_week", ",", "ssweek_day", ")", ":", "year_start", "=", "_ssweek_year_start", "(", "ssweek_year", ")", "return", "year_start", "+", "dt", ".", "timedelta", "(", "days", "=", "ssweek_day", "-", "1", ",", "weeks", "=", "ssweek_week", "-", "1", ")" ]
Computes dispatch points and LMPs using OPF .
def _runOPF ( self ) : if self . decommit : solver = UDOPF ( self . case , dc = ( self . locationalAdjustment == "dc" ) ) elif self . locationalAdjustment == "dc" : solver = OPF ( self . case , dc = True ) else : solver = OPF ( self . case , dc = False , opt = { "verbose" : True } ) self . _solution = solver . solve ( ) # for ob in self.offers + self.bids: # ob.f = solution["f"] return self . _solution [ "converged" ]
876
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/smart_market.py#L266-L281
[ "def", "surviors_are_inconsistent", "(", "survivor_mapping", ":", "Mapping", "[", "BaseEntity", ",", "Set", "[", "BaseEntity", "]", "]", ")", "->", "Set", "[", "BaseEntity", "]", ":", "victim_mapping", "=", "set", "(", ")", "for", "victim", "in", "itt", ".", "chain", ".", "from_iterable", "(", "survivor_mapping", ".", "values", "(", ")", ")", ":", "if", "victim", "in", "survivor_mapping", ":", "victim_mapping", ".", "add", "(", "victim", ")", "return", "victim_mapping" ]
Return a JSON string representation of a Python data structure .
def encode ( self , o ) : # This doesn't pass the iterator directly to ''.join() because it # sucks at reporting exceptions. It's going to do this internally # anyway because it uses PySequence_Fast or similar. chunks = list ( self . iterencode ( o ) ) return '' . join ( chunks )
877
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/contrib/public/services/simplejson/encoder.py#L278-L289
[ "def", "removeAllEntitlements", "(", "self", ",", "appId", ")", ":", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "\"appId\"", ":", "appId", "}", "url", "=", "self", ".", "_url", "+", "\"/licenses/removeAllEntitlements\"", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "proxy_url", "=", "self", ".", "_proxy_url", ",", "proxy_port", "=", "self", ".", "_proxy_port", ")" ]
Use processors to compute file metrics .
def compute_file_metrics ( processors , language , key , token_list ) : # multiply iterator tli = itertools . tee ( token_list , len ( processors ) ) metrics = OrderedDict ( ) # reset all processors for p in processors : p . reset ( ) # process all tokens for p , tl in zip ( processors , tli ) : p . process_file ( language , key , tl ) # collect metrics from all processors for p in processors : metrics . update ( p . metrics ) return metrics
878
https://github.com/finklabs/metrics/blob/fd9974af498831664b9ae8e8f3834e1ec2e8a699/metrics/compute.py#L8-L26
[ "def", "_post_process_yaml_data", "(", "self", ",", "fixture_data", ":", "Dict", "[", "str", ",", "Dict", "[", "str", ",", "Any", "]", "]", ",", "relationship_columns", ":", "Set", "[", "str", "]", ",", ")", "->", "Tuple", "[", "Dict", "[", "str", ",", "Dict", "[", "str", ",", "Any", "]", "]", ",", "List", "[", "str", "]", "]", ":", "rv", "=", "{", "}", "relationships", "=", "set", "(", ")", "if", "not", "fixture_data", ":", "return", "rv", ",", "relationships", "for", "identifier_id", ",", "data", "in", "fixture_data", ".", "items", "(", ")", ":", "new_data", "=", "{", "}", "for", "col_name", ",", "value", "in", "data", ".", "items", "(", ")", ":", "if", "col_name", "not", "in", "relationship_columns", ":", "new_data", "[", "col_name", "]", "=", "value", "continue", "identifiers", "=", "normalize_identifiers", "(", "value", ")", "if", "identifiers", ":", "relationships", ".", "add", "(", "identifiers", "[", "0", "]", ".", "class_name", ")", "if", "isinstance", "(", "value", ",", "str", ")", "and", "len", "(", "identifiers", ")", "<=", "1", ":", "new_data", "[", "col_name", "]", "=", "identifiers", "[", "0", "]", "if", "identifiers", "else", "None", "else", ":", "new_data", "[", "col_name", "]", "=", "identifiers", "rv", "[", "identifier_id", "]", "=", "new_data", "return", "rv", ",", "list", "(", "relationships", ")" ]
This method loads the IWNLP . Lemmatizer json file and creates a dictionary of lowercased forms which maps each form to its possible lemmas .
def load ( self , lemmatizer_path ) : self . lemmatizer = { } with io . open ( lemmatizer_path , encoding = 'utf-8' ) as data_file : raw = json . load ( data_file ) for entry in raw : self . lemmatizer [ entry [ "Form" ] ] = entry [ "Lemmas" ] self . apply_blacklist ( )
879
https://github.com/Liebeck/IWNLP-py/blob/fd4b81769317476eac0487396cce0faf482a1913/iwnlp/iwnlp_wrapper.py#L14-L24
[ "def", "_print_duration", "(", "self", ")", ":", "duration", "=", "int", "(", "time", ".", "time", "(", ")", "-", "self", ".", "_start_time", ")", "self", ".", "_print", "(", "datetime", ".", "timedelta", "(", "seconds", "=", "duration", ")", ")" ]
Writes the case data to file .
def write ( self , file_or_filename ) : if isinstance ( file_or_filename , basestring ) : file = None try : file = open ( file_or_filename , "wb" ) except Exception , detail : logger . error ( "Error opening %s." % detail ) finally : if file is not None : self . _write_data ( file ) file . close ( ) else : file = file_or_filename self . _write_data ( file ) return file
880
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/io/common.py#L64-L81
[ "def", "listrecords", "(", "*", "*", "kwargs", ")", ":", "record_dumper", "=", "serializer", "(", "kwargs", "[", "'metadataPrefix'", "]", ")", "e_tree", ",", "e_listrecords", "=", "verb", "(", "*", "*", "kwargs", ")", "result", "=", "get_records", "(", "*", "*", "kwargs", ")", "for", "record", "in", "result", ".", "items", ":", "pid", "=", "oaiid_fetcher", "(", "record", "[", "'id'", "]", ",", "record", "[", "'json'", "]", "[", "'_source'", "]", ")", "e_record", "=", "SubElement", "(", "e_listrecords", ",", "etree", ".", "QName", "(", "NS_OAIPMH", ",", "'record'", ")", ")", "header", "(", "e_record", ",", "identifier", "=", "pid", ".", "pid_value", ",", "datestamp", "=", "record", "[", "'updated'", "]", ",", "sets", "=", "record", "[", "'json'", "]", "[", "'_source'", "]", ".", "get", "(", "'_oai'", ",", "{", "}", ")", ".", "get", "(", "'sets'", ",", "[", "]", ")", ",", ")", "e_metadata", "=", "SubElement", "(", "e_record", ",", "etree", ".", "QName", "(", "NS_OAIPMH", ",", "'metadata'", ")", ")", "e_metadata", ".", "append", "(", "record_dumper", "(", "pid", ",", "record", "[", "'json'", "]", ")", ")", "resumption_token", "(", "e_listrecords", ",", "result", ",", "*", "*", "kwargs", ")", "return", "e_tree" ]
The action vector is stripped and the only element is cast to integer and given to the super class .
def performAction ( self , action ) : self . t += 1 super ( ProfitTask , self ) . performAction ( int ( action [ 0 ] ) ) self . samples += 1
881
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/discrete/task.py#L120-L126
[ "def", "get_cycle_time", "(", "self", ",", "issue_or_start_or_key", ")", ":", "if", "isinstance", "(", "issue_or_start_or_key", ",", "basestring", ")", ":", "issue_or_start_or_key", "=", "self", ".", "get_issue", "(", "issue_or_start_or_key", ")", "if", "isinstance", "(", "issue_or_start_or_key", ",", "jira", ".", "resources", ".", "Issue", ")", ":", "progress_started", "=", "self", ".", "get_datetime_issue_in_progress", "(", "issue_or_start_or_key", ")", "elif", "isinstance", "(", "issue_or_start_or_key", ",", "datetime", ".", "datetime", ")", ":", "progress_started", "=", "issue_or_start_or_key", "curr_time", "=", "datetime", ".", "datetime", ".", "now", "(", "dateutil", ".", "tz", ".", "tzlocal", "(", ")", ")", "return", "utils", ".", "working_cycletime", "(", "progress_started", ",", "curr_time", ")" ]
A filtered mapping towards performAction of the underlying environment .
def addReward ( self , r = None ) : r = self . getReward ( ) if r is None else r # by default, the cumulative reward is just the sum over the episode if self . discount : self . cumulativeReward += power ( self . discount , self . samples ) * r else : self . cumulativeReward += r
882
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/discrete/task.py#L156-L166
[ "def", "bucketCSVs", "(", "csvFile", ",", "bucketIdx", "=", "2", ")", ":", "try", ":", "with", "open", "(", "csvFile", ",", "\"rU\"", ")", "as", "f", ":", "reader", "=", "csv", ".", "reader", "(", "f", ")", "headers", "=", "next", "(", "reader", ",", "None", ")", "dataDict", "=", "OrderedDict", "(", ")", "for", "lineNumber", ",", "line", "in", "enumerate", "(", "reader", ")", ":", "if", "line", "[", "bucketIdx", "]", "in", "dataDict", ":", "dataDict", "[", "line", "[", "bucketIdx", "]", "]", ".", "append", "(", "line", ")", "else", ":", "# new bucket", "dataDict", "[", "line", "[", "bucketIdx", "]", "]", "=", "[", "line", "]", "except", "IOError", "as", "e", ":", "print", "e", "filePaths", "=", "[", "]", "for", "i", ",", "(", "_", ",", "lines", ")", "in", "enumerate", "(", "dataDict", ".", "iteritems", "(", ")", ")", ":", "bucketFile", "=", "csvFile", ".", "replace", "(", "\".\"", ",", "\"_\"", "+", "str", "(", "i", ")", "+", "\".\"", ")", "writeCSV", "(", "lines", ",", "headers", ",", "bucketFile", ")", "filePaths", ".", "append", "(", "bucketFile", ")", "return", "filePaths" ]
Returns the initial voltage profile .
def getV0 ( self , v_mag_guess , buses , generators , type = CASE_GUESS ) : if type == CASE_GUESS : Va = array ( [ b . v_angle * ( pi / 180.0 ) for b in buses ] ) Vm = array ( [ b . v_magnitude for b in buses ] ) V0 = Vm * exp ( 1j * Va ) elif type == FLAT_START : V0 = ones ( len ( buses ) ) elif type == FROM_INPUT : V0 = v_mag_guess else : raise ValueError # Set the voltages of PV buses and the reference bus in the guess. # online = [g for g in self.case.generators if g.online] gbus = [ g . bus . _i for g in generators ] Vg = array ( [ g . v_magnitude for g in generators ] ) V0 [ gbus ] = Vg * abs ( V0 [ gbus ] ) / V0 [ gbus ] return V0
883
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/estimator.py#L300-L321
[ "def", "DeleteNotifications", "(", "self", ",", "session_ids", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "if", "not", "session_ids", ":", "return", "for", "session_id", "in", "session_ids", ":", "if", "not", "isinstance", "(", "session_id", ",", "rdfvalue", ".", "SessionID", ")", ":", "raise", "RuntimeError", "(", "\"Can only delete notifications for rdfvalue.SessionIDs.\"", ")", "if", "start", "is", "None", ":", "start", "=", "0", "else", ":", "start", "=", "int", "(", "start", ")", "if", "end", "is", "None", ":", "end", "=", "self", ".", "frozen_timestamp", "or", "rdfvalue", ".", "RDFDatetime", ".", "Now", "(", ")", "for", "queue", ",", "ids", "in", "iteritems", "(", "collection", ".", "Group", "(", "session_ids", ",", "lambda", "session_id", ":", "session_id", ".", "Queue", "(", ")", ")", ")", ":", "queue_shards", "=", "self", ".", "GetAllNotificationShards", "(", "queue", ")", "self", ".", "data_store", ".", "DeleteNotifications", "(", "queue_shards", ",", "ids", ",", "start", ",", "end", ")" ]
Prints comparison of measurements and their estimations .
def output_solution ( self , fd , z , z_est , error_sqrsum ) : col_width = 11 sep = ( "=" * col_width + " " ) * 4 + "\n" fd . write ( "State Estimation\n" ) fd . write ( "-" * 16 + "\n" ) fd . write ( sep ) fd . write ( "Type" . center ( col_width ) + " " ) fd . write ( "Name" . center ( col_width ) + " " ) fd . write ( "Measurement" . center ( col_width ) + " " ) fd . write ( "Estimation" . center ( col_width ) + " " ) fd . write ( "\n" ) fd . write ( sep ) c = 0 for t in [ PF , PT , QF , QT , PG , QG , VM , VA ] : for meas in self . measurements : if meas . type == t : n = meas . b_or_l . name [ : col_width ] . ljust ( col_width ) fd . write ( t . ljust ( col_width ) + " " ) fd . write ( n + " " ) fd . write ( "%11.5f " % z [ c ] ) fd . write ( "%11.5f\n" % z_est [ c ] ) # fd.write("%s\t%s\t%.3f\t%.3f\n" % (t, n, z[c], z_est[c])) c += 1 fd . write ( "\nWeighted sum of error squares = %.4f\n" % error_sqrsum )
884
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pylon/estimator.py#L324-L352
[ "def", "make_inheritable", "(", "token", ")", ":", "return", "win32api", ".", "DuplicateHandle", "(", "win32api", ".", "GetCurrentProcess", "(", ")", ",", "token", ",", "win32api", ".", "GetCurrentProcess", "(", ")", ",", "0", ",", "1", ",", "win32con", ".", "DUPLICATE_SAME_ACCESS", ")" ]
Clears a set of bids and offers .
def run ( self ) : # Compute cleared offer/bid quantities from total dispatched quantity. self . _clearQuantities ( ) # Compute shift values to add to lam to get desired pricing. # lao, fro, lab, frb = self._first_rejected_last_accepted() # Clear offer/bid prices according to auction type. self . _clearPrices ( ) # self._clear_prices(lao, fro, lab, frb) # Clip cleared prices according to guarantees and limits. self . _clipPrices ( ) self . _logClearances ( ) return self . offers , self . bids
885
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/auction.py#L85-L103
[ "def", "format_log_context", "(", "msg", ",", "connection", "=", "None", ",", "keyspace", "=", "None", ")", ":", "connection_info", "=", "connection", "or", "'DEFAULT_CONNECTION'", "if", "keyspace", ":", "msg", "=", "'[Connection: {0}, Keyspace: {1}] {2}'", ".", "format", "(", "connection_info", ",", "keyspace", ",", "msg", ")", "else", ":", "msg", "=", "'[Connection: {0}] {1}'", ".", "format", "(", "connection_info", ",", "msg", ")", "return", "msg" ]
Computes the cleared bid quantity from total dispatched quantity .
def _clearQuantity ( self , offbids , gen ) : # Filter out offers/bids not applicable to the generator in question. gOffbids = [ offer for offer in offbids if offer . generator == gen ] # Offers/bids within valid price limits (not withheld). valid = [ ob for ob in gOffbids if not ob . withheld ] # Sort offers by price in ascending order and bids in descending order. valid . sort ( key = lambda ob : ob . price , reverse = [ False , True ] [ gen . is_load ] ) acceptedQty = 0.0 for ob in valid : # Compute the fraction of the block accepted. accepted = ( ob . totalQuantity - acceptedQty ) / ob . quantity # Clip to the range 0-1. if accepted > 1.0 : accepted = 1.0 elif accepted < 1.0e-05 : accepted = 0.0 ob . clearedQuantity = accepted * ob . quantity ob . accepted = ( accepted > 0.0 ) # Log the event. # if ob.accepted: # logger.info("%s [%s, %.3f, %.3f] accepted at %.2f MW." % # (ob.__class__.__name__, ob.generator.name, ob.quantity, # ob.price, ob.clearedQuantity)) # else: # logger.info("%s [%s, %.3f, %.3f] rejected." % # (ob.__class__.__name__, ob.generator.name, ob.quantity, # ob.price)) # Increment the accepted quantity. acceptedQty += ob . quantity
886
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/auction.py#L120-L158
[ "def", "_setup_conn_old", "(", "*", "*", "kwargs", ")", ":", "host", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.api_url'", ",", "'http://localhost:8080'", ")", "username", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.user'", ")", "password", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.password'", ")", "ca_cert", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.certificate-authority-data'", ")", "client_cert", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-certificate-data'", ")", "client_key", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-key-data'", ")", "ca_cert_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.certificate-authority-file'", ")", "client_cert_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-certificate-file'", ")", "client_key_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-key-file'", ")", "# Override default API settings when settings are provided", "if", "'api_url'", "in", "kwargs", ":", "host", "=", "kwargs", ".", "get", "(", "'api_url'", ")", "if", "'api_user'", "in", "kwargs", ":", "username", "=", "kwargs", ".", "get", "(", "'api_user'", ")", "if", "'api_password'", "in", "kwargs", ":", "password", "=", "kwargs", ".", "get", "(", "'api_password'", ")", "if", "'api_certificate_authority_file'", "in", "kwargs", ":", "ca_cert_file", "=", "kwargs", ".", "get", "(", "'api_certificate_authority_file'", ")", "if", "'api_client_certificate_file'", "in", "kwargs", ":", "client_cert_file", "=", "kwargs", ".", "get", "(", "'api_client_certificate_file'", ")", "if", "'api_client_key_file'", "in", "kwargs", ":", "client_key_file", "=", "kwargs", ".", "get", "(", "'api_client_key_file'", ")", "if", "(", "kubernetes", ".", "client", ".", "configuration", ".", "host", "!=", "host", "or", "kubernetes", ".", "client", ".", "configuration", ".", "user", "!=", "username", "or", "kubernetes", ".", "client", ".", "configuration", ".", "password", "!=", "password", ")", ":", "# Recreates API connection if settings are changed", "kubernetes", ".", "client", ".", "configuration", ".", "__init__", "(", ")", "kubernetes", ".", "client", ".", "configuration", ".", "host", "=", "host", "kubernetes", ".", "client", ".", "configuration", ".", "user", "=", "username", "kubernetes", ".", "client", ".", "configuration", ".", "passwd", "=", "password", "if", "ca_cert_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "ca_cert_file", "elif", "ca_cert", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "ca", ":", "ca", ".", "write", "(", "base64", ".", "b64decode", "(", "ca_cert", ")", ")", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "ca", ".", "name", "else", ":", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "None", "if", "client_cert_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "cert_file", "=", "client_cert_file", "elif", "client_cert", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "c", ":", "c", ".", "write", "(", "base64", ".", "b64decode", "(", "client_cert", ")", ")", "kubernetes", ".", "client", ".", "configuration", ".", "cert_file", "=", "c", ".", "name", "else", ":", "kubernetes", ".", "client", ".", 
"configuration", ".", "cert_file", "=", "None", "if", "client_key_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "key_file", "=", "client_key_file", "elif", "client_key", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "k", ":", "k", ".", "write", "(", "base64", ".", "b64decode", "(", "client_key", ")", ")", "kubernetes", ".", "client", ".", "configuration", ".", "key_file", "=", "k", ".", "name", "else", ":", "kubernetes", ".", "client", ".", "configuration", ".", "key_file", "=", "None", "return", "{", "}" ]
Clears prices according to auction type .
def _clearPrices ( self ) : for offbid in self . offers + self . bids : if self . auctionType == DISCRIMINATIVE : offbid . clearedPrice = offbid . price elif self . auctionType == FIRST_PRICE : offbid . clearedPrice = offbid . lmbda else : raise ValueError
887
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/auction.py#L219-L228
[ "def", "_reset_id_table", "(", "self", ",", "mapping", ")", ":", "if", "not", "hasattr", "(", "self", ",", "\"_initialized_id_tables\"", ")", ":", "self", ".", "_initialized_id_tables", "=", "set", "(", ")", "id_table_name", "=", "\"{}_sf_ids\"", ".", "format", "(", "mapping", "[", "\"table\"", "]", ")", "if", "id_table_name", "not", "in", "self", ".", "_initialized_id_tables", ":", "if", "id_table_name", "in", "self", ".", "metadata", ".", "tables", ":", "self", ".", "metadata", ".", "remove", "(", "self", ".", "metadata", ".", "tables", "[", "id_table_name", "]", ")", "id_table", "=", "Table", "(", "id_table_name", ",", "self", ".", "metadata", ",", "Column", "(", "\"id\"", ",", "Unicode", "(", "255", ")", ",", "primary_key", "=", "True", ")", ",", "Column", "(", "\"sf_id\"", ",", "Unicode", "(", "18", ")", ")", ",", ")", "if", "id_table", ".", "exists", "(", ")", ":", "id_table", ".", "drop", "(", ")", "id_table", ".", "create", "(", ")", "self", ".", "_initialized_id_tables", ".", "add", "(", "id_table_name", ")", "return", "id_table_name" ]
Clip cleared prices according to guarantees and limits .
def _clipPrices ( self ) : # Guarantee that cleared offer prices are >= offers. if self . guaranteeOfferPrice : for offer in self . offers : if offer . accepted and offer . clearedPrice < offer . price : offer . clearedPrice = offer . price # Guarantee that cleared bid prices are <= bids. if self . guaranteeBidPrice : for bid in self . bids : if bid . accepted and bid . clearedPrice > bid . price : bid . clearedPrice = bid . price # Clip cleared offer prices. if self . limits . has_key ( "maxClearedOffer" ) : maxClearedOffer = self . limits [ "maxClearedOffer" ] for offer in self . offers : if offer . clearedPrice > maxClearedOffer : offer . clearedPrice = maxClearedOffer # Clip cleared bid prices. if self . limits . has_key ( "minClearedBid" ) : minClearedBid = self . limits [ "minClearedBid" ] for bid in self . bids : if bid . clearedPrice < minClearedBid : bid . clearedPrice = minClearedBid # Make prices uniform across all offers/bids for each generator after # clipping (except for discrim auction) since clipping may only affect # a single block of a multi-block generator. if self . auctionType != DISCRIMINATIVE : for g in self . case . generators : gOffers = [ of for of in self . offers if of . generator == g ] if gOffers : uniformPrice = max ( [ of . clearedPrice for of in gOffers ] ) for of in gOffers : of . clearedPrice = uniformPrice gBids = [ bid for bid in self . bids if bid . vLoad == g ] if gBids : uniformPrice = min ( [ bid . cleared_price for bid in gBids ] ) for bid in gBids : bid . clearedPrice = uniformPrice
888
https://github.com/rwl/pylon/blob/916514255db1ae1661406f0283df756baf960d14/pyreto/auction.py#L265-L311
[ "def", "receive_message", "(", "self", ",", "message", ",", "data", ")", ":", "# noqa: E501 pylint: disable=too-many-return-statements", "if", "data", "[", "MESSAGE_TYPE", "]", "==", "TYPE_DEVICE_ADDED", ":", "uuid", "=", "data", "[", "'device'", "]", "[", "'deviceId'", "]", "name", "=", "data", "[", "'device'", "]", "[", "'name'", "]", "self", ".", "_add_member", "(", "uuid", ",", "name", ")", "return", "True", "if", "data", "[", "MESSAGE_TYPE", "]", "==", "TYPE_DEVICE_REMOVED", ":", "uuid", "=", "data", "[", "'deviceId'", "]", "self", ".", "_remove_member", "(", "uuid", ")", "return", "True", "if", "data", "[", "MESSAGE_TYPE", "]", "==", "TYPE_DEVICE_UPDATED", ":", "uuid", "=", "data", "[", "'device'", "]", "[", "'deviceId'", "]", "name", "=", "data", "[", "'device'", "]", "[", "'name'", "]", "self", ".", "_add_member", "(", "uuid", ",", "name", ")", "return", "True", "if", "data", "[", "MESSAGE_TYPE", "]", "==", "TYPE_MULTIZONE_STATUS", ":", "members", "=", "data", "[", "'status'", "]", "[", "'devices'", "]", "members", "=", "{", "member", "[", "'deviceId'", "]", ":", "member", "[", "'name'", "]", "for", "member", "in", "members", "}", "removed_members", "=", "list", "(", "set", "(", "self", ".", "_members", ".", "keys", "(", ")", ")", "-", "set", "(", "members", ".", "keys", "(", ")", ")", ")", "added_members", "=", "list", "(", "set", "(", "members", ".", "keys", "(", ")", ")", "-", "set", "(", "self", ".", "_members", ".", "keys", "(", ")", ")", ")", "_LOGGER", ".", "debug", "(", "\"(%s) Added members %s, Removed members: %s\"", ",", "self", ".", "_uuid", ",", "added_members", ",", "removed_members", ")", "for", "uuid", "in", "removed_members", ":", "self", ".", "_remove_member", "(", "uuid", ")", "for", "uuid", "in", "added_members", ":", "self", ".", "_add_member", "(", "uuid", ",", "members", "[", "uuid", "]", ")", "for", "listener", "in", "list", "(", "self", ".", "_status_listeners", ")", ":", "listener", ".", "multizone_status_received", "(", ")", "return", "True", "if", "data", "[", "MESSAGE_TYPE", "]", "==", "TYPE_SESSION_UPDATED", ":", "# A temporary group has been formed", "return", "True", "if", "data", "[", "MESSAGE_TYPE", "]", "==", "TYPE_CASTING_GROUPS", ":", "# Answer to GET_CASTING_GROUPS", "return", "True", "return", "False" ]
Try to make a GET request with an HTTP client against a certain path and return once any response has been received ignoring any errors .
def wait_for_response ( client , timeout , path = '/' , expected_status_code = None ) : # We want time.monotonic on Pythons that have it, otherwise time.time will # have to do. get_time = getattr ( time , 'monotonic' , time . time ) deadline = get_time ( ) + timeout while True : try : # Don't care what the response is, as long as we get one time_left = deadline - get_time ( ) response = client . get ( path , timeout = max ( time_left , 0.001 ) , allow_redirects = False ) if ( expected_status_code is None or response . status_code == expected_status_code ) : return except requests . exceptions . Timeout : # Requests timed out, our time must be up break except Exception : # Ignore other exceptions pass if get_time ( ) >= deadline : break time . sleep ( 0.1 ) raise TimeoutError ( 'Timeout waiting for HTTP response.' )
889
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/client.py#L205-L248
[ "def", "lessThan", "(", "self", ",", "leftIndex", ",", "rightIndex", ")", ":", "leftData", "=", "self", ".", "sourceModel", "(", ")", ".", "data", "(", "leftIndex", ",", "RegistryTableModel", ".", "SORT_ROLE", ")", "rightData", "=", "self", ".", "sourceModel", "(", ")", ".", "data", "(", "rightIndex", ",", "RegistryTableModel", ".", "SORT_ROLE", ")", "return", "leftData", "<", "rightData" ]
Make a request against a container .
def request ( self , method , path = None , url_kwargs = None , * * kwargs ) : return self . _session . request ( method , self . _url ( path , url_kwargs ) , * * kwargs )
890
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/client.py#L90-L104
[ "def", "swd_sync", "(", "self", ",", "pad", "=", "False", ")", ":", "if", "pad", ":", "self", ".", "_dll", ".", "JLINK_SWD_SyncBytes", "(", ")", "else", ":", "self", ".", "_dll", ".", "JLINK_SWD_SyncBits", "(", ")", "return", "None" ]
Sends an OPTIONS request .
def options ( self , path = None , url_kwargs = None , * * kwargs ) : return self . _session . options ( self . _url ( path , url_kwargs ) , * * kwargs )
891
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/client.py#L120-L132
[ "def", "_generate_examples_validation", "(", "self", ",", "archive", ",", "labels", ")", ":", "# Get the current random seeds.", "numpy_st0", "=", "np", ".", "random", ".", "get_state", "(", ")", "# Set new random seeds.", "np", ".", "random", ".", "seed", "(", "135", ")", "logging", ".", "warning", "(", "'Overwriting cv2 RNG seed.'", ")", "tfds", ".", "core", ".", "lazy_imports", ".", "cv2", ".", "setRNGSeed", "(", "357", ")", "for", "example", "in", "super", "(", "Imagenet2012Corrupted", ",", "self", ")", ".", "_generate_examples_validation", "(", "archive", ",", "labels", ")", ":", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "tf_img", "=", "tf", ".", "image", ".", "decode_jpeg", "(", "example", "[", "'image'", "]", ".", "read", "(", ")", ",", "channels", "=", "3", ")", "image_np", "=", "tfds", ".", "as_numpy", "(", "tf_img", ")", "example", "[", "'image'", "]", "=", "self", ".", "_get_corrupted_example", "(", "image_np", ")", "yield", "example", "# Reset the seeds back to their original values.", "np", ".", "random", ".", "set_state", "(", "numpy_st0", ")" ]
Sends a HEAD request .
def head ( self , path = None , url_kwargs = None , * * kwargs ) : return self . _session . head ( self . _url ( path , url_kwargs ) , * * kwargs )
892
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/client.py#L134-L146
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
Sends a POST request .
def post ( self , path = None , url_kwargs = None , * * kwargs ) : return self . _session . post ( self . _url ( path , url_kwargs ) , * * kwargs )
893
https://github.com/praekeltfoundation/seaworthy/blob/6f10a19b45d4ea1dc3bd0553cc4d0438696c079c/seaworthy/client.py#L148-L160
[ "def", "_generate_examples_validation", "(", "self", ",", "archive", ",", "labels", ")", ":", "# Get the current random seeds.", "numpy_st0", "=", "np", ".", "random", ".", "get_state", "(", ")", "# Set new random seeds.", "np", ".", "random", ".", "seed", "(", "135", ")", "logging", ".", "warning", "(", "'Overwriting cv2 RNG seed.'", ")", "tfds", ".", "core", ".", "lazy_imports", ".", "cv2", ".", "setRNGSeed", "(", "357", ")", "for", "example", "in", "super", "(", "Imagenet2012Corrupted", ",", "self", ")", ".", "_generate_examples_validation", "(", "archive", ",", "labels", ")", ":", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "tf_img", "=", "tf", ".", "image", ".", "decode_jpeg", "(", "example", "[", "'image'", "]", ".", "read", "(", ")", ",", "channels", "=", "3", ")", "image_np", "=", "tfds", ".", "as_numpy", "(", "tf_img", ")", "example", "[", "'image'", "]", "=", "self", ".", "_get_corrupted_example", "(", "image_np", ")", "yield", "example", "# Reset the seeds back to their original values.", "np", ".", "random", ".", "set_state", "(", "numpy_st0", ")" ]
This function serves as a handler for the different implementations of the IUWT decomposition . It allows the different methods to be used almost interchangeably .
def iuwt_decomposition ( in1 , scale_count , scale_adjust = 0 , mode = 'ser' , core_count = 2 , store_smoothed = False , store_on_gpu = False ) : if mode == 'ser' : return ser_iuwt_decomposition ( in1 , scale_count , scale_adjust , store_smoothed ) elif mode == 'mp' : return mp_iuwt_decomposition ( in1 , scale_count , scale_adjust , store_smoothed , core_count ) elif mode == 'gpu' : return gpu_iuwt_decomposition ( in1 , scale_count , scale_adjust , store_smoothed , store_on_gpu )
894
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt.py#L17-L41
[ "def", "leader_for_partition", "(", "self", ",", "partition", ")", ":", "if", "partition", ".", "topic", "not", "in", "self", ".", "_partitions", ":", "return", "None", "elif", "partition", ".", "partition", "not", "in", "self", ".", "_partitions", "[", "partition", ".", "topic", "]", ":", "return", "None", "return", "self", ".", "_partitions", "[", "partition", ".", "topic", "]", "[", "partition", ".", "partition", "]", ".", "leader" ]
This function serves as a handler for the different implementations of the IUWT recomposition . It allows the different methods to be used almost interchangeably .
def iuwt_recomposition ( in1 , scale_adjust = 0 , mode = 'ser' , core_count = 1 , store_on_gpu = False , smoothed_array = None ) : if mode == 'ser' : return ser_iuwt_recomposition ( in1 , scale_adjust , smoothed_array ) elif mode == 'mp' : return mp_iuwt_recomposition ( in1 , scale_adjust , core_count , smoothed_array ) elif mode == 'gpu' : return gpu_iuwt_recomposition ( in1 , scale_adjust , store_on_gpu , smoothed_array )
895
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt.py#L43-L64
[ "def", "handshake_timed_out", "(", "self", ")", ":", "if", "not", "self", ".", "__timer", ":", "return", "False", "if", "self", ".", "__handshake_complete", ":", "return", "False", "return", "self", ".", "__timer_expired" ]
This function calls the a trous algorithm code to decompose the input into its wavelet coefficients . This is the isotropic undecimated wavelet transform implemented for a single CPU core .
def ser_iuwt_decomposition ( in1 , scale_count , scale_adjust , store_smoothed ) : wavelet_filter = ( 1. / 16 ) * np . array ( [ 1 , 4 , 6 , 4 , 1 ] ) # Filter-bank for use in the a trous algorithm. # Initialises an empty array to store the coefficients. detail_coeffs = np . empty ( [ scale_count - scale_adjust , in1 . shape [ 0 ] , in1 . shape [ 1 ] ] ) C0 = in1 # Sets the initial value to be the input array. # The following loop, which iterates up to scale_adjust, applies the a trous algorithm to the scales which are # considered insignificant. This is important as each set of wavelet coefficients depends on the last smoothed # version of the input. if scale_adjust > 0 : for i in range ( 0 , scale_adjust ) : C0 = ser_a_trous ( C0 , wavelet_filter , i ) # The meat of the algorithm - two sequential applications of the a trous followed by determination and storing of # the detail coefficients. C0 is reassigned the value of C on each loop - C0 is always the smoothest version of the # input image. for i in range ( scale_adjust , scale_count ) : C = ser_a_trous ( C0 , wavelet_filter , i ) # Approximation coefficients. C1 = ser_a_trous ( C , wavelet_filter , i ) # Approximation coefficients. detail_coeffs [ i - scale_adjust , : , : ] = C0 - C1 # Detail coefficients. C0 = C if store_smoothed : return detail_coeffs , C0 else : return detail_coeffs
896
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt.py#L66-L111
[ "def", "init_defaults", "(", "self", ")", ":", "super", "(", "SimpleTable", ",", "self", ")", ".", "init_defaults", "(", ")", "self", ".", "name", "=", "self", ".", "table" ]
This function calls the a trous algorithm code to recompose the input into a single array . This is the implementation of the isotropic undecimated wavelet transform recomposition for a single CPU core .
def ser_iuwt_recomposition ( in1 , scale_adjust , smoothed_array ) : wavelet_filter = ( 1. / 16 ) * np . array ( [ 1 , 4 , 6 , 4 , 1 ] ) # Filter-bank for use in the a trous algorithm. # Determines scale with adjustment and creates a zero array to store the output, unless smoothed_array is given. max_scale = in1 . shape [ 0 ] + scale_adjust if smoothed_array is None : recomposition = np . zeros ( [ in1 . shape [ 1 ] , in1 . shape [ 2 ] ] ) else : recomposition = smoothed_array # The following loops call the a trous algorithm code to recompose the input. The first loop assumes that there are # non-zero wavelet coefficients at scales above scale_adjust, while the second loop completes the recomposition # on the scales less than scale_adjust. for i in range ( max_scale - 1 , scale_adjust - 1 , - 1 ) : recomposition = ser_a_trous ( recomposition , wavelet_filter , i ) + in1 [ i - scale_adjust , : , : ] if scale_adjust > 0 : for i in range ( scale_adjust - 1 , - 1 , - 1 ) : recomposition = ser_a_trous ( recomposition , wavelet_filter , i ) return recomposition
897
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt.py#L113-L149
[ "def", "set_end_date", "(", "self", ",", "lifetime", ")", ":", "self", ".", "end_date", "=", "(", "datetime", ".", "datetime", ".", "now", "(", ")", "+", "datetime", ".", "timedelta", "(", "0", ",", "lifetime", ")", ")" ]
This function calls the a trous algorithm code to recompose the input into a single array . This is the implementation of the isotropic undecimated wavelet transform recomposition for multiple CPU cores .
def mp_iuwt_recomposition ( in1 , scale_adjust , core_count , smoothed_array ) : wavelet_filter = ( 1. / 16 ) * np . array ( [ 1 , 4 , 6 , 4 , 1 ] ) # Filter-bank for use in the a trous algorithm. # Determines scale with adjustment and creates a zero array to store the output, unless smoothed_array is given. max_scale = in1 . shape [ 0 ] + scale_adjust if smoothed_array is None : recomposition = np . zeros ( [ in1 . shape [ 1 ] , in1 . shape [ 2 ] ] ) else : recomposition = smoothed_array # The following loops call the a trous algorithm code to recompose the input. The first loop assumes that there are # non-zero wavelet coefficients at scales above scale_adjust, while the second loop completes the recomposition # on the scales less than scale_adjust. for i in range ( max_scale - 1 , scale_adjust - 1 , - 1 ) : recomposition = mp_a_trous ( recomposition , wavelet_filter , i , core_count ) + in1 [ i - scale_adjust , : , : ] if scale_adjust > 0 : for i in range ( scale_adjust - 1 , - 1 , - 1 ) : recomposition = mp_a_trous ( recomposition , wavelet_filter , i , core_count ) return recomposition
898
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt.py#L242-L279
[ "def", "set_end_date", "(", "self", ",", "lifetime", ")", ":", "self", ".", "end_date", "=", "(", "datetime", ".", "datetime", ".", "now", "(", ")", "+", "datetime", ".", "timedelta", "(", "0", ",", "lifetime", ")", ")" ]
This function calls the a trous algorithm code to decompose the input into its wavelet coefficients . This is the isotropic undecimated wavelet transform implemented for a GPU .
def gpu_iuwt_decomposition ( in1 , scale_count , scale_adjust , store_smoothed , store_on_gpu ) : # The following simple kernel just allows for the construction of a 3D decomposition on the GPU. ker = SourceModule ( """ __global__ void gpu_store_detail_coeffs(float *in1, float *in2, float* out1, int *scale, int *adjust) { const int len = gridDim.x*blockDim.x; const int i = (blockDim.x * blockIdx.x + threadIdx.x); const int j = (blockDim.y * blockIdx.y + threadIdx.y)*len; const int k = (blockDim.z * blockIdx.z + threadIdx.z)*(len*len); const int tid2 = i + j; const int tid3 = i + j + k; if ((blockIdx.z + adjust[0])==scale[0]) { out1[tid3] = in1[tid2] - in2[tid2]; } } """ ) wavelet_filter = ( 1. / 16 ) * np . array ( [ 1 , 4 , 6 , 4 , 1 ] , dtype = np . float32 ) # Filter-bank for use in the a trous algorithm. wavelet_filter = gpuarray . to_gpu_async ( wavelet_filter ) # Initialises an empty array to store the detail coefficients. detail_coeffs = gpuarray . empty ( [ scale_count - scale_adjust , in1 . shape [ 0 ] , in1 . shape [ 1 ] ] , np . float32 ) # Determines whether the array is already on the GPU or not. If not, moves it to the GPU. try : gpu_in1 = gpuarray . to_gpu_async ( in1 . astype ( np . float32 ) ) except : gpu_in1 = in1 # Sets up some working arrays on the GPU to prevent memory transfers. gpu_tmp = gpuarray . empty_like ( gpu_in1 ) gpu_out1 = gpuarray . empty_like ( gpu_in1 ) gpu_out2 = gpuarray . empty_like ( gpu_in1 ) # Sets up some parameters required by the algorithm on the GPU. gpu_scale = gpuarray . zeros ( [ 1 ] , np . int32 ) gpu_adjust = gpuarray . zeros ( [ 1 ] , np . int32 ) gpu_adjust += scale_adjust # Fetches the a trous kernels and sets up the unique storing kernel. gpu_a_trous_row_kernel , gpu_a_trous_col_kernel = gpu_a_trous ( ) gpu_store_detail_coeffs = ker . get_function ( "gpu_store_detail_coeffs" ) grid_rows = int ( in1 . shape [ 0 ] // 32 ) grid_cols = int ( in1 . shape [ 1 ] // 32 ) # The following loop, which iterates up to scale_adjust, applies the a trous algorithm to the scales which are # considered insignificant. This is important as each set of wavelet coefficients depends on the last smoothed # version of the input. if scale_adjust > 0 : for i in range ( 0 , scale_adjust ) : gpu_a_trous_row_kernel ( gpu_in1 , gpu_tmp , wavelet_filter , gpu_scale , block = ( 32 , 32 , 1 ) , grid = ( grid_cols , grid_rows ) ) gpu_a_trous_col_kernel ( gpu_tmp , gpu_out1 , wavelet_filter , gpu_scale , block = ( 32 , 32 , 1 ) , grid = ( grid_cols , grid_rows ) ) gpu_in1 , gpu_out1 = gpu_out1 , gpu_in1 gpu_scale += 1 # The meat of the algorithm - two sequential applications fo the a trous followed by determination and storing of # the detail coefficients. C0 is reassigned the value of C on each loop - C0 is always the smoothest version of the # input image. for i in range ( scale_adjust , scale_count ) : gpu_a_trous_row_kernel ( gpu_in1 , gpu_tmp , wavelet_filter , gpu_scale , block = ( 32 , 32 , 1 ) , grid = ( grid_cols , grid_rows ) ) gpu_a_trous_col_kernel ( gpu_tmp , gpu_out1 , wavelet_filter , gpu_scale , block = ( 32 , 32 , 1 ) , grid = ( grid_cols , grid_rows ) ) # Approximation coefficients. gpu_a_trous_row_kernel ( gpu_out1 , gpu_tmp , wavelet_filter , gpu_scale , block = ( 32 , 32 , 1 ) , grid = ( grid_cols , grid_rows ) ) gpu_a_trous_col_kernel ( gpu_tmp , gpu_out2 , wavelet_filter , gpu_scale , block = ( 32 , 32 , 1 ) , grid = ( grid_cols , grid_rows ) ) # Approximation coefficients. 
gpu_store_detail_coeffs ( gpu_in1 , gpu_out2 , detail_coeffs , gpu_scale , gpu_adjust , block = ( 32 , 32 , 1 ) , grid = ( grid_cols , grid_rows , int ( scale_count ) ) ) # Detail coefficients. gpu_in1 , gpu_out1 = gpu_out1 , gpu_in1 gpu_scale += 1 # Return values depend on mode. NOTE: store_smoothed does not work if the result stays on the gpu. if store_on_gpu : return detail_coeffs elif store_smoothed : return detail_coeffs . get ( ) , gpu_in1 . get ( ) else : return detail_coeffs . get ( )
899
https://github.com/ratt-ru/PyMORESANE/blob/b024591ad0bbb69320d08841f28a2c27f62ae1af/pymoresane/iuwt.py#L384-L499
[ "def", "_record_offset", "(", "self", ")", ":", "offset", "=", "self", ".", "blob_file", ".", "tell", "(", ")", "self", ".", "event_offsets", ".", "append", "(", "offset", ")" ]