def setWriteToShell(self, writeToShell=True):
    """connect sysout to the qtSignal"""
    if writeToShell and not self._connected:
        self.message.connect(self.stdW)
        self._connected = True
    elif not writeToShell and self._connected:
        try:
            self.message.disconnect(self.stdW)
        except TypeError:
            pass  # was not connected
        self._connected = False
def get_sensor_consumption(self, service_location_id, sensor_id, start, end, aggregation):
    """Request consumption for a given sensor in a given service location.

    Parameters
    ----------
    service_location_id : int
    sensor_id : int
    start : int | dt.datetime | pd.Timestamp
    end : int | dt.datetime | pd.Timestamp
        start and end support epoch (in milliseconds), datetime and Pandas
        Timestamp; timezone-naive datetimes are assumed to be in UTC.
    aggregation : int
        1 = 5 min values (only available for the last 14 days)
        2 = hourly values
        3 = daily values
        4 = monthly values
        5 = quarterly values

    Returns
    -------
    dict
    """
    url = urljoin(URLS['servicelocation'], service_location_id, "sensor",
                  sensor_id, "consumption")
    return self._get_consumption(url=url, start=start, end=end,
                                 aggregation=aggregation)
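A minimal usage sketch follows. It assumes a hypothetical, already-authenticated client object named smappee that exposes this method; the ids and dates are illustrative only.

    import datetime as dt

    # Hypothetical client instance exposing get_sensor_consumption().
    consumption = smappee.get_sensor_consumption(
        service_location_id=12345,      # illustrative id
        sensor_id=4,                    # illustrative id
        start=dt.datetime(2019, 1, 1),  # naive datetimes are treated as UTC
        end=dt.datetime(2019, 1, 2),
        aggregation=2,                  # hourly values
    )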
def perform(self, cmd, msg='', extra_headers=None):
    """Perform the call."""
tries = 0 while 1 : conn = None try : conn = self . get_connection ( ) if hasattr ( msg , 'read' ) and hasattr ( msg , 'fileno' ) : msg_length = str ( os . fstat ( msg . fileno ( ) ) . st_size ) elif hasattr ( msg , 'read' ) : msg . seek ( 0 , 2 ) msg_length = str ( msg . tell ( ) + 2 ) else : if msg : try : msg_length = str ( len ( msg ) + 2 ) except TypeError : conn . close ( ) raise ValueError ( 'msg param should be a string or file handle' ) else : msg_length = '2' headers = self . get_headers ( cmd , msg_length , extra_headers ) if isinstance ( msg , types . StringTypes ) : if self . gzip and msg : msg = compress ( msg + '\r\n' , self . compress_level ) else : msg = msg + '\r\n' conn . send ( headers + msg ) else : conn . send ( headers ) if hasattr ( msg , 'read' ) : if hasattr ( msg , 'seek' ) : msg . seek ( 0 ) conn . sendfile ( msg , self . gzip , self . compress_level ) conn . send ( '\r\n' ) try : conn . socket ( ) . shutdown ( socket . SHUT_WR ) except socket . error : pass return get_response ( cmd , conn ) except socket . gaierror as err : if conn is not None : conn . release ( ) raise SpamCError ( str ( err ) ) except socket . timeout as err : if conn is not None : conn . release ( ) raise SpamCTimeOutError ( str ( err ) ) except socket . error as err : if conn is not None : conn . close ( ) errors = ( errno . EAGAIN , errno . EPIPE , errno . EBADF , errno . ECONNRESET ) if err [ 0 ] not in errors or tries >= self . max_tries : raise SpamCError ( "socket.error: %s" % str ( err ) ) except BaseException : if conn is not None : conn . release ( ) raise tries += 1 self . backend_mod . sleep ( self . wait_tries )
def time_window_cutoff(sw_time, time_cutoff):
    """Allows for cutting the declustering time window at a specific time,
    outside of which an event of any magnitude is no longer identified as a
    cluster."""
    sw_time = np.array([(time_cutoff / DAYS) if x > (time_cutoff / DAYS) else x
                        for x in sw_time])
    return sw_time
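The list comprehension above is an element-wise clip against the cutoff expressed in the same units as sw_time. A short numpy sketch of the equivalent operation; the value of DAYS and the sample numbers are assumptions for illustration only.

    import numpy as np

    DAYS = 364.75                                # assumed module constant (days per year)
    sw_time = np.array([0.10, 0.90, 0.30])       # window lengths in years
    time_cutoff = 100.0                          # cutoff in days
    clipped = np.minimum(sw_time, time_cutoff / DAYS)
    # array([0.1, 0.2742..., 0.2742...])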
def plot_labels(labels, lattice=None, coords_are_cartesian=False, ax=None, **kwargs):
    """Adds labels to a matplotlib Axes.

    Args:
        labels: dict containing the label as a key and the coordinates as value.
        lattice: Lattice object used to convert from reciprocal to cartesian coordinates.
        coords_are_cartesian: Set to True if you are providing coordinates in
            cartesian coordinates. Defaults to False. Requires lattice if False.
        ax: matplotlib :class:`Axes` or None if a new figure should be created.
        kwargs: kwargs passed to the matplotlib function 'text'. Color defaults
            to blue and size to 25.

    Returns:
        matplotlib figure and matplotlib ax
    """
    ax, fig, plt = get_ax3d_fig_plt(ax)
    if "color" not in kwargs:
        kwargs["color"] = "b"
    if "size" not in kwargs:
        kwargs["size"] = 25
    for k, coords in labels.items():
        label = k
        if k.startswith("\\") or k.find("_") != -1:
            label = "$" + k + "$"
        off = 0.01
        if coords_are_cartesian:
            coords = np.array(coords)
        else:
            if lattice is None:
                raise ValueError("coords_are_cartesian False requires the lattice")
            coords = lattice.get_cartesian_coords(coords)
        ax.text(*(coords + off), s=label, **kwargs)
    return fig, ax
def _erase_buffer(self, output_buffer):
    """Erase readings in the specified buffer to make space."""
    erase_size = self._model.get(u'buffer_erase_size')
    buffer_type = u'storage'
    if output_buffer:
        buffer_type = u'streaming'
    old_readings = self._engine.popn(buffer_type, erase_size)
    # Now go through all of our walkers that could match and
    # update their availability counts and data buffer pointers
    for reading in old_readings:
        stream = DataStream.FromEncoded(reading.stream)
        for walker in self._queue_walkers:
            # Only notify the walkers that are on this queue
            if walker.selector.output == output_buffer:
                walker.notify_rollover(stream)
def reinverted(n, r):
    """Integer with reversed and inverted bits of n assuming bit length r.

    >>> reinverted(1, 6)
    31
    >>> [reinverted(x, 6) for x in [7, 11, 13, 14, 19, 21, 22, 25, 26, 28]]
    [7, 11, 19, 35, 13, 21, 37, 25, 41, 49]
    """
    result = 0
    r = 1 << (r - 1)
    while n:
        if not n & 1:
            result |= r
        r >>= 1
        n >>= 1
    if r:
        result |= (r << 1) - 1
    return result
def check_install():
    """Try to detect the two most common installation errors:

    1. Installing on macOS using a Homebrew version of Python
    2. Installing on Linux using Python 2 when GDB is linked with Python 3
    """
if platform . system ( ) == 'Darwin' and sys . executable != '/usr/bin/python' : print ( "*" * 79 ) print ( textwrap . fill ( "WARNING: You are not using the version of Python included with " "macOS. If you intend to use Voltron with the LLDB included " "with Xcode, or GDB installed with Homebrew, it will not work " "unless it is installed using the system's default Python. If " "you intend to use Voltron with a debugger installed by some " "other method, it may be safe to ignore this warning. See the " "following documentation for more detailed installation " "instructions: " "https://github.com/snare/voltron/wiki/Installation" , 79 ) ) print ( "*" * 79 ) elif platform . system ( ) == 'Linux' : try : output = check_output ( [ "gdb" , "-batch" , "-q" , "--nx" , "-ex" , "pi print(sys.version_info.major)" ] ) . decode ( "utf-8" ) gdb_python = int ( output ) if gdb_python != sys . version_info . major : print ( "*" * 79 ) print ( textwrap . fill ( "WARNING: You are installing Voltron using Python {0}.x " "and GDB is linked with Python {1}.x. GDB will not be " "able to load Voltron. Please install using Python {1} " "if you intend to use Voltron with the copy of GDB that " "is installed. See the following documentation for more " "detailed installation instructions: " "https://github.com/snare/voltron/wiki/Installation" . format ( sys . version_info . major , gdb_python ) , 79 ) ) print ( "*" * 79 ) except : pass
def _create_model(self, X, Y):
    """Creates the model given some input data X and Y."""
    # --- define kernel
    self.input_dim = X.shape[1]
    if self.kernel is None:
        kern = GPy.kern.Matern52(self.input_dim, variance=1., ARD=self.ARD)  # + GPy.kern.Bias(self.input_dim)
    else:
        kern = self.kernel
        self.kernel = None

    # --- define model
    noise_var = Y.var() * 0.01 if self.noise_var is None else self.noise_var
    if not self.sparse:
        self.model = GPy.models.GPRegression(X, Y, kernel=kern, noise_var=noise_var)
    else:
        self.model = GPy.models.SparseGPRegression(X, Y, kernel=kern, num_inducing=self.num_inducing)

    # --- restrict variance if exact evaluations of the objective
    if self.exact_feval:
        self.model.Gaussian_noise.constrain_fixed(1e-6, warning=False)
    else:
        # --- We make sure we do not get ridiculously small residual noise variance
        self.model.Gaussian_noise.constrain_bounded(1e-9, 1e6, warning=False)
def _check_cb(cb_):
    '''If the callback is None or is not callable, return a lambda that
    returns the value passed.'''
    if cb_ is not None:
        if hasattr(cb_, '__call__'):
            return cb_
        else:
            log.error('log_callback is not callable, ignoring')
    return lambda x: x
def read_separated_lines(path, separator=' ', max_columns=-1, keep_empty=False):
    """Reads a text file where each line represents a record with some separated columns.

    Parameters:
        path (str): Path to the file to read.
        separator (str): Separator that is used to split the columns.
        max_columns (int): Number of max columns (if the separator occurs within the last column).
        keep_empty (bool): If True, empty columns are returned as well.

    Returns:
        list: A list containing a list for each line read.
    """
    gen = read_separated_lines_generator(path, separator, max_columns, keep_empty=keep_empty)
    return list(gen)
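A short usage sketch, assuming a space-separated file at a hypothetical path; the file contents shown in the comment are illustrative only.

    # Hypothetical file 'utt2spk' containing lines such as:
    #   utt-001 speaker-a
    #   utt-002 speaker-b
    records = read_separated_lines('utt2spk', separator=' ', max_columns=2)
    # records == [['utt-001', 'speaker-a'], ['utt-002', 'speaker-b']]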
def direction(layer, vec, batch=None, cossim_pow=0):
    """Visualize a direction"""
    if batch is None:
        vec = vec[None, None, None]
        return lambda T: _dot_cossim(T(layer), vec)
    else:
        vec = vec[None, None]
        return lambda T: _dot_cossim(T(layer)[batch], vec)
def power_cycle_vm(virtual_machine, action='on'):
    '''Powers on/off a virtual machine specified by its name.

    virtual_machine
        vim.VirtualMachine object to power on/off virtual machine

    action
        Operation option to power on/off the machine
    '''
if action == 'on' : try : task = virtual_machine . PowerOn ( ) task_name = 'power on' except vim . fault . NoPermission as exc : log . exception ( exc ) raise salt . exceptions . VMwareApiError ( 'Not enough permissions. Required privilege: ' '{}' . format ( exc . privilegeId ) ) except vim . fault . VimFault as exc : log . exception ( exc ) raise salt . exceptions . VMwareApiError ( exc . msg ) except vmodl . RuntimeFault as exc : log . exception ( exc ) raise salt . exceptions . VMwareRuntimeError ( exc . msg ) elif action == 'off' : try : task = virtual_machine . PowerOff ( ) task_name = 'power off' except vim . fault . NoPermission as exc : log . exception ( exc ) raise salt . exceptions . VMwareApiError ( 'Not enough permissions. Required privilege: ' '{}' . format ( exc . privilegeId ) ) except vim . fault . VimFault as exc : log . exception ( exc ) raise salt . exceptions . VMwareApiError ( exc . msg ) except vmodl . RuntimeFault as exc : log . exception ( exc ) raise salt . exceptions . VMwareRuntimeError ( exc . msg ) else : raise salt . exceptions . ArgumentValueError ( 'The given action is not supported' ) try : wait_for_task ( task , get_managed_object_name ( virtual_machine ) , task_name ) except salt . exceptions . VMwareFileNotFoundError as exc : raise salt . exceptions . VMwarePowerOnError ( ' ' . join ( [ 'An error occurred during power' , 'operation, a file was not found: {0}' . format ( exc ) ] ) ) return virtual_machine
def _clear_expired_zones(self):
    """Update zone status for all expired zones."""
    zones = []
    for z in list(self._zones.keys()):
        zones += [z]
    for z in zones:
        if self._zones[z].status != Zone.CLEAR and self._zone_expired(z):
            self._update_zone(z, Zone.CLEAR)
def draw(self, **kwargs):
    """Draws the bar plot of the ranking array of features."""
    if self.orientation_ == 'h':
        # Make the plot
        self.ax.barh(np.arange(len(self.ranks_)), self.ranks_, color='b')

        # Add ticks and tick labels
        self.ax.set_yticks(np.arange(len(self.ranks_)))
        if self.show_feature_names_:
            self.ax.set_yticklabels(self.features_)
        else:
            self.ax.set_yticklabels([])

        # Order the features from top to bottom on the y axis
        self.ax.invert_yaxis()

        # Turn off y grid lines
        self.ax.yaxis.grid(False)

    elif self.orientation_ == 'v':
        # Make the plot
        self.ax.bar(np.arange(len(self.ranks_)), self.ranks_, color='b')

        # Add ticks and tick labels
        self.ax.set_xticks(np.arange(len(self.ranks_)))
        if self.show_feature_names_:
            self.ax.set_xticklabels(self.features_, rotation=90)
        else:
            self.ax.set_xticklabels([])

        # Turn off x grid lines
        self.ax.xaxis.grid(False)

    else:
        raise YellowbrickValueError("Orientation must be 'h' or 'v'")
def events_filter(
        self,
        topics: List[str] = None,
        from_block: BlockSpecification = None,
        to_block: BlockSpecification = None,
) -> StatelessFilter:
    """Install a new filter for an array of topics emitted by the contract.

    Args:
        topics: A list of event ids to filter for. Can also be None, in which
            case all events are queried.
        from_block: The block number at which to start looking for events.
        to_block: The block number at which to stop looking for events.

    Return:
        Filter: The filter instance.
    """
    return self.client.new_filter(
        self.address,
        topics=topics,
        from_block=from_block,
        to_block=to_block,
    )
def covar(X, remove_mean=False, modify_data=False, weights=None, sparse_mode='auto', sparse_tol=0.0):
    r"""Computes the covariance matrix of X.

    Computes

    .. math::
        C_{XX} = X^\top X

    while exploiting zero or constant columns in the data matrix.
    WARNING: Directly use moments_XX if you can. This function does an
    additional constant-matrix multiplication and does not return the mean.

    Parameters
    ----------
    X : ndarray (T, M)
        Data matrix
    remove_mean : bool
        True: remove column mean from the data; False: don't remove mean.
    modify_data : bool
        If remove_mean=True, the mean will be removed in the data matrix X,
        without creating an independent copy. This option is faster but might
        lead to surprises because your input array is changed.
    weights : None or ndarray (T,)
        Weights assigned to each trajectory point of X. If None, all data
        points have weight one. If ndarray, each data point is assigned a
        separate weight.
    sparse_mode : str
        one of:
            * 'dense': always use dense mode
            * 'sparse': always use sparse mode if possible
            * 'auto': automatic
    sparse_tol : float
        Threshold for considering a column to be zero in order to save
        computing effort when the data is sparse or almost sparse.
        If max(abs(X[:, i])) < sparse_tol, then row i (and also column i if Y
        is not given) of the covariance matrix will be set to zero. If Y is
        given and max(abs(Y[:, i])) < sparse_tol, then column i of the
        covariance matrix will be set to zero.

    Returns
    -------
    C_XX : ndarray (M, M)
        Covariance matrix of X

    See also
    --------
    moments_XX
    """
    w, s, M = moments_XX(X, remove_mean=remove_mean, weights=weights,
                         modify_data=modify_data, sparse_mode=sparse_mode,
                         sparse_tol=sparse_tol)
    return M / float(w)
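For intuition, the dense, unweighted case reduces to the plain sample second-moment matrix. A minimal numpy sketch of that case (ignoring the sparse-column optimizations the library applies); the data here are random and illustrative only.

    import numpy as np

    T, M = 1000, 5
    X = np.random.randn(T, M)

    Xc = X - X.mean(axis=0)          # remove_mean=True
    C_xx = Xc.T @ Xc / float(T)      # same result as covar(X, remove_mean=True)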
def edges(self, nodes=None):
    """Returns a ``tuple`` of all edges in the ``DictGraph``; an edge is a
    pair of **node objects**.

    Arguments:
        - nodes (iterable) [default: ``None``]: iterable of **node objects**;
          if specified the edges will be limited to those outgoing from one
          of the specified nodes.
    """
    # If a Node has been directly updated (__not__ recommended)
    # then the Graph will not know the added nodes and therefore will
    # miss half of their edges.
    edges = set()
    for node in (nodes or self.iterkeys()):
        ends = self[node].nodes()
        edges.update([(node, end) for end in ends])
    return tuple(edges)
def destroy(self):
    """Destroys the app. Do be careful."""
    r = self._h._http_resource(method='DELETE', resource=('apps', self.name))
    return r.ok
def type_profile2(sequence, TypedSequence=None):
    """similar to depth_profile but reports types

    Args:
        sequence (?):
        compress_homogenous (bool): (default = True)

    Returns:
        str: level_type_str

    CommandLine:
        python -m utool.util_list --exec-type_profile2

    Example:
        >>> # DISABLE_DOCTEST
        >>> sequence = []
        >>> from utool.util_list import *  # NOQA
        >>> self = typeprof = type_profile2(sequence, type_sequence_factory())
        >>> result = ('level_type_str = %s' % (str(level_type_str),))
        >>> print(result)
    """
    if TypedSequence is None:
        TypedSequence = type_sequence_factory()
    # For a pure bottom level list return the length
    # if not any(map(util_type.is_listlike, sequence)) or (isinstance(sequence, np.ndarray) and sequence.dtype != object):
    if not util_type.is_listlike(sequence) or (isinstance(sequence, np.ndarray) and sequence.dtype != object):
        # Scalar / ndarray type
        if isinstance(sequence, np.ndarray):
            # The original compared type(sequence) against the string
            # 'numpy.ndarray', which is never true; use isinstance so the
            # dtype of ndarrays is actually reported.
            subtype_list = '[%s]' % (sequence.dtype,)
        else:
            subtype_list = None
        return TypedSequence(type(sequence), subtype_list)
    elif util_type.is_listlike(sequence):
        # Sequence type
        sequence_type = type(sequence)
        subtype_list = []
        for item in sequence:
            item_type_profile = type_profile2(item, TypedSequence=TypedSequence)
            subtype_list.append(item_type_profile)
        sequence_type_profile = TypedSequence(sequence_type, subtype_list)
        return sequence_type_profile
def _filter_execs(self, isSubroutine):
    """Filters the executables in the dictionary by their type."""
    result = {}
    for key in self.executables:
        if (isinstance(self.executables[key], Subroutine) and isSubroutine) or \
           (isinstance(self.executables[key], Function) and not isSubroutine):
            result[key] = self.executables[key]
    return result
def clean(tf_matrix, tf_matrix_gene_names, target_gene_name):
    """
    :param tf_matrix: numpy array. The full transcription factor matrix.
    :param tf_matrix_gene_names: the full list of transcription factor names,
        corresponding to the tf_matrix columns.
    :param target_gene_name: the target gene to remove from the tf_matrix and tf_names.
    :return: a tuple of (matrix, names) equal to the specified ones minus the
        target_gene_name if the target happens to be one of the transcription
        factors. If not, the specified (tf_matrix, tf_names) is returned verbatim.
    """
    if target_gene_name not in tf_matrix_gene_names:
        clean_tf_matrix = tf_matrix
    else:
        clean_tf_matrix = np.delete(tf_matrix, tf_matrix_gene_names.index(target_gene_name), 1)

    clean_tf_names = [tf for tf in tf_matrix_gene_names if tf != target_gene_name]

    assert clean_tf_matrix.shape[1] == len(clean_tf_names)  # sanity check

    return clean_tf_matrix, clean_tf_names
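A small usage sketch, assuming numpy is imported as np; the matrix and gene names are illustrative only.

    import numpy as np

    tf_matrix = np.arange(12).reshape(4, 3)      # 4 samples x 3 transcription factors
    tf_names = ['TF_A', 'TF_B', 'TF_C']

    matrix, names = clean(tf_matrix, tf_names, 'TF_B')
    # matrix.shape == (4, 2); names == ['TF_A', 'TF_C']

    matrix, names = clean(tf_matrix, tf_names, 'GENE_X')
    # target is not a TF: the inputs are returned unchanged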
def _matches_prop(self, obj, prop_name, prop_match):
    """Return a boolean indicating whether a resource object matches with a
    single property against a property match value. This is used for
    client-side filtering.

    Depending on the specified property, this method retrieves the resource
    properties from the HMC.

    Parameters:

      obj (BaseResource): Resource object.

      prop_match:
        Property match value that is used to match the actual value of the
        specified property against, as follows:

        - If the match value is a list or tuple, this method is invoked
          recursively to find whether one or more match values in the list
          match.
        - Else if the property is of string type, its value is matched by
          interpreting the match value as a regular expression.
        - Else the property value is matched by exact value comparison with
          the match value.

    Returns:

      bool: Boolean indicating whether the resource object matches w.r.t.
      the specified property and the match value.
    """
if isinstance ( prop_match , ( list , tuple ) ) : # List items are logically ORed , so one matching item suffices . for pm in prop_match : if self . _matches_prop ( obj , prop_name , pm ) : return True else : # Some lists of resources do not have all properties , for example # Hipersocket adapters do not have a " card - location " property . # If a filter property does not exist on a resource , the resource # does not match . try : prop_value = obj . get_property ( prop_name ) except KeyError : return False if isinstance ( prop_value , six . string_types ) : # HMC resource property is Enum String or ( non - enum ) String , # and is both matched by regexp matching . Ideally , regexp # matching should only be done for non - enum strings , but # distinguishing them is not possible given that the client # has no knowledge about the properties . # The regexp matching implemented in the HMC requires begin and # end of the string value to match , even if the ' ^ ' for begin # and ' $ ' for end are not specified in the pattern . The code # here is consistent with that : We add end matching to the # pattern , and begin matching is done by re . match ( ) # automatically . re_match = prop_match + '$' m = re . match ( re_match , prop_value ) if m : return True else : if prop_value == prop_match : return True return False
def _calculate_influence(self, influence_lambda):
    """Calculate the ranking influence."""
    return np.exp(-np.arange(self.num_neurons) / influence_lambda)[:, None]
def update_args(self, override_args):
    """Update the argument used to invoke the application.

    Note that this will also update the dictionary of input and output files.

    Parameters
    ----------
    override_args : dict
        dictionary passed to the links
    """
    self.args = extract_arguments(override_args, self.args)
    self._map_arguments(self.args)

    scratch_dir = self.args.get('scratch', None)
    if is_not_null(scratch_dir):
        self._file_stage = FileStageManager(scratch_dir, '.')
    for link in self._links.values():
        link._set_file_stage(self._file_stage)
    self._latch_file_info()
def _assemble_conversion(self, stmt):
    """Example: p(HGNC:HK1) =>
    rxn(reactants(a(CHEBI:"CHEBI:17634")), products(a(CHEBI:"CHEBI:4170")))"""
    pybel_lists = ([], [])
    for pybel_list, agent_list in zip(pybel_lists, (stmt.obj_from, stmt.obj_to)):
        for agent in agent_list:
            node = _get_agent_grounding(agent)
            # TODO check for missing grounding?
            pybel_list.append(node)
    rxn_node_data = reaction(
        reactants=pybel_lists[0],
        products=pybel_lists[1],
    )
    obj_node = self.model.add_node_from_data(rxn_node_data)
    obj_edge = None  # TODO: Any edge information possible here?
    # Add node for controller, if there is one
    if stmt.subj is not None:
        subj_attr, subj_edge = _get_agent_node(stmt.subj)
        subj_node = self.model.add_node_from_data(subj_attr)
        edge_data_list = _combine_edge_data(pc.DIRECTLY_INCREASES, subj_edge,
                                            obj_edge, stmt.evidence)
        for edge_data in edge_data_list:
            self.model.add_edge(subj_node, obj_node, **edge_data)
def default_args(self):
    """Parse args and return default args."""
    if self._default_args is None:
        self._default_args, unknown = self.parser.parse_known_args()  # pylint: disable=W0612
        # reinitialize logger with new log level and api settings
        self.tcex._logger()
        if self._default_args.tc_aot_enabled:
            # block for AOT message and get params
            params = self.tcex.playbook.aot_blpop()
            self.inject_params(params)
        elif self._default_args.tc_secure_params:
            # inject secure params from API
            params = self._load_secure_params()
            self.inject_params(params)
    return self._default_args
def readShiftFile(self, filename):
    """Reads a shift file from disk and populates a dictionary."""
order = [ ] fshift = open ( filename , 'r' ) flines = fshift . readlines ( ) fshift . close ( ) common = [ f . strip ( '#' ) . strip ( ) for f in flines if f . startswith ( '#' ) ] c = [ line . split ( ': ' ) for line in common ] # Remove any line comments in the shift file - lines starting with ' # ' # but not part of the common block . for l in c : if l [ 0 ] not in [ 'frame' , 'refimage' , 'form' , 'units' ] : c . remove ( l ) for line in c : line [ 1 ] = line [ 1 ] . strip ( ) self . update ( c ) files = [ f . strip ( ) . split ( ' ' , 1 ) for f in flines if not ( f . startswith ( '#' ) or f . strip ( ) == '' ) ] for f in files : order . append ( f [ 0 ] ) self [ 'order' ] = order for f in files : # Check to see if filename provided is a full filename that corresponds # to a file on the path . If not , try to convert given rootname into # a valid filename based on available files . This may or may not # define the correct filename , which is why it prints out what it is # doing , so that the user can verify and edit the shiftfile if needed . # NOTE : # Supporting the specification of only rootnames in the shiftfile with this # filename expansion is NOT to be documented , but provided solely as # an undocumented , dangerous and not fully supported helper function for # some backwards compatibility . if not os . path . exists ( f [ 0 ] ) : f [ 0 ] = fu . buildRootname ( f [ 0 ] ) print ( 'Defining filename in shiftfile as: ' , f [ 0 ] ) f [ 1 ] = f [ 1 ] . split ( ) try : f [ 1 ] = [ float ( s ) for s in f [ 1 ] ] except : msg = 'Cannot read in ' , s , ' from shiftfile ' , filename , ' as a float number' raise ValueError ( msg ) msg = "At least 2 and at most 4 shift values should be provided in a shiftfile" if len ( f [ 1 ] ) < 2 : raise ValueError ( msg ) elif len ( f [ 1 ] ) == 3 : f [ 1 ] . append ( 1.0 ) elif len ( f [ 1 ] ) == 2 : f [ 1 ] . extend ( [ 0.0 , 1.0 ] ) elif len ( f [ 1 ] ) > 4 : raise ValueError ( msg ) fdict = dict ( files ) self . update ( fdict )
def draw_objects(self, objects, bounds, img):
    '''draw objects on the image'''
    # sorted() keeps the draw order stable and works for dict views on
    # both Python 2 and Python 3 (the original keys().sort() is Python 2 only).
    for k in sorted(objects.keys()):
        obj = objects[k]
        bounds2 = obj.bounds()
        if bounds2 is None or mp_util.bounds_overlap(bounds, bounds2):
            obj.draw(img, self.pixmapper, bounds)
def rename_datastore(datastore_ref, new_datastore_name):
    '''Renames a datastore

    datastore_ref
        vim.Datastore reference to the datastore object to be changed

    new_datastore_name
        New datastore name
    '''
    ds_name = get_managed_object_name(datastore_ref)
    log.trace("Renaming datastore '%s' to '%s'", ds_name, new_datastore_name)
    try:
        datastore_ref.RenameDatastore(new_datastore_name)
    except vim.fault.NoPermission as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareApiError(
            'Not enough permissions. Required privilege: '
            '{}'.format(exc.privilegeId))
    except vim.fault.VimFault as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareApiError(exc.msg)
    except vmodl.RuntimeFault as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareRuntimeError(exc.msg)
def lpushx(self, key, *values):
    """Insert values at the head of an existing list.

    :param key: The list's key
    :type key: :class:`str`, :class:`bytes`
    :param values: One or more positional arguments to insert at the
        beginning of the list. Each value is inserted at the beginning of
        the list individually (see discussion below).
    :returns: the length of the list after push operations, zero if `key`
        does not refer to a list
    :rtype: int
    :raises: :exc:`~tredis.exceptions.TRedisException`

    This method inserts `values` at the head of the list stored at `key`,
    only if `key` already exists and holds a list. In contrary to
    :meth:`.lpush`, no operation will be performed when key does not yet
    exist.

    .. note:: **Time complexity**: ``O(1)``
    """
    return self._execute([b'LPUSHX', key] + list(values))
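A usage sketch inside a coroutine. It assumes `client` is an already-connected tredis client instance (construction omitted); the key and payload are illustrative only.

    async def enqueue(client):
        # `client` is assumed to be an already-connected tredis client.
        length = await client.lpushx('pending-jobs', b'job-42')
        if length == 0:
            # 'pending-jobs' does not exist yet, so nothing was inserted;
            # lpush would be needed to create the list first.
            pass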
def values():
    """Get the full current set of B3 values.

    :return: A dict containing the keys "X-B3-TraceId", "X-B3-ParentSpanId",
        "X-B3-SpanId", "X-B3-Sampled" and "X-B3-Flags" for the current span
        or subspan. NB some of the values are likely to be None, but all keys
        will be present.
    """
    result = {}
    try:
        # Check if there's a sub-span in progress, otherwise use the main span:
        span = g.get("subspan") if "subspan" in g else g
        for header in b3_headers:
            result[header] = span.get(header)
    except RuntimeError:
        # We're probably working outside the Application Context at this point, likely on startup:
        # https://stackoverflow.com/questions/31444036/runtimeerror-working-outside-of-application-context
        # We return a dict of empty values so the expected keys are present.
        for header in b3_headers:
            result[header] = None
    return result
def createHiddenFolder(self) -> 'File':
    """Create Hidden Folder

    Create a hidden folder. Raise exception if auto delete isn't True.

    @return: Created folder.
    """
    if not self._autoDelete:
        raise Exception("Hidden folders can only be created within"
                        " an autoDelete directory")
    return tempfile.mkdtemp(dir=self._path, prefix=".")
def get_filtered_graph(self, relations=None, prefix=None):
    """Returns a networkx graph for the whole ontology, for a subset of relations.

    Only implemented for eager methods.

    Implementation notes: currently this is not cached.

    Arguments
    ---------
    relations : list
        list of object property IDs, e.g. subClassOf, BFO:0000050. If empty, uses all.
    prefix : String
        if specified, create a subgraph using only classes with this prefix,
        e.g. ENVO, PATO, GO

    Return
    ------
    nx.MultiDiGraph
        A networkx MultiDiGraph object representing the filtered ontology
    """
    # trigger synonym cache
    self.all_synonyms()
    self.all_obsoletes()

    # default method - wrap get_graph
    srcg = self.get_graph()
    if prefix is not None:
        srcg = srcg.subgraph([n for n in srcg.nodes() if n.startswith(prefix + ":")])
    if relations is None:
        logger.info("No filtering on " + str(self))
        return srcg
    logger.info("Filtering {} for {}".format(self, relations))
    g = nx.MultiDiGraph()

    # TODO: copy full metadata
    logger.info("copying nodes")
    for (n, d) in srcg.nodes(data=True):
        g.add_node(n, **d)

    logger.info("copying edges")
    num_edges = 0
    for (x, y, d) in srcg.edges(data=True):
        if d['pred'] in relations:
            num_edges += 1
            g.add_edge(x, y, **d)
    logger.info("Filtered edges: {}".format(num_edges))
    return g
def unpack(stream, **kwargs):
    '''
    .. versionadded:: 2018.3.4

    Wraps msgpack.unpack.

    By default, this function uses the msgpack module and falls back to
    msgpack_pure, if the msgpack is not available. You can pass an alternate
    msgpack module using the _msgpack_module argument.
    '''
    msgpack_module = kwargs.pop('_msgpack_module', msgpack)
    return msgpack_module.unpack(stream, **kwargs)
def cmd_sync(self, low, timeout=None, full_return=False):
    '''Execute a runner function synchronously; eauth is respected

    This function requires that :conf_master:`external_auth` is configured
    and the user is authorized to execute runner functions: (``@runner``).

    .. code-block:: python

        runner.eauth_sync({
            'fun': 'jobs.list_jobs',
            'username': 'saltdev',
            'password': 'saltdev',
            'eauth': 'pam',
    '''
    event = salt.utils.event.get_master_event(self.opts, self.opts['sock_dir'], listen=True)
    job = self.master_call(**low)
    ret_tag = salt.utils.event.tagify('ret', base=job['tag'])

    if timeout is None:
        timeout = self.opts.get('rest_timeout', 300)
    ret = event.get_event(tag=ret_tag, full=True, wait=timeout, auto_reconnect=True)
    if ret is None:
        raise salt.exceptions.SaltClientTimeout(
            "RunnerClient job '{0}' timed out".format(job['jid']),
            jid=job['jid'])

    return ret if full_return else ret['data']['return']
def shrink(self):
    """Calculate the Constant-Correlation covariance matrix.

    :return: shrunk sample covariance matrix
    :rtype: np.ndarray
    """
x = np . nan_to_num ( self . X . values ) # de - mean returns t , n = np . shape ( x ) meanx = x . mean ( axis = 0 ) x = x - np . tile ( meanx , ( t , 1 ) ) # compute sample covariance matrix sample = ( 1.0 / t ) * np . dot ( x . T , x ) # compute prior var = np . diag ( sample ) . reshape ( - 1 , 1 ) sqrtvar = np . sqrt ( var ) _var = np . tile ( var , ( n , ) ) _sqrtvar = np . tile ( sqrtvar , ( n , ) ) r_bar = ( np . sum ( sample / ( _sqrtvar * _sqrtvar . T ) ) - n ) / ( n * ( n - 1 ) ) prior = r_bar * ( _sqrtvar * _sqrtvar . T ) prior [ np . eye ( n ) == 1 ] = var . reshape ( - 1 ) # compute shrinkage parameters and constant if self . delta is None : # what we call pi - hat y = x ** 2.0 phi_mat = np . dot ( y . T , y ) / t - 2 * np . dot ( x . T , x ) * sample / t + sample ** 2 phi = np . sum ( phi_mat ) # what we call rho - hat term1 = np . dot ( ( x ** 3 ) . T , x ) / t help_ = np . dot ( x . T , x ) / t help_diag = np . diag ( help_ ) term2 = np . tile ( help_diag , ( n , 1 ) ) . T * sample term3 = help_ * _var term4 = _var * sample theta_mat = term1 - term2 - term3 + term4 theta_mat [ np . eye ( n ) == 1 ] = np . zeros ( n ) rho = sum ( np . diag ( phi_mat ) ) + r_bar * np . sum ( np . dot ( ( 1.0 / sqrtvar ) , sqrtvar . T ) * theta_mat ) # what we call gamma - hat gamma = np . linalg . norm ( sample - prior , "fro" ) ** 2 # compute shrinkage constant kappa = ( phi - rho ) / gamma shrinkage = max ( 0.0 , min ( 1.0 , kappa / t ) ) self . delta = shrinkage else : # use specified constant shrinkage = self . delta # compute the estimator sigma = shrinkage * prior + ( 1 - shrinkage ) * sample return self . format_and_annualise ( sigma )
def get_list_of_paths(self):
    """return a list of unique paths in the file list"""
    all_paths = []
    for p in self.fl_metadata:
        try:
            all_paths.append(p['path'])
        except:
            try:
                print('cls_filelist - no key path, ignoring folder ' + str(p))
            except:
                print('cls_filelist - no key path, ignoring odd character folder')
    return list(set(all_paths))
def find_types(observatory, match=None, trend=None, connection=None, **connection_kw):
    """Find the available data types for a given observatory.

    See also gwdatafind.http.HTTPConnection.find_types and
    FflConnection.find_types for details on the underlying method(s).
    """
    return sorted(connection.find_types(observatory, match=match),
                  key=lambda x: _type_priority(observatory, x, trend=trend))
def RunJob(self, job):
    """Does the actual work of the Cron, if the job is due to run.

    Args:
      job: The cronjob rdfvalue that should be run. Must be leased.

    Returns:
      A boolean indicating if this cron job was started or not. False may
      be returned when the threadpool is already full.

    Raises:
      LockError: if the object is not locked.
      ValueError: If the job argument is invalid.
    """
if not job . leased_until : raise LockError ( "CronJob must be leased for Run() to be called." ) if job . leased_until < rdfvalue . RDFDatetime . Now ( ) : raise LockError ( "CronJob lease expired for %s." % job . cron_job_id ) logging . info ( "Starting cron job: %s" , job . cron_job_id ) if job . args . action_type == job . args . ActionType . SYSTEM_CRON_ACTION : cls_name = job . args . system_cron_action . job_class_name job_cls = registry . SystemCronJobRegistry . CronJobClassByName ( cls_name ) name = "%s runner" % cls_name elif job . args . action_type == job . args . ActionType . HUNT_CRON_ACTION : job_cls = registry . CronJobRegistry . CronJobClassByName ( "RunHunt" ) name = "Hunt runner" else : raise ValueError ( "CronJob %s doesn't have a valid args type set." % job . cron_job_id ) run_state = rdf_cronjobs . CronJobRun ( cron_job_id = job . cron_job_id , status = "RUNNING" ) run_state . GenerateRunId ( ) run_obj = job_cls ( run_state , job ) wait_for_start_event , signal_event , wait_for_write_event = ( threading . Event ( ) , threading . Event ( ) , threading . Event ( ) ) try : self . _GetThreadPool ( ) . AddTask ( target = run_obj . StartRun , args = ( wait_for_start_event , signal_event , wait_for_write_event ) , name = name , blocking = False , inline = False ) if not wait_for_start_event . wait ( TASK_STARTUP_WAIT ) : logging . error ( "Cron job run task for %s is too slow to start." , job . cron_job_id ) # Most likely the thread pool is full and the task is sitting on the # queue . Make sure we don ' t put more things on the queue by returning # False . return False # We know that the cron job task has started , unblock it by setting # the signal event . If signal _ event is not set ( this happens if the # task sits on a ThreadPool ' s queue doing nothing , see the # if - statement above ) the task will just be a no - op when ThreadPool # finally gets to it . This way we can ensure that we can safely return # the lease and let another worker schedule the same job . signal_event . set ( ) wait_for_write_event . wait ( TASK_STARTUP_WAIT ) return True except threadpool . Full : return False
def prerequisite_check():
    """Check prerequisites of the framework, including Python version,
    installation of modules, etc.

    Returns:
        Optional[str]: If the check is not passed, return error message
        regarding the failed test case. None is returned otherwise.
    """
# Check Python version if sys . version_info < ( 3 , 6 ) : version_str = "%s.%s.%s" % sys . version_info [ : 3 ] # TRANSLATORS : This word is used as a part of search query suggested to users , # it may appears in context like " Ubuntu 16.04 install Python 3.7" search_url = build_search_query ( _ ( "install" ) + " Python 3.7" ) return _ ( "EH Forwarder Bot requires a minimum of Python 3.6 to run. You " "are currently using Python {version}. \n" "\n" "You may want to try:\n" "{url}" ) . format ( version = version_str , url = search_url ) # Check installations of modules modules_err = _ ( "You may want to visit the modules repository to find a list of " "available modules to install.\n" "https://github.com/blueset/ehForwarderBot/wiki/Channels-Repository" ) # 1 . At least 1 master channel must be installed try : next ( pkg_resources . iter_entry_points ( "ehforwarderbot.master" ) ) except StopIteration : return _ ( "No master channel detected. EH Forwarder Bot requires at least one " "master channel installed to run." ) + "\n\n" + modules_err # 2 . At least 1 slave channel must be installed try : next ( pkg_resources . iter_entry_points ( "ehforwarderbot.slave" ) ) except StopIteration : return _ ( "No slave channel detected. EH Forwarder Bot requires at least one " "slave channel installed to run." ) + "\n\n" + modules_err
def list_syntax(self):
    '''Prints a list of available syntax for the current paste service'''
    syntax_list = ['Available syntax for %s:' % (self)]
    logging.info(syntax_list[0])
    for key in self.SYNTAX_DICT.keys():
        syntax = '\t%-20s%-30s' % (key, self.SYNTAX_DICT[key])
        logging.info(syntax)
        syntax_list.append(syntax)
    return syntax_list
def __construct_from_components(self, ns_uri, prefix=None, schema_location=None):
    """Initialize this instance from a namespace URI, and optional prefix
    and schema location URI."""
    assert ns_uri  # other fields are optional
    self.uri = ns_uri
    self.schema_location = schema_location or None
    self.prefixes = OrderedSet()
    if prefix:
        self.prefixes.add(prefix)
    self.preferred_prefix = prefix or None
def save ( self , out , kind = None , ** kw ) : """Serializes the QR Code in one of the supported formats . The serialization format depends on the filename extension . * * Common keywords * * Name Description scale Integer or float indicating the size of a single module . Default : 1 . The interpretation of the scaling factor depends on the serializer . For pixel - based output ( like PNG ) the scaling factor is interepreted as pixel - size ( 1 = 1 pixel ) . EPS interprets ` ` 1 ` ` as 1 point ( 1/72 inch ) per module . Some serializers ( like SVG ) accept float values . If the serializer does not accept float values , the value will be converted to an integer value ( note : int ( 1.6 ) = = 1 ) . border Integer indicating the size of the quiet zone . If set to ` ` None ` ` ( default ) , the recommended border size will be used ( ` ` 4 ` ` for QR Codes , ` ` 2 ` ` for a Micro QR Codes ) . color A string or tuple representing a color value for the dark modules . The default value is " black " . The color can be provided as ` ` ( R , G , B ) ` ` tuple , as web color name ( like " red " ) or in hexadecimal format ( ` ` # RGB ` ` or ` ` # RRGGBB ` ` ) . Some serializers ( SVG and PNG ) accept an alpha transparency value like ` ` # RRGGBBAA ` ` . background A string or tuple representing a color for the light modules or background . See " color " for valid values . The default value depends on the serializer . SVG uses no background color ( ` ` None ` ` ) by default , other serializers use " white " as default background color . * * Scalable Vector Graphics ( SVG ) * * Name Description out Filename or io . BytesIO kind " svg " or " svgz " ( to create a gzip compressed SVG ) scale integer or float color Default : " # 000 " ( black ) ` ` None ` ` is a valid value . If set to ` ` None ` ` , the resulting path won ' t have a " stroke " attribute . The " stroke " attribute may be defined via CSS ( external ) . If an alpha channel is defined , the output depends of the used SVG version . For SVG versions > = 2.0 , the " stroke " attribute will have a value like " rgba ( R , G , B , A ) " , otherwise the path gets another attribute " stroke - opacity " to emulate the alpha channel . To minimize the document size , the SVG serializer uses automatically the shortest color representation : If a value like " # 00000 " is provided , the resulting document will have a color value of " # 000 " . If the color is " # FF0000 " , the resulting color is not " # F00 " , but the web color name " red " . background Default value ` ` None ` ` . If this paramater is set to another value , the resulting image will have another path which is used to define the background color . If an alpha channel is used , the resulting path may have a " fill - opacity " attribute ( for SVG version < 2.0) or the " fill " attribute has a " rgba ( R , G , B , A ) " value . See keyword " color " for further details . xmldecl Boolean value ( default : ` ` True ` ` ) indicating whether the document should have an XML declaration header . Set to ` ` False ` ` to omit the header . svgns Boolean value ( default : ` ` True ` ` ) indicating whether the document should have an explicit SVG namespace declaration . Set to ` ` False ` ` to omit the namespace declaration . The latter might be useful if the document should be embedded into a HTML 5 document where the SVG namespace is implicitly defined . title String ( default : ` ` None ` ` ) Optional title of the generated SVG document . 
desc String ( default : ` ` None ` ` ) Optional description of the generated SVG document . svgid A string indicating the ID of the SVG document ( if set to ` ` None ` ` ( default ) , the SVG element won ' t have an ID ) . svgclass Default : " segno " . The CSS class of the SVG document ( if set to ` ` None ` ` , the SVG element won ' t have a class ) . lineclass Default : " qrline " . The CSS class of the path element ( which draws the dark modules ( if set to ` ` None ` ` , the path won ' t have a class ) . omitsize Indicates if width and height attributes should be omitted ( default : ` ` False ` ` ) . If these attributes are omitted , a ` ` viewBox ` ` attribute will be added to the document . unit Default : ` ` None ` ` Inidctaes the unit for width / height and other coordinates . By default , the unit is unspecified and all values are in the user space . Valid values : em , ex , px , pt , pc , cm , mm , in , and percentages ( any string is accepted , this parameter is not validated by the serializer ) encoding Encoding of the XML document . " utf - 8 " by default . svgversion SVG version ( default : ` ` None ` ` ) . If specified ( a float ) , the resulting document has an explicit " version " attribute . If set to ` ` None ` ` , the document won ' t have a " version " attribute . This parameter is not validated . compresslevel Default : 9 . This parameter is only valid , if a compressed SVG document should be created ( file extension " svgz " ) . 1 is fastest and produces the least compression , 9 is slowest and produces the most . 0 is no compression . * * Portable Network Graphics ( PNG ) * * Name Description out Filename or io . BytesIO kind " png " scale integer color Default : " # 000 " ( black ) ` ` None ` ` is a valid value iff background is not ` ` None ` ` . background Default value ` ` # fff ` ` ( white ) See keyword " color " for further details . compresslevel Default : 9 . Integer indicating the compression level for the ` ` IDAT ` ` ( data ) chunk . 1 is fastest and produces the least compression , 9 is slowest and produces the most . 0 is no compression . dpi Default : None . Specifies the DPI value for the image . By default , the DPI value is unspecified . Please note that the DPI value is converted into meters ( maybe with rounding errors ) since PNG does not support the unit " dots per inch " . addad Boolean value ( default : True ) to ( dis - ) allow a " Software " comment indicating that the file was created by Segno . * * Encapsulated PostScript ( EPS ) * * Name Description out Filename or io . StringIO kind " eps " scale integer or float color Default : " # 000 " ( black ) background Default value : ` ` None ` ` ( no background ) * * Portable Document Format ( PDF ) * * Name Description out Filename or io . BytesIO kind " pdf " scale integer or float compresslevel Default : 9 . Integer indicating the compression level . 1 is fastest and produces the least compression , 9 is slowest and produces the most . 0 is no compression . * * Text ( TXT ) * * Does not support the " scale " keyword ! Name Description out Filename or io . StringIO kind " txt " color Default : " 1" background Default : " 0" * * ANSI escape code * * Supports the " border " keyword , only ! Name Description kind " ans " * * Portable Bitmap ( PBM ) * * Name Description out Filename or io . BytesIO kind " pbm " scale integer plain Default : False . Boolean to switch between the P4 and P1 format . If set to ` ` True ` ` , the ( outdated ) P1 serialization format is used . 
* * Portable Arbitrary Map ( PAM ) * * Name Description out Filename or io . BytesIO kind " pam " scale integer color Default : " # 000 " ( black ) . background Default value ` ` # fff ` ` ( white ) . Use ` ` None ` ` for a transparent background . * * LaTeX / PGF / TikZ * * To use the output of this serializer , the ` ` PGF / TikZ ` ` ( and optionally ` ` hyperref ` ` ) package is required in the LaTeX environment . The serializer itself does not depend on any external packages . Name Description out Filename or io . StringIO kind " tex " scale integer or float color LaTeX color name ( default : " black " ) . The color is written " at it is " , so ensure that the color is a standard color or it has been defined in the enclosing LaTeX document . url Default : ` ` None ` ` . Optional URL where the QR Code should point to . Requires the ` ` hyperref ` ` package in your LaTeX environment . * * X BitMap ( XBM ) * * Name Description out Filename or io . StringIO kind " xbm " scale integer name Name of the variable ( default : " img " ) * * X PixMap ( XPM ) * * Name Description out Filename or io . StringIO kind " xpm " scale integer color Default : " # 000 " ( black ) . background Default value ` ` # fff ` ` ( white ) ` ` None ` ` indicates a transparent background . name Name of the variable ( default : " img " ) : param out : A filename or a writable file - like object with a ` ` name ` ` attribute . Use the ` kind ` parameter if ` out ` is a : py : class : ` io . ByteIO ` or : py : class : ` io . StringIO ` stream which don ' t have a ` ` name ` ` attribute . : param kind : If the desired output format cannot be determined from the ` ` out ` ` parameter , this parameter can be used to indicate the serialization format ( i . e . " svg " to enforce SVG output ) : param kw : Any of the supported keywords by the specific serialization method ."""
    writers.save(self.matrix, self._version, out, kind, **kw)
def velocity_dispersion_numerical ( self , kwargs_lens , kwargs_lens_light , kwargs_anisotropy , kwargs_aperture , psf_fwhm , aperture_type , anisotropy_model , r_eff = None , kwargs_numerics = { } , MGE_light = False , MGE_mass = False , lens_model_kinematics_bool = None , light_model_kinematics_bool = None , Hernquist_approx = False ) : """Computes the LOS velocity dispersion of the deflector galaxy with arbitrary combinations of light and mass models . For a detailed description , visit the description of the Galkin ( ) class . Additionaly to executing the Galkin routine , it has an optional Multi - Gaussian - Expansion decomposition of lens and light models that do not have a three - dimensional distribution built in , such as Sersic profiles etc . The center of all the lens and lens light models that are part of the kinematic estimate must be centered on the same point . : param kwargs _ lens : lens model parameters : param kwargs _ lens _ light : lens light parameters : param kwargs _ anisotropy : anisotropy parameters ( see Galkin module ) : param kwargs _ aperture : aperture parameters ( see Galkin module ) : param psf _ fwhm : full width at half maximum of the seeing ( Gaussian form ) : param aperture _ type : type of aperture ( see Galkin module : param anisotropy _ model : stellar anisotropy model ( see Galkin module ) : param r _ eff : a rough estimate of the half light radius of the lens light in case of computing the MGE of the light profile : param kwargs _ numerics : keyword arguments that contain numerical options ( see Galkin module ) : param MGE _ light : bool , if true performs the MGE for the light distribution : param MGE _ mass : bool , if true performs the MGE for the mass distribution : param lens _ model _ kinematics _ bool : bool list of length of the lens model . Only takes a subset of all the models as part of the kinematics computation ( can be used to ignore substructure , shear etc that do not describe the main deflector potential : param light _ model _ kinematics _ bool : bool list of length of the light model . Only takes a subset of all the models as part of the kinematics computation ( can be used to ignore light components that do not describe the main deflector : return : LOS velocity dispersion [ km / s ]"""
kwargs_cosmo = { 'D_d' : self . lensCosmo . D_d , 'D_s' : self . lensCosmo . D_s , 'D_ds' : self . lensCosmo . D_ds } mass_profile_list = [ ] kwargs_profile = [ ] if lens_model_kinematics_bool is None : lens_model_kinematics_bool = [ True ] * len ( kwargs_lens ) for i , lens_model in enumerate ( self . kwargs_options [ 'lens_model_list' ] ) : if lens_model_kinematics_bool [ i ] is True : mass_profile_list . append ( lens_model ) if lens_model in [ 'INTERPOL' , 'INTERPOL_SCLAED' ] : center_x , center_y = self . _lensModelExt . lens_center ( kwargs_lens , k = i ) kwargs_lens_i = copy . deepcopy ( kwargs_lens [ i ] ) kwargs_lens_i [ 'grid_interp_x' ] -= center_x kwargs_lens_i [ 'grid_interp_y' ] -= center_y else : kwargs_lens_i = { k : v for k , v in kwargs_lens [ i ] . items ( ) if not k in [ 'center_x' , 'center_y' ] } kwargs_profile . append ( kwargs_lens_i ) if MGE_mass is True : lensModel = LensModel ( lens_model_list = mass_profile_list ) massModel = LensModelExtensions ( lensModel ) theta_E = massModel . effective_einstein_radius ( kwargs_profile ) r_array = np . logspace ( - 4 , 2 , 200 ) * theta_E mass_r = lensModel . kappa ( r_array , np . zeros_like ( r_array ) , kwargs_profile ) amps , sigmas , norm = mge . mge_1d ( r_array , mass_r , N = 20 ) mass_profile_list = [ 'MULTI_GAUSSIAN_KAPPA' ] kwargs_profile = [ { 'amp' : amps , 'sigma' : sigmas } ] light_profile_list = [ ] kwargs_light = [ ] if light_model_kinematics_bool is None : light_model_kinematics_bool = [ True ] * len ( kwargs_lens_light ) for i , light_model in enumerate ( self . kwargs_options [ 'lens_light_model_list' ] ) : if light_model_kinematics_bool [ i ] : light_profile_list . append ( light_model ) kwargs_lens_light_i = { k : v for k , v in kwargs_lens_light [ i ] . items ( ) if not k in [ 'center_x' , 'center_y' ] } if 'q' in kwargs_lens_light_i : kwargs_lens_light_i [ 'q' ] = 1 kwargs_light . append ( kwargs_lens_light_i ) if r_eff is None : lensAnalysis = LensAnalysis ( { 'lens_light_model_list' : light_profile_list } ) r_eff = lensAnalysis . half_light_radius_lens ( kwargs_light , model_bool_list = light_model_kinematics_bool ) if Hernquist_approx is True : light_profile_list = [ 'HERNQUIST' ] kwargs_light = [ { 'Rs' : r_eff , 'amp' : 1. } ] else : if MGE_light is True : lightModel = LightModel ( light_profile_list ) r_array = np . logspace ( - 3 , 2 , 200 ) * r_eff * 2 flux_r = lightModel . surface_brightness ( r_array , 0 , kwargs_light ) amps , sigmas , norm = mge . mge_1d ( r_array , flux_r , N = 20 ) light_profile_list = [ 'MULTI_GAUSSIAN' ] kwargs_light = [ { 'amp' : amps , 'sigma' : sigmas } ] galkin = Galkin ( mass_profile_list , light_profile_list , aperture_type = aperture_type , anisotropy_model = anisotropy_model , fwhm = psf_fwhm , kwargs_cosmo = kwargs_cosmo , ** kwargs_numerics ) sigma2 = galkin . vel_disp ( kwargs_profile , kwargs_light , kwargs_anisotropy , kwargs_aperture ) return sigma2
def login_required(obj):
    """Requires that the user be logged in in order to gain access to the
    resource at the specified URI."""
    decorator = request_passes_test(lambda r, *args, **kwargs: r.user.is_authenticated())
    return wrap_object(obj, decorator)
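A short usage sketch, assuming the decorator is applied to a request-handling callable (wrap_object also supports wrapping classes); the handler below is hypothetical.

    # Hypothetical resource handler; unauthenticated requests are rejected
    # by the wrapped test before this body runs.
    @login_required
    def account_summary(request):
        return {'user': request.user.username}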
def pvt(bars):
    """Price Volume Trend"""
    trend = ((bars['close'] - bars['close'].shift(1)) / bars['close'].shift(1)) * bars['volume']
    return trend.cumsum()
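A small worked example, assuming pandas is available and bars is a DataFrame with 'close' and 'volume' columns; the numbers are illustrative only.

    import pandas as pd

    bars = pd.DataFrame({
        'close':  [10.0, 10.5, 10.4, 10.8],
        'volume': [1000, 1200, 900, 1500],
    })

    pvt_series = pvt(bars)
    # First value is NaN (no previous close); later values accumulate the
    # percentage price change weighted by volume, e.g. (10.5-10.0)/10.0 * 1200 = 60.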
def get_ngram_counts(self):
    '''Returns a list of n-gram counts

    Array of class counts; the last item is for the corpus.
    '''
    ngram_counts = {'classes': [], 'corpus': 0}
    # sorted() replaces the Python-2-only keys().sort(); the print statement
    # is likewise converted to a function call.
    doc_ids = sorted(self.term_count_n.keys())
    for doc_id in doc_ids:
        print(self.term_count_n[doc_id])
        class_ngrams = len(self.term_count_n[doc_id]['ngrams'])
        ngram_counts['classes'].append(class_ngrams)
    corpus_ngrams = len(self.corpus_count_n['ngrams'])
    ngram_counts['corpus'] = corpus_ngrams
    return ngram_counts
def restore_descriptor(self, table_name, columns, constraints, autoincrement_column=None):
    """Restore descriptor from SQL"""
# Fields fields = [ ] for column in columns : if column . name == autoincrement_column : continue field_type = self . restore_type ( column . type ) field = { 'name' : column . name , 'type' : field_type } if not column . nullable : field [ 'constraints' ] = { 'required' : True } fields . append ( field ) # Primary key pk = [ ] for constraint in constraints : if isinstance ( constraint , sa . PrimaryKeyConstraint ) : for column in constraint . columns : if column . name == autoincrement_column : continue pk . append ( column . name ) # Foreign keys fks = [ ] if self . __dialect == 'postgresql' : for constraint in constraints : if isinstance ( constraint , sa . ForeignKeyConstraint ) : resource = '' own_fields = [ ] foreign_fields = [ ] for element in constraint . elements : own_fields . append ( element . parent . name ) if element . column . table . name != table_name : resource = self . restore_bucket ( element . column . table . name ) foreign_fields . append ( element . column . name ) if len ( own_fields ) == len ( foreign_fields ) == 1 : own_fields = own_fields . pop ( ) foreign_fields = foreign_fields . pop ( ) fks . append ( { 'fields' : own_fields , 'reference' : { 'resource' : resource , 'fields' : foreign_fields } , } ) # Desscriptor descriptor = { } descriptor [ 'fields' ] = fields if len ( pk ) > 0 : if len ( pk ) == 1 : pk = pk . pop ( ) descriptor [ 'primaryKey' ] = pk if len ( fks ) > 0 : descriptor [ 'foreignKeys' ] = fks return descriptor
def deleteLink(self, linkdict):
    """Delete link if PDF"""
    CheckParent(self)
    val = _fitz.Page_deleteLink(self, linkdict)
    if linkdict["xref"] == 0:
        return
    linkid = linkdict["id"]
    try:
        linkobj = self._annot_refs[linkid]
        linkobj._erase()
    except:
        pass
    return val
def create_api_method_response(restApiId, resourcePath, httpMethod, statusCode,
                               responseParameters=None, responseModels=None,
                               region=None, key=None, keyid=None, profile=None):
    '''Create API method response for a method on a given resource in the given API

    CLI Example:

    .. code-block:: bash

        salt myminion boto_apigateway.create_api_method_response restApiId resourcePath httpMethod \\
            statusCode responseParameters='{"name", "True|False"}' responseModels='{"content-type", "model"}'
    '''
    try:
        resource = describe_api_resource(restApiId, resourcePath, region=region,
                                         key=key, keyid=keyid, profile=profile).get('resource')
        if resource:
            responseParameters = dict() if responseParameters is None else responseParameters
            responseModels = dict() if responseModels is None else responseModels

            conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
            response = conn.put_method_response(restApiId=restApiId, resourceId=resource['id'],
                                                httpMethod=httpMethod,
                                                statusCode=str(statusCode),  # future lint: disable=blacklisted-function
                                                responseParameters=responseParameters,
                                                responseModels=responseModels)
            return {'created': True, 'response': response}
        return {'created': False, 'error': 'no such resource'}
    except ClientError as e:
        return {'created': False, 'error': __utils__['boto3.get_error'](e)}
async def _write_frame_awaiting_response ( self , waiter_id , frame , request , no_wait , check_open = True , drain = True ) : '''Write a frame and set a waiter for the response ( unless no _ wait is set )'''
if no_wait : await self . _write_frame ( frame , request , check_open = check_open , drain = drain ) return None f = self . _set_waiter ( waiter_id ) try : await self . _write_frame ( frame , request , check_open = check_open , drain = drain ) except Exception : self . _get_waiter ( waiter_id ) f . cancel ( ) raise result = await f try : self . _get_waiter ( waiter_id ) except aioamqp . SynchronizationError : # no waiter to get pass return result
def logical_intf_helper ( interface ) : """Logical Interface finder by name . Create if it doesn ' t exist . This is useful when adding logical interfaces for inline or capture interfaces . : param interface : logical interface name : return str href : href of logical interface"""
if interface is None : return LogicalInterface . get_or_create ( name = 'default_eth' ) . href elif isinstance ( interface , LogicalInterface ) : return interface . href elif interface . startswith ( 'http' ) : return interface return LogicalInterface . get_or_create ( name = interface ) . href
def c32address ( version , hash160hex ) : """> > > c32address ( 22 , ' a46ff88886c2ef9762d970b4d2c63678835bd39d ' ) ' SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7' > > > c32address ( 0 , ' 00000 ' ) ' S000002AA028H ' > > > c32address ( 31 , ' 000001 ' ) ' SZ000005HZ3DVN ' > > > c32address ( 20 , ' 1000001 ' ) ' SM8000004WBEWKC ' > > > c32address ( 26 , ' 100000 ' ) ' ST8000002YBNPV3'"""
if not re . match ( r'^[0-9a-fA-F]{40}$' , hash160hex ) : raise ValueError ( 'Invalid argument: not a hash160 hex string' ) c32string = c32checkEncode ( version , hash160hex ) return 'S{}' . format ( c32string )
def delNode ( self , address ) : """Just send it along if requested , should be able to delete the node even if it isn ' t in our config anywhere . Usually used for normalization ."""
if address in self . nodes : del self . nodes [ address ] self . poly . delNode ( address )
def set_crc ( self ) : """Set Userdata [ 13 ] and Userdata [ 14 ] to the CRC value ."""
data = self . bytes [ 6 : 20 ] crc = int ( 0 ) for b in data : # pylint : disable = unused - variable for bit in range ( 0 , 8 ) : fb = b & 0x01 fb = fb ^ 0x01 if ( crc & 0x8000 ) else fb fb = fb ^ 0x01 if ( crc & 0x4000 ) else fb fb = fb ^ 0x01 if ( crc & 0x1000 ) else fb fb = fb ^ 0x01 if ( crc & 0x0008 ) else fb crc = ( ( crc << 1 ) | fb ) & 0xffff b = b >> 1 self . _userdata [ 'd13' ] = ( crc >> 8 ) & 0xff self . _userdata [ 'd14' ] = crc & 0xff
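A standalone sketch of the same CRC loop, handy for checking the bit-twiddling outside the message class; the 14-byte payload below is arbitrary:

def insteon_crc(data):
    # Mirrors the loop above: fold each byte into a 16-bit CRC, one bit at a time.
    crc = 0
    for b in data:
        for _ in range(8):
            fb = b & 0x01
            fb = fb ^ 0x01 if (crc & 0x8000) else fb
            fb = fb ^ 0x01 if (crc & 0x4000) else fb
            fb = fb ^ 0x01 if (crc & 0x1000) else fb
            fb = fb ^ 0x01 if (crc & 0x0008) else fb
            crc = ((crc << 1) | fb) & 0xffff
            b = b >> 1
    return crc

# Arbitrary 14-byte user-data payload; the high and low CRC bytes would go
# into Userdata[13] and Userdata[14] respectively.
payload = bytes(range(14))
crc = insteon_crc(payload)
print(hex((crc >> 8) & 0xff), hex(crc & 0xff))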
def disconnectNetToMs ( Facility_presence = 0 , ProgressIndicator_presence = 0 , UserUser_presence = 0 , AllowedActions_presence = 0 ) : """DISCONNECT Section 9.3.7.1"""
a = TpPd ( pd = 0x3 ) b = MessageType ( mesType = 0x25 ) # 00100101 c = Cause ( ) packet = a / b / c if Facility_presence == 1 : d = FacilityHdr ( ieiF = 0x1C , eightBitF = 0x0 ) packet = packet / d if ProgressIndicator_presence == 1 : e = ProgressIndicatorHdr ( ieiPI = 0x1E , eightBitPI = 0x0 ) packet = packet / e if UserUser_presence == 1 : f = UserUserHdr ( ieiUU = 0x7E , eightBitUU = 0x0 ) packet = packet / f if AllowedActions_presence == 1 : g = AllowedActionsHdr ( ieiAA = 0x7B , eightBitAA = 0x0 ) packet = packet / g return packet
def parse_interval ( interval ) : """Attempt to parse an ISO8601 formatted ` ` interval ` ` . Returns a tuple of ` ` datetime . datetime ` ` and ` ` datetime . timedelta ` ` objects , order dependent on ` ` interval ` ` ."""
a , b = str ( interval ) . upper ( ) . strip ( ) . split ( '/' ) if a [ 0 ] == 'P' and b [ 0 ] == 'P' : raise ParseError ( ) if a [ 0 ] != 'P' and b [ 0 ] != 'P' : return parse_date ( a ) , parse_date ( b ) if a [ 0 ] == 'P' : a = parse_duration ( a ) else : a = parse_date ( a ) if b [ 0 ] == 'P' : b = parse_duration ( b ) else : b = parse_date ( b ) return a , b
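For the plain date/date case the split can be reproduced with the standard library alone; this sketch deliberately avoids the module's parse_date and parse_duration helpers and only illustrates the expected interval layout:

from datetime import datetime

interval = '2021-01-01T00:00:00/2021-01-02T12:00:00'
start_s, end_s = interval.split('/')
start, end = datetime.fromisoformat(start_s), datetime.fromisoformat(end_s)
print(end - start)  # 1 day, 12:00:00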
def view_list ( self ) : '''return a list of polygon indexes lists for the waypoints'''
done = set ( ) ret = [ ] while len ( done ) != self . count ( ) : p = self . view_indexes ( done ) if len ( p ) > 0 : ret . append ( p ) return ret
def inspect_distribution ( self , image , auth_config = None ) : """Get image digest and platform information by contacting the registry . Args : image ( str ) : The image name to inspect auth _ config ( dict ) : Override the credentials that are found in the config for this request . ` ` auth _ config ` ` should contain the ` ` username ` ` and ` ` password ` ` keys to be valid . Returns : ( dict ) : A dict containing distribution data Raises : : py : class : ` docker . errors . APIError ` If the server returns an error ."""
registry , _ = auth . resolve_repository_name ( image ) headers = { } if auth_config is None : header = auth . get_config_header ( self , registry ) if header : headers [ 'X-Registry-Auth' ] = header else : log . debug ( 'Sending supplied auth config' ) headers [ 'X-Registry-Auth' ] = auth . encode_header ( auth_config ) url = self . _url ( "/distribution/{0}/json" , image ) return self . _result ( self . _get ( url , headers = headers ) , True )
def deconv_stride2_multistep ( x , nbr_steps , output_filters , name = None , reuse = None ) : """Use a deconvolution to upsample x by 2 * * ` nbr _ steps ` . Args : x : a ` Tensor ` with shape ` [ batch , spatial , depth ] ` or ` [ batch , spatial _ 1 , spatial _ 2 , depth ] ` nbr _ steps : an int specifying the number of doubling upsample rounds to apply . output _ filters : an int specifying the filter count for the deconvolutions name : a string reuse : a boolean Returns : a ` Tensor ` with shape ` [ batch , spatial * ( 2 * * nbr _ steps ) , output _ filters ] ` or ` [ batch , spatial _ 1 * ( 2 * * nbr _ steps ) , spatial _ 2 * ( 2 * * nbr _ steps ) , output _ filters ] `"""
with tf . variable_scope ( name , default_name = "deconv_stride2_multistep" , values = [ x ] , reuse = reuse ) : def deconv1d ( cur , i ) : cur_shape = shape_list ( cur ) thicker = conv ( cur , output_filters * 2 , ( 1 , 1 ) , padding = "SAME" , activation = tf . nn . relu , name = "deconv1d" + str ( i ) ) return tf . reshape ( thicker , [ cur_shape [ 0 ] , cur_shape [ 1 ] * 2 , 1 , output_filters ] ) def deconv2d ( cur , i ) : thicker = conv ( cur , output_filters * 4 , ( 1 , 1 ) , padding = "SAME" , activation = tf . nn . relu , name = "deconv2d" + str ( i ) ) return tf . depth_to_space ( thicker , 2 ) cur = x for i in range ( nbr_steps ) : if cur . get_shape ( ) [ 2 ] == 1 : cur = deconv1d ( cur , i ) else : cur_dim = shape_list ( cur ) [ 2 ] if isinstance ( cur_dim , int ) : if cur_dim == 1 : cur = deconv1d ( cur , i ) else : cur = deconv2d ( cur , i ) else : cur = tf . cond ( tf . equal ( cur_dim , 1 ) , lambda idx = i : deconv1d ( cur , idx ) , lambda idx = i : deconv2d ( cur , idx ) ) return cur
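The 2-D branch relies on tf.depth_to_space to trade channels for spatial resolution; a rough NumPy equivalent of that rearrangement (block size 2, NHWC layout) is sketched below:

import numpy as np

def depth_to_space_nhwc(x, block=2):
    # [batch, h, w, c * block**2] -> [batch, h * block, w * block, c]
    b, h, w, c = x.shape
    out_c = c // (block * block)
    x = x.reshape(b, h, w, block, block, out_c)
    x = x.transpose(0, 1, 3, 2, 4, 5)
    return x.reshape(b, h * block, w * block, out_c)

x = np.arange(1 * 2 * 2 * 8).reshape(1, 2, 2, 8)
print(depth_to_space_nhwc(x).shape)  # (1, 4, 4, 2)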
def _get_seal_key_ntlm1 ( negotiate_flags , exported_session_key ) : """3.4.5.3 SEALKEY Calculates the seal _ key used to seal ( encrypt ) messages . This is for authentication where NTLMSSP _ NEGOTIATE _ EXTENDED _ SESSIONSECURITY has not been negotiated . The key is weakened if NTLMSSP _ NEGOTIATE _ 56 is not negotiated , in which case it defaults to a 40 - bit key @ param negotiate _ flags : The negotiate _ flags structure sent by the server @ param exported _ session _ key : A 128 - bit session key used to derive signing and sealing keys @ return seal _ key : Key used to seal messages"""
if negotiate_flags & NegotiateFlags . NTLMSSP_NEGOTIATE_56 : seal_key = exported_session_key [ : 7 ] + binascii . unhexlify ( 'a0' ) else : seal_key = exported_session_key [ : 5 ] + binascii . unhexlify ( 'e538b0' ) return seal_key
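A quick self-contained check of the two branches with a dummy session key, showing that both paths pad the truncated key material back to 8 bytes; it does not assert the actual flag constant:

import binascii

session_key = bytes(range(16))  # dummy 128-bit exported session key

strong = session_key[:7] + binascii.unhexlify('a0')    # 56-bit key material, padded
weak = session_key[:5] + binascii.unhexlify('e538b0')  # 40-bit key material, padded
print(len(strong), len(weak))  # 8 8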
def _discover_ks_version ( self , url ) : '''Keystone API version discovery'''
result = salt . utils . http . query ( url , backend = 'requests' , status = True , decode = True , decode_type = 'json' ) versions = json . loads ( result [ 'body' ] ) try : links = [ ver [ 'links' ] for ver in versions [ 'versions' ] [ 'values' ] if ver [ 'status' ] == 'stable' ] [ 0 ] if result [ 'status' ] == 300 else versions [ 'version' ] [ 'links' ] resurl = [ link [ 'href' ] for link in links if link [ 'rel' ] == 'self' ] [ 0 ] return self . _get_version_from_url ( resurl ) except KeyError as exc : raise SaltCloudSystemExit ( 'KeyError: key {0} not found in API response: {1}' . format ( exc , versions ) )
def _config_win32_nameservers ( self , nameservers ) : """Configure a NameServer registry entry ."""
# we call str ( ) on nameservers to convert it from unicode to ascii nameservers = str ( nameservers ) split_char = self . _determine_split_char ( nameservers ) ns_list = nameservers . split ( split_char ) for ns in ns_list : if ns not in self . nameservers : self . nameservers . append ( ns )
def _find_statement_by_line ( node , line ) : """Extracts the statement on a specific line from an AST . If the line number of node matches line , it will be returned ; otherwise its children are iterated and the function is called recursively . : param node : An astroid node . : type node : astroid . bases . NodeNG : param line : The line number of the statement to extract . : type line : int : returns : The statement on the line , or None if no statement for the line can be found . : rtype : astroid . bases . NodeNG or None"""
if isinstance ( node , ( nodes . ClassDef , nodes . FunctionDef ) ) : # This is an inaccuracy in the AST : the nodes that can be # decorated do not carry explicit information on which line # the actual definition ( class / def ) , but . fromline seems to # be close enough . node_line = node . fromlineno else : node_line = node . lineno if node_line == line : return node for child in node . get_children ( ) : result = _find_statement_by_line ( child , line ) if result : return result return None
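A rough analogue using the standard ast module instead of astroid, just to show the line-matching recursion; astroid's fromlineno special case for decorated definitions is skipped here:

import ast

def find_statement_by_line(node, line):
    # Return the first node whose lineno matches, searching depth-first.
    if getattr(node, 'lineno', None) == line:
        return node
    for child in ast.iter_child_nodes(node):
        found = find_statement_by_line(child, line)
        if found:
            return found
    return None

tree = ast.parse("x = 1\ny = 2\nz = x + y\n")
print(ast.dump(find_statement_by_line(tree, 2)))  # the 'y = 2' Assign node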
def get_temperature ( self ) : """Get current temperature in celsius ."""
try : request = requests . get ( '{}/temp' . format ( self . resource ) , timeout = self . timeout , allow_redirects = False ) self . temperature = request . json ( ) [ 'compensated' ] return self . temperature except requests . exceptions . ConnectionError : raise exceptions . MyStromConnectionError ( ) except ValueError : raise exceptions . MyStromNotVersionTwoSwitch ( )
def run_command_async ( self , msg ) : ''': type message _ generator : generator of dict : param message _ generator : Generates messages from slack that should be run : type fire _ all : bool : param fire _ all : Whether to also fire messages to the event bus : type tag : str : param tag : The tag to send to use to send to the event bus : type interval : int : param interval : time to wait between ending a loop and beginning the next'''
log . debug ( 'Going to run a command asynchronous' ) runner_functions = sorted ( salt . runner . Runner ( __opts__ ) . functions ) # Parse args and kwargs cmd = msg [ 'cmdline' ] [ 0 ] args , kwargs = self . parse_args_and_kwargs ( msg [ 'cmdline' ] ) # Check for pillar string representation of dict and convert it to dict if 'pillar' in kwargs : kwargs . update ( pillar = ast . literal_eval ( kwargs [ 'pillar' ] ) ) # Check for target . Otherwise assume None target = msg [ 'target' ] [ 'target' ] # Check for tgt _ type . Otherwise assume glob tgt_type = msg [ 'target' ] [ 'tgt_type' ] log . debug ( 'target_type is: %s' , tgt_type ) if cmd in runner_functions : runner = salt . runner . RunnerClient ( __opts__ ) log . debug ( 'Command %s will run via runner_functions' , cmd ) # pylint is tripping # pylint : disable = missing - whitespace - after - comma job_id_dict = runner . asynchronous ( cmd , { 'args' : args , 'kwargs' : kwargs } ) job_id = job_id_dict [ 'jid' ] # Default to trying to run as a client module . else : local = salt . client . LocalClient ( ) log . debug ( 'Command %s will run via local.cmd_async, targeting %s' , cmd , target ) log . debug ( 'Running %s, %s, %s, %s, %s' , target , cmd , args , kwargs , tgt_type ) # according to https : / / github . com / saltstack / salt - api / issues / 164 , tgt _ type has changed to expr _ form job_id = local . cmd_async ( six . text_type ( target ) , cmd , arg = args , kwarg = kwargs , tgt_type = six . text_type ( tgt_type ) ) log . info ( 'ret from local.cmd_async is %s' , job_id ) return job_id
def compute_node_positions ( self ) : """Computes nodes positions . Arranges nodes in a line starting at ( x , y ) = ( 0,0 ) . Node radius is assumed to be equal to 0.5 units . Nodes are placed at integer locations ."""
xs = [ 0 ] * len ( self . nodes ) ys = [ 0 ] * len ( self . nodes ) for i , _ in enumerate ( self . nodes [ 1 : ] , start = 1 ) : prev_r = self . node_sizes [ i - 1 ] / 2 curr_r = self . node_sizes [ i ] / 2 xs [ i ] = xs [ i - 1 ] + prev_r + curr_r self . node_coords = { "x" : xs , "y" : ys }
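The spacing rule (each node sits half of the previous node's size plus half of its own size to the right of its neighbour) can be checked in isolation; the sizes below are arbitrary:

node_sizes = [1.0, 2.0, 1.0]

xs = [0.0] * len(node_sizes)
for i in range(1, len(node_sizes)):
    xs[i] = xs[i - 1] + node_sizes[i - 1] / 2 + node_sizes[i] / 2
print(xs)  # [0.0, 1.5, 3.0]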
def namedb_get_all_importing_namespace_hashes ( self , current_block ) : """Get the list of all non - expired preordered and revealed namespace hashes ."""
query = "SELECT preorder_hash FROM namespaces WHERE (op = ? AND reveal_block < ?) OR (op = ? AND block_number < ?);" args = ( NAMESPACE_REVEAL , current_block + NAMESPACE_REVEAL_EXPIRE , NAMESPACE_PREORDER , current_block + NAMESPACE_PREORDER_EXPIRE ) namespace_rows = namedb_query_execute ( cur , query , args ) ret = [ ] for namespace_row in namespace_rows : ret . append ( namespace_row [ 'preorder_hash' ] ) return ret
def from_model_files ( cls , limits , input_model , investigation_time = 1.0 , simple_mesh_spacing = 1.0 , complex_mesh_spacing = 5.0 , mfd_width = 0.1 , area_discretisation = 10.0 ) : """Reads the hazard model from a file : param list limits : Grid configuration [ west , east , xspc , south , north , yspc , upper , lower , zspc ] : param str input _ model : Path to input source model : param float investigation _ time : Investigation time of Poisson model : param float simple _ mesh _ spacing : Rupture mesh spacing of simple fault ( km ) : param float complex _ mesh _ spacing : Rupture mesh spacing of complex fault ( km ) : param float mfd _ width : Spacing ( in magnitude units ) of MFD : param float area _ discretisation : Spacing of discretisation of area source ( km )"""
converter = SourceConverter ( investigation_time , simple_mesh_spacing , complex_mesh_spacing , mfd_width , area_discretisation ) sources = [ ] for grp in nrml . to_python ( input_model , converter ) : sources . extend ( grp . sources ) return cls ( limits , sources , area_discretisation )
def safe_py_code ( code ) : '''Check a string to see if it has any potentially unsafe routines which could be executed via Python ; this routine is used to improve the safety of modules such as virtualenv'''
bads = ( 'import' , ';' , 'subprocess' , 'eval' , 'open' , 'file' , 'exec' , 'input' ) for bad in bads : if code . count ( bad ) : return False return True
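Usage is straightforward; note the check is a substring blacklist, so identifiers that merely contain a banned token are rejected as well:

print(safe_py_code("x = 1 + 1"))           # True
print(safe_py_code("import os"))           # False, 'import' is blacklisted
print(safe_py_code("use_fileserver = 1"))  # False, substring match on 'file'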
def __path ( self , path ) : """Adds the prefix to the given path : param path : Z - Path : return : Prefixed Z - Path"""
if path . startswith ( self . __prefix ) : return path return "{}{}" . format ( self . __prefix , path )
def deriv2 ( self , p ) : """Second derivative of the power transform Parameters p : array - like Mean parameters Returns g ' ' ( p ) : array Second derivative of the power transform of ` p ` Notes g ' ' ( ` p ` ) = ` power ` * ( ` power ` - 1 ) * ` p ` * * ( ` power ` - 2)"""
return self . power * ( self . power - 1 ) * np . power ( p , self . power - 2 )
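A numeric sanity check of the closed form against a central finite difference, using power = 3 as an arbitrary example:

import numpy as np

power = 3.0
p = np.array([0.5, 1.0, 2.0])

analytic = power * (power - 1) * np.power(p, power - 2)  # same formula as deriv2

h = 1e-4
numeric = (np.power(p + h, power) - 2 * np.power(p, power) + np.power(p - h, power)) / h ** 2
print(np.allclose(analytic, numeric, rtol=1e-4))  # True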
def _find_vpcs ( vpc_id = None , vpc_name = None , cidr = None , tags = None , region = None , key = None , keyid = None , profile = None ) : '''Given VPC properties , find and return matching VPC ids . Borrowed from boto _ vpc ; these could be refactored into a common library'''
if all ( ( vpc_id , vpc_name ) ) : raise SaltInvocationError ( 'Only one of vpc_name or vpc_id may be ' 'provided.' ) if not any ( ( vpc_id , vpc_name , tags , cidr ) ) : raise SaltInvocationError ( 'At least one of the following must be ' 'provided: vpc_id, vpc_name, cidr or tags.' ) local_get_conn = __utils__ [ 'boto.get_connection_func' ] ( 'vpc' ) conn = local_get_conn ( region = region , key = key , keyid = keyid , profile = profile ) filter_parameters = { 'filters' : { } } if vpc_id : filter_parameters [ 'vpc_ids' ] = [ vpc_id ] if cidr : filter_parameters [ 'filters' ] [ 'cidr' ] = cidr if vpc_name : filter_parameters [ 'filters' ] [ 'tag:Name' ] = vpc_name if tags : for tag_name , tag_value in six . iteritems ( tags ) : filter_parameters [ 'filters' ] [ 'tag:{0}' . format ( tag_name ) ] = tag_value vpcs = conn . get_all_vpcs ( ** filter_parameters ) log . debug ( 'The filters criteria %s matched the following VPCs:%s' , filter_parameters , vpcs ) if vpcs : return [ vpc . id for vpc in vpcs ] else : return [ ]
def merge_option_dicts ( old_opts , new_opts ) : """Update the old _ opts option dictionary with the options defined in new _ opts . Instead of a shallow update as would be performed by calling old _ opts . update ( new _ opts ) , this updates the dictionaries of all option types separately . Given two dictionaries old _ opts = { ' a ' : { ' x ' : ' old ' , ' y ' : ' old ' } } and new _ opts = { ' a ' : { ' y ' : ' new ' , ' z ' : ' new ' } , ' b ' : { ' k ' : ' new ' } } this returns a dictionary { ' a ' : { ' x ' : ' old ' , ' y ' : ' new ' , ' z ' : ' new ' } , ' b ' : { ' k ' : ' new ' } }"""
merged = dict ( old_opts ) for option_type , options in new_opts . items ( ) : if option_type not in merged : merged [ option_type ] = { } merged [ option_type ] . update ( options ) return merged
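The docstring example runs as-is; note that because only the outer dict is copied, the inner dicts of old_opts are updated in place as a side effect:

old_opts = {'a': {'x': 'old', 'y': 'old'}}
new_opts = {'a': {'y': 'new', 'z': 'new'}, 'b': {'k': 'new'}}

merged = merge_option_dicts(old_opts, new_opts)
print(merged)
# {'a': {'x': 'old', 'y': 'new', 'z': 'new'}, 'b': {'k': 'new'}}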
def _spawn_background_rendering ( self , rate = 5.0 ) : """Spawns a thread that updates the render window . Sometimes directly modifying object data doesn ' t trigger Modified ( ) and upstream objects won ' t be updated . This ensures the render window stays updated without consuming too many resources ."""
self . render_trigger . connect ( self . ren_win . Render ) twait = rate ** - 1 def render ( ) : while self . active : time . sleep ( twait ) self . _render ( ) self . render_thread = Thread ( target = render ) self . render_thread . start ( )
def combine_reports ( original , new ) : """Combines two gcov reports for a file into one by adding the number of hits on each line"""
if original is None : return new report = { } report [ 'name' ] = original [ 'name' ] report [ 'source_digest' ] = original [ 'source_digest' ] coverage = [ ] for original_num , new_num in zip ( original [ 'coverage' ] , new [ 'coverage' ] ) : if original_num is None : coverage . append ( new_num ) elif new_num is None : coverage . append ( original_num ) else : coverage . append ( original_num + new_num ) report [ 'coverage' ] = coverage return report
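A small worked example with two per-file coverage arrays, where None marks a non-executable line; the dictionary fields match the ones the function already reads:

original = {'name': 'foo.c', 'source_digest': 'abc123',
            'coverage': [1, 0, None, 2]}
new = {'name': 'foo.c', 'source_digest': 'abc123',
       'coverage': [0, 3, None, None]}

print(combine_reports(original, new)['coverage'])  # [1, 3, None, 2]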
def _hlink ( self ) : """Reference to the ` a : hlinkClick ` or ` a : hlinkHover ` element for this click action . Returns | None | if the element is not present ."""
if self . _hover : return self . _element . hlinkHover return self . _element . hlinkClick
def set_encode_key_value ( self , value , store_type ) : """Save the key value base on it ' s storage type ."""
self . _store_type = store_type if store_type == PUBLIC_KEY_STORE_TYPE_HEX : self . _value = value . hex ( ) elif store_type == PUBLIC_KEY_STORE_TYPE_BASE64 : self . _value = b64encode ( value ) . decode ( ) elif store_type == PUBLIC_KEY_STORE_TYPE_BASE85 : self . _value = b85encode ( value ) . decode ( ) elif store_type == PUBLIC_KEY_STORE_TYPE_JWK : # TODO : need to decide on which jwk library to import ? raise NotImplementedError else : self . _value = value return value
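The three non-JWK storage formats can be previewed with the standard library alone; the comments name the corresponding PUBLIC_KEY_STORE_TYPE_* branches and the key bytes are dummies:

from base64 import b64encode, b85encode

value = bytes(range(8))          # dummy key bytes

print(value.hex())               # hex store type
print(b64encode(value).decode()) # base64 store type
print(b85encode(value).decode()) # base85 store type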
def wait ( * args , ** kwargs ) : """Wrapping ' wait ( ) ' method of ' waiting ' library with default parameter values . WebDriverException is ignored in the expected exceptions by default ."""
kwargs . setdefault ( 'sleep_seconds' , ( 1 , None ) ) kwargs . setdefault ( 'expected_exceptions' , WebDriverException ) kwargs . setdefault ( 'timeout_seconds' , webium . settings . wait_timeout ) return wait_lib ( * args , ** kwargs )
def new_label_descriptors ( defaults , keys ) : """create labels for the metric _ descriptor that will be sent to Stackdriver Monitoring"""
label_descriptors = [ ] for lk in itertools . chain . from_iterable ( ( defaults . keys ( ) , keys ) ) : label = { } label [ "key" ] = sanitize_label ( lk . key ) label [ "description" ] = lk . description label_descriptors . append ( label ) return label_descriptors
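A minimal sketch with namedtuple stand-ins for the label-key objects; sanitize_label is assumed to be the module's own helper, so an identity function is substituted here:

import itertools
from collections import namedtuple

LabelKey = namedtuple('LabelKey', ['key', 'description'])

def build_label_descriptors(defaults, keys, sanitize=lambda k: k):
    # Same chaining as above: default label keys first, then the metric's own keys.
    descriptors = []
    for lk in itertools.chain.from_iterable((defaults.keys(), keys)):
        descriptors.append({'key': sanitize(lk.key), 'description': lk.description})
    return descriptors

defaults = {LabelKey('opencensus_task', 'Opencensus task identifier'): 'py@host'}
keys = [LabelKey('method', 'HTTP method')]
print(build_label_descriptors(defaults, keys))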
def set_axis_color ( axis , color , alpha = None ) : """Sets the spine color of all sides of an axis ( top , right , bottom , left ) ."""
for side in ( 'top' , 'right' , 'bottom' , 'left' ) : spine = axis . spines [ side ] spine . set_color ( color ) if alpha is not None : spine . set_alpha ( alpha )
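Typical usage with a Matplotlib Axes object (the axis argument is really an Axes, since that is where the spines live):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
set_axis_color(ax, 'crimson', alpha=0.5)  # recolours and dims all four spines
fig.savefig('spines.png')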
def get_slices ( self , idx , shape ) -> Tuple [ IntOrSlice , ... ] : """Return a | tuple | of one | int | and some | slice | objects to accesses all values of a certain device within | NetCDFVariableDeep . array | . > > > from hydpy . core . netcdftools import NetCDFVariableDeep > > > ncvar = NetCDFVariableDeep ( ' test ' , isolate = False , timeaxis = 1) > > > ncvar . get _ slices ( 2 , [ 3 ] ) (2 , slice ( None , None , None ) , slice ( 0 , 3 , None ) ) > > > ncvar . get _ slices ( 4 , ( 1 , 2 ) ) (4 , slice ( None , None , None ) , slice ( 0 , 1 , None ) , slice ( 0 , 2 , None ) ) > > > ncvar = NetCDFVariableDeep ( ' test ' , isolate = False , timeaxis = 0) > > > ncvar . get _ slices ( 4 , ( 1 , 2 ) ) ( slice ( None , None , None ) , 4 , slice ( 0 , 1 , None ) , slice ( 0 , 2 , None ) )"""
slices = list ( self . get_timeplaceslice ( idx ) ) for length in shape : slices . append ( slice ( 0 , length ) ) return tuple ( slices )
def patch_relationship ( self , session , json_data , api_type , obj_id , rel_key ) : """Replacement of relationship values . : param session : SQLAlchemy session : param json _ data : Request JSON Data : param api _ type : Type of the resource : param obj _ id : ID of the resource : param rel _ key : Key of the relationship to fetch"""
model = self . _fetch_model ( api_type ) resource = self . _fetch_resource ( session , api_type , obj_id , Permissions . EDIT ) if rel_key not in resource . __jsonapi_map_to_py__ . keys ( ) : raise RelationshipNotFoundError ( resource , resource , rel_key ) py_key = resource . __jsonapi_map_to_py__ [ rel_key ] relationship = self . _get_relationship ( resource , py_key , Permissions . EDIT ) self . _check_json_data ( json_data ) session . add ( resource ) remote_side = relationship . back_populates try : if relationship . direction == MANYTOONE : if not isinstance ( json_data [ 'data' ] , dict ) and json_data [ 'data' ] is not None : raise ValidationError ( 'Provided data must be a hash.' ) related = getattr ( resource , relationship . key ) check_permission ( related , None , Permissions . EDIT ) check_permission ( related , remote_side , Permissions . EDIT ) setter = get_rel_desc ( resource , relationship . key , RelationshipActions . SET ) if json_data [ 'data' ] is None : setter ( resource , None ) else : to_relate = self . _fetch_resource ( session , json_data [ 'data' ] [ 'type' ] , json_data [ 'data' ] [ 'id' ] , Permissions . EDIT ) check_permission ( to_relate , remote_side , Permissions . EDIT ) setter ( resource , to_relate ) else : if not isinstance ( json_data [ 'data' ] , list ) : raise ValidationError ( 'Provided data must be an array.' ) related = getattr ( resource , relationship . key ) remover = get_rel_desc ( resource , relationship . key , RelationshipActions . DELETE ) appender = get_rel_desc ( resource , relationship . key , RelationshipActions . APPEND ) for item in related : check_permission ( item , None , Permissions . EDIT ) remote = item . __mapper__ . relationships [ remote_side ] if remote . direction == MANYTOONE : check_permission ( item , remote_side , Permissions . EDIT ) else : check_permission ( item , remote_side , Permissions . DELETE ) remover ( resource , item ) for item in json_data [ 'data' ] : to_relate = self . _fetch_resource ( session , item [ 'type' ] , item [ 'id' ] , Permissions . EDIT ) remote = to_relate . __mapper__ . relationships [ remote_side ] if remote . direction == MANYTOONE : check_permission ( to_relate , remote_side , Permissions . EDIT ) else : check_permission ( to_relate , remote_side , Permissions . CREATE ) appender ( resource , to_relate ) session . commit ( ) except KeyError : raise ValidationError ( 'Incompatible Type' ) return self . get_relationship ( session , { } , model . __jsonapi_type__ , resource . id , rel_key )
def qteAddMiniApplet ( self , appletObj : QtmacsApplet ) : """Install ` ` appletObj ` ` as the mini applet in the window layout . At any given point there can ever only be one mini applet in the entire Qtmacs application , irrespective of how many windows are open . Note that this method does nothing if a custom mini applet is already active . Use ` ` qteKillMiniApplet ` ` to remove that one first before installing a new one . | Args | * ` ` appletObj ` ` ( * * QtmacsApplet * * ) : the new mini applet . | Returns | * * * bool * * : if * * True * * the mini applet was installed successfully . | Raises | * * * QtmacsArgumentError * * if at least one argument has an invalid type ."""
# Do nothing if a custom mini applet has already been # installed . if self . _qteMiniApplet is not None : msg = 'Cannot replace mini applet more than once.' self . qteLogger . warning ( msg ) return False # Arrange all registered widgets inside this applet # automatically if the mini applet object did not install its # own layout . if appletObj . layout ( ) is None : appLayout = QtGui . QHBoxLayout ( ) for handle in appletObj . _qteAdmin . widgetList : appLayout . addWidget ( handle ) appletObj . setLayout ( appLayout ) # Now that we have decided to install this mini applet , keep a # reference to it and set the mini applet flag in the # applet . This flag is necessary for some methods to separate # conventional applets from mini applets . appletObj . _qteAdmin . isMiniApplet = True self . _qteMiniApplet = appletObj # Shorthands . app = self . _qteActiveApplet appWin = self . qteActiveWindow ( ) # Remember which window and applet spawned this mini applet . self . _qteMiniApplet . _qteCallingApplet = app self . _qteMiniApplet . _qteCallingWindow = appWin del app # Add the mini applet to the applet registry , ie . for most # purposes the mini applet is treated like any other applet . self . _qteAppletList . insert ( 0 , self . _qteMiniApplet ) # Add the mini applet to the respective splitter in the window # layout and show it . appWin . qteLayoutSplitter . addWidget ( self . _qteMiniApplet ) self . _qteMiniApplet . show ( True ) # Give focus to first focusable widget in the mini applet # applet ( if one exists ) wid = self . _qteMiniApplet . qteNextWidget ( numSkip = 0 ) self . _qteMiniApplet . qteMakeWidgetActive ( wid ) self . qteMakeAppletActive ( self . _qteMiniApplet ) # Mini applet was successfully installed . return True
def get_processes ( self ) : """Grab a shuffled list of all currently running process names"""
procs = set ( ) try : # POSIX ps , so it should work in most environments where doge would p = sp . Popen ( [ 'ps' , '-A' , '-o' , 'comm=' ] , stdout = sp . PIPE ) output , error = p . communicate ( ) if sys . version_info > ( 3 , 0 ) : output = output . decode ( 'utf-8' ) for comm in output . split ( '\n' ) : name = comm . split ( '/' ) [ - 1 ] # Filter short and weird ones if name and len ( name ) >= 2 and ':' not in name : procs . add ( name ) finally : # Either it executed properly or no ps was found . proc_list = list ( procs ) random . shuffle ( proc_list ) return proc_list
def derived_from_all ( self , identities : List [ QualName ] ) -> MutableSet [ QualName ] : """Return the set of identities transitively derived from all of ` identities ` ."""
if not identities : return set ( ) res = self . derived_from ( identities [ 0 ] ) for id in identities [ 1 : ] : res &= self . derived_from ( id ) return res
def read ( filename , ** kwargs ) : """Read a generic input file into a recarray . Accepted file formats : [ . fits , . fz , . npy , . csv , . txt , . dat ] Parameters : filename : input file name kwargs : keyword arguments for the reader Returns : recarray : data array"""
base , ext = os . path . splitext ( filename ) if ext in ( '.fits' , '.fz' ) : # Abstract fits here . . . return fitsio . read ( filename , ** kwargs ) elif ext in ( '.npy' , ) : return np . load ( filename , ** kwargs ) elif ext in ( '.csv' , ) : return np . recfromcsv ( filename , ** kwargs ) elif ext in ( '.txt' , '.dat' ) : return np . genfromtxt ( filename , ** kwargs ) msg = "Unrecognized file type: %s" % filename raise ValueError ( msg )
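A round-trip check of the NumPy branch; the temporary file name is arbitrary:

import os
import tempfile
import numpy as np

arr = np.array([(1, 2.5), (2, 3.5)], dtype=[('id', 'i4'), ('val', 'f8')])
path = os.path.join(tempfile.mkdtemp(), 'sample.npy')
np.save(path, arr)

data = read(path)
print(data['val'])  # [2.5 3.5]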
def create_issue ( self , title , body = None , assignee = None , milestone = None , labels = None ) : """Creates an issue on this repository . : param str title : ( required ) , title of the issue : param str body : ( optional ) , body of the issue : param str assignee : ( optional ) , login of the user to assign the issue to : param int milestone : ( optional ) , id number of the milestone to attribute this issue to ( e . g . ` ` m ` ` is a : class : ` Milestone < github3 . issues . milestone . Milestone > ` object , ` ` m . number ` ` is what you pass here . ) : param labels : ( optional ) , labels to apply to this issue : type labels : list of strings : returns : : class : ` Issue < github3 . issues . issue . Issue > ` if successful , otherwise None"""
issue = { 'title' : title , 'body' : body , 'assignee' : assignee , 'milestone' : milestone , 'labels' : labels } self . _remove_none ( issue ) json = None if issue : url = self . _build_url ( 'issues' , base_url = self . _api ) json = self . _json ( self . _post ( url , data = issue ) , 201 ) return Issue ( json , self ) if json else None
def wrap_content_as_binary_if_needed ( func_ , * args , ** kwargs ) : """destination ( rethinkdb ) needs the id field as primary key put the Name field into the ID field : param func _ : : param args : : param kwargs : : return :"""
assert isinstance ( args [ 0 ] , dict ) try : args [ 0 ] [ CONTENT_FIELD ] = BINARY ( args [ 0 ] . get ( CONTENT_FIELD , b"" ) ) except ( r . errors . ReqlDriverCompileError , AttributeError ) : # pragma : no cover pass # toss in the object as string return func_ ( * args , ** kwargs )
def _publish_deferred_messages ( self ) : """Called when pika is connected and has a channel open to publish any requests buffered ."""
global message_stack if not self . _rabbitmq_is_closed and message_stack : LOGGER . info ( 'Publishing %i deferred message(s)' , len ( message_stack ) ) while message_stack : self . _publish_message ( * message_stack . pop ( ) )
def from_file ( cls , filename , keep_neg = False , ** kwargs ) : """Create a spectrum from file . If filename has ' fits ' or ' fit ' suffix , it is read as FITS . Otherwise , it is read as ASCII . Parameters filename : str Spectrum filename . keep _ neg : bool See ` ~ synphot . models . Empirical1D ` . kwargs : dict Keywords acceptable by : func : ` ~ synphot . specio . read _ fits _ spec ` ( if FITS ) or : func : ` ~ synphot . specio . read _ ascii _ spec ` ( if ASCII ) . Returns sp : ` SourceSpectrum ` Empirical spectrum ."""
header , wavelengths , fluxes = specio . read_spec ( filename , ** kwargs ) return cls ( Empirical1D , points = wavelengths , lookup_table = fluxes , keep_neg = keep_neg , meta = { 'header' : header } )
def pending_transactions ( server ) : """get the no . of pending transactions ( 0 confirmations ) on a server"""
namecoind = NamecoindClient ( server , NAMECOIND_PORT , NAMECOIND_USER , NAMECOIND_PASSWD ) reply = namecoind . listtransactions ( "" , 10000 ) counter = 0 for i in reply : if i [ 'confirmations' ] == 0 : counter += 1 return counter
def p_const_value_primitive ( self , p ) : '''const _ value _ primitive : INTCONSTANT | DUBCONSTANT | LITERAL | const _ bool'''
p [ 0 ] = ast . ConstPrimitiveValue ( p [ 1 ] , lineno = p . lineno ( 1 ) )
def setObjectName ( self , objectName ) : """Updates the style sheet for this line edit when the name changes . : param objectName | < str >"""
super ( XLineEdit , self ) . setObjectName ( objectName ) self . adjustStyleSheet ( )
def _clip_line ( self , line_pt_1 , line_pt_2 ) : """clip line to canvas"""
x_min = min ( line_pt_1 [ 0 ] , line_pt_2 [ 0 ] ) x_max = max ( line_pt_1 [ 0 ] , line_pt_2 [ 0 ] ) y_min = min ( line_pt_1 [ 1 ] , line_pt_2 [ 1 ] ) y_max = max ( line_pt_1 [ 1 ] , line_pt_2 [ 1 ] ) extent = self . extent ( ) if line_pt_1 [ 0 ] == line_pt_2 [ 0 ] : return ( ( line_pt_1 [ 0 ] , max ( y_min , extent [ 1 ] ) ) , ( line_pt_1 [ 0 ] , min ( y_max , extent [ 3 ] ) ) ) if line_pt_1 [ 1 ] == line_pt_2 [ 1 ] : return ( ( max ( x_min , extent [ 0 ] ) , line_pt_1 [ 1 ] ) , ( min ( x_max , extent [ 2 ] ) , line_pt_1 [ 1 ] ) ) if ( ( extent [ 0 ] <= line_pt_1 [ 0 ] < extent [ 2 ] ) and ( extent [ 1 ] <= line_pt_1 [ 1 ] < extent [ 3 ] ) and ( extent [ 0 ] <= line_pt_2 [ 0 ] < extent [ 2 ] ) and ( extent [ 1 ] <= line_pt_2 [ 1 ] < extent [ 3 ] ) ) : return line_pt_1 , line_pt_2 ts = [ 0.0 , 1.0 , float ( extent [ 0 ] - line_pt_1 [ 0 ] ) / ( line_pt_2 [ 0 ] - line_pt_1 [ 0 ] ) , float ( extent [ 2 ] - line_pt_1 [ 0 ] ) / ( line_pt_2 [ 0 ] - line_pt_1 [ 0 ] ) , float ( extent [ 1 ] - line_pt_1 [ 1 ] ) / ( line_pt_2 [ 1 ] - line_pt_1 [ 1 ] ) , float ( extent [ 3 ] - line_pt_1 [ 1 ] ) / ( line_pt_2 [ 1 ] - line_pt_1 [ 1 ] ) ] ts . sort ( ) if ( ts [ 2 ] < 0 ) or ( ts [ 2 ] >= 1 ) or ( ts [ 3 ] < 0 ) or ( ts [ 2 ] >= 1 ) : return None result = [ ( pt_1 + t * ( pt_2 - pt_1 ) ) for t in ( ts [ 2 ] , ts [ 3 ] ) for ( pt_1 , pt_2 ) in zip ( line_pt_1 , line_pt_2 ) ] return ( result [ : 2 ] , result [ 2 : ] )
def rowsAfterValue ( self , value , count ) : """Retrieve some rows at or after a given sort - column value . @ param value : Starting value in the index for the current sort column at which to start returning results . Rows with a column value for the current sort column which is greater than or equal to this value will be returned . @ type value : Some type compatible with the current sort column , or None , to specify the beginning of the data . @ param count : The maximum number of rows to return . @ type count : C { int } @ return : A list of row data , ordered by the current sort column , beginning at C { value } and containing at most C { count } elements ."""
if value is None : query = self . inequalityQuery ( None , count , True ) else : pyvalue = self . _toComparableValue ( value ) currentSortAttribute = self . currentSortColumn . sortAttribute ( ) query = self . inequalityQuery ( currentSortAttribute >= pyvalue , count , True ) return self . constructRows ( query )
def init_disk_cache ( self ) : """Initialize the on - disk version of the cache ."""
try : # Cleanup old disk cache files path = self . disk_cache_location os . remove ( path ) except Exception : pass self . disk_cache_location = os . path . join ( tempfile . mkdtemp ( ) , 'cache' )
def prepare_image ( tarpath , outfolder , ** kwargs ) : """Unpack the OS image stored at tarpath to outfolder . Prepare the unpacked image for use as a VR base image ."""
outfolder = path . Path ( outfolder ) untar ( tarpath , outfolder , ** kwargs ) # Some OSes have started making / etc / resolv . conf into a symlink to # / run / resolv . conf . That prevents us from bind - mounting to that # location . So delete that symlink , if it exists . resolv_path = outfolder / 'etc' / 'resolv.conf' if resolv_path . islink ( ) : resolv_path . remove ( ) . write_text ( '' , encoding = 'ascii' )
def serial_control_send ( self , device , flags , timeout , baudrate , count , data , force_mavlink1 = False ) : '''Control a serial port . This can be used for raw access to an onboard serial peripheral such as a GPS or telemetry radio . It is designed to make it possible to update the devices firmware via MAVLink messages or change the devices settings . A message with zero bytes can be used to change just the baudrate . device : See SERIAL _ CONTROL _ DEV enum ( uint8 _ t ) flags : See SERIAL _ CONTROL _ FLAG enum ( uint8 _ t ) timeout : Timeout for reply data in milliseconds ( uint16 _ t ) baudrate : Baudrate of transfer . Zero means no change . ( uint32 _ t ) count : how many bytes in this transfer ( uint8 _ t ) data : serial data ( uint8 _ t )'''
return self . send ( self . serial_control_encode ( device , flags , timeout , baudrate , count , data ) , force_mavlink1 = force_mavlink1 )