repository_name | func_path_in_repository | func_name | language | func_code_string | func_documentation_string | split_name | func_code_url | enclosing_scope |
---|---|---|---|---|---|---|---|---|
chaintope/bitcoinrb | lib/bitcoin/ext_key.rb | Bitcoin.ExtPubkey.derive | ruby | def derive(number)
new_key = ExtPubkey.new
new_key.depth = depth + 1
new_key.number = number
new_key.parent_fingerprint = fingerprint
raise 'hardened key is not supported' if number > (HARDENED_THRESHOLD - 1)
data = pub.htb << [number].pack('N')
l = Bitcoin.hmac_sha512(chain_code, data)
left = l[0..31].bth.to_i(16)
raise 'invalid key' if left >= CURVE_ORDER
p1 = Bitcoin::Secp256k1::GROUP.generator.multiply_by_scalar(left)
p2 = Bitcoin::Key.new(pubkey: pubkey, key_type: key_type).to_point
new_key.pubkey = ECDSA::Format::PointOctetString.encode(p1 + p2, compression: true).bth
new_key.chain_code = l[32..-1]
new_key.ver = version
new_key
end | derive child key | train | https://github.com/chaintope/bitcoinrb/blob/39396e4c9815214d6b0ab694fa8326978a7f5438/lib/bitcoin/ext_key.rb#L254-L270 | class ExtPubkey
attr_accessor :ver
attr_accessor :depth
attr_accessor :number
attr_accessor :chain_code
attr_accessor :pubkey # hex format
attr_accessor :parent_fingerprint
# serialize extended pubkey
def to_payload
version.htb << [depth].pack('C') <<
parent_fingerprint.htb << [number].pack('N') << chain_code << pub.htb
end
def pub
pubkey
end
def hash160
Bitcoin.hash160(pub)
end
# get address
def addr
case version
when Bitcoin.chain_params.bip49_pubkey_p2wpkh_p2sh_version
key.to_nested_p2wpkh
when Bitcoin.chain_params.bip84_pubkey_p2wpkh_version
key.to_p2wpkh
else
key.to_p2pkh
end
end
# get key object
# @return [Bitcoin::Key]
def key
Bitcoin::Key.new(pubkey: pubkey, key_type: key_type)
end
# get key identifier
def identifier
Bitcoin.hash160(pub)
end
# get fingerprint
def fingerprint
identifier.slice(0..7)
end
# Base58 encoded extended pubkey
def to_base58
h = to_payload.bth
hex = h + Bitcoin.calc_checksum(h)
Base58.encode(hex)
end
# whether this is a hardened key.
def hardened?
number >= HARDENED_THRESHOLD
end
# derive child key
# get version bytes using serialization format
def version
return ExtPubkey.version_from_purpose(number) if depth == 1
ver ? ver : Bitcoin.chain_params.extended_pubkey_version
end
# get key type defined by BIP-178 using version.
def key_type
v = version
case v
when Bitcoin.chain_params.bip49_pubkey_p2wpkh_p2sh_version
Bitcoin::Key::TYPES[:p2wpkh_p2sh]
when Bitcoin.chain_params.bip84_pubkey_p2wpkh_version
Bitcoin::Key::TYPES[:p2wpkh]
when Bitcoin.chain_params.extended_pubkey_version
Bitcoin::Key::TYPES[:compressed]
end
end
def self.parse_from_payload(payload)
buf = StringIO.new(payload)
ext_pubkey = ExtPubkey.new
ext_pubkey.ver = buf.read(4).bth # version
raise 'An unsupported version byte was specified.' unless ExtPubkey.support_version?(ext_pubkey.ver)
ext_pubkey.depth = buf.read(1).unpack('C').first
ext_pubkey.parent_fingerprint = buf.read(4).bth
ext_pubkey.number = buf.read(4).unpack('N').first
ext_pubkey.chain_code = buf.read(32)
ext_pubkey.pubkey = buf.read(33).bth
ext_pubkey
end
# import pub key from a Base58 extended pubkey address
def self.from_base58(address)
ExtPubkey.parse_from_payload(Base58.decode(address).htb)
end
# get version bytes from purpose' value.
def self.version_from_purpose(purpose)
v = purpose - HARDENED_THRESHOLD
case v
when 49
Bitcoin.chain_params.bip49_pubkey_p2wpkh_p2sh_version
when 84
Bitcoin.chain_params.bip84_pubkey_p2wpkh_version
else
Bitcoin.chain_params.extended_pubkey_version
end
end
# check whether +version+ is supported version bytes.
def self.support_version?(version)
p = Bitcoin.chain_params
[p.bip49_pubkey_p2wpkh_p2sh_version, p.bip84_pubkey_p2wpkh_version, p.extended_pubkey_version].include?(version)
end
end
|
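A minimal usage sketch for `ExtPubkey#derive` above (assumes the bitcoinrb gem; the xpub string is a placeholder, not a real key):

```ruby
require 'bitcoin'

Bitcoin.chain_params = :mainnet

# Parse a serialized extended public key (placeholder string).
xpub = Bitcoin::ExtPubkey.from_base58('xpub...placeholder...')

# Derive the non-hardened child at index 0 (m/.../0).
child = xpub.derive(0)
puts child.to_base58 # serialized child extended pubkey
puts child.addr      # address for the derived child key

# Hardened indices (>= 2**31) cannot be derived from a public key alone;
# xpub.derive(2**31) raises in the method shown above.
```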
caruby/core | lib/caruby/database/persistable.rb | CaRuby.Persistable.saved_attributes_to_fetch | ruby | def saved_attributes_to_fetch(operation)
# only fetch a create, not an update (note that subclasses can override this condition)
if operation.type == :create or operation.autogenerated? then
# Filter the class saved fetch attributes for content.
self.class.saved_attributes_to_fetch.select { |pa| not send(pa).nil_or_empty? }
else
Array::EMPTY_ARRAY
end
end | Returns this domain object's attributes which must be fetched to reflect the database state.
This default implementation returns the class's saved fetch attributes that already hold
content when the save operation is a create or is auto-generated, and an empty array otherwise.
Subclasses can override to relax or restrict the condition.
@quirk caCORE the auto-generated criterion is a necessary but not sufficient condition
to determine whether a save caCORE result reflects the database state. Example:
* caTissue SCG event parameters are not auto-generated on SCG create if the SCG collection
status is Pending, but are auto-generated on SCG update if the SCG status is changed
to Complete. By contrast, the SCG specimens are auto-generated on SCG create, even if
the status is +Pending+.
The caBIG application can override this method in a Database subclass to fine-tune the
fetch criteria. Adding a more restrictive {#fetch_saved?} condition will improve
performance but not change functionality.
@quirk caCORE a saved attribute which is cascaded but not fetched must be fetched in
order to reflect the database identifier in the saved object.
@param [Database::Operation] operation the save operation
@return [<Symbol>] the attributes which must be fetched to reflect the database state | train | https://github.com/caruby/core/blob/a682dc57c6fa31aef765cdd206ed3d4b4c289c60/lib/caruby/database/persistable.rb#L328-L336 | module Persistable
# @return [{Symbol => Object}] the content value hash at the point of the last snapshot
attr_reader :snapshot
# @param [Jinx::Resource, <Jinx::Resource>, nil] obj the object(s) to check
# @return [Boolean] whether the given object(s) have an identifier
def self.saved?(obj)
if obj.nil_or_empty? then
false
elsif obj.collection? then
obj.all? { |ref| saved?(ref) }
else
!!obj.identifier
end
end
# @param [Jinx::Resource, <Jinx::Resource>, nil] obj the object(s) to check
# @return [Boolean] whether at least one of the given object(s) does not have an identifier
def self.unsaved?(obj)
not (obj.nil_or_empty? or saved?(obj))
end
# Returns the data access mediator for this domain object.
# Application #{Jinx::Resource} modules are required to override this method.
#
# @return [Database] the data access mediator for this Persistable, if any
# @raise [DatabaseError] if the subclass does not override this method
def database
raise ValidationError.new("#{self} database is missing")
end
# @return [PersistenceService] the database application service for this Persistable
def persistence_service
database.persistence_service(self.class)
end
# Fetches the domain objects which match this template from the {#database}.
#
# @param path (see Reader#query)
# @return (see Reader#query)
# @raise (see #database)
# @raise (see Reader#query)
# @see Reader#query
def query(*path)
path.empty? ? database.query(self) : database.query(self, *path)
end
# Fetches this domain object from the {#database}.
#
# @param opts (see Reader#find)
# @option (see Reader#find)
# @return (see Reader#find)
# @raise (see #database)
# @raise (see Reader#find)
# @see Reader#find
def find(opts=nil)
database.find(self, opts)
end
# Creates this domain object in the {#database}.
#
# @return (see Writer#create)
# @raise (see #database)
# @raise (see Writer#create)
# @see Writer#create
def create
database.create(self)
end
# Creates this domain object, if necessary.
#
# @raise (see Database#ensure_exists)
def ensure_exists
database.ensure_exists(self)
end
# Saves this domain object in the {#database}.
#
# @return (see Writer#save)
# @raise (see #database)
# @raise (see Writer#save)
# @see Writer#save
def save
database.save(self)
end
alias :store :save
# Updates this domain object in the {#database}.
#
# @return (see Writer#update)
# @raise (see #database)
# @raise (see Writer#update)
# @see Writer#update
def update
database.update(self)
end
# Deletes this domain object from the {#database}.
#
# @return (see Writer#delete)
# @raise (see #database)
# @raise (see Writer#delete)
# @see Writer#delete
def delete
database.delete(self)
end
# @return [Boolean] whether this domain object can be updated
# (default is true, subclasses can override)
def updatable?
true
end
alias :== :equal?
alias :eql? :==
# Captures the Persistable's updatable attribute base values.
# The snapshot is subsequently accessible using the {#snapshot} method.
#
# @return [{Symbol => Object}] the snapshot value hash
def take_snapshot
@snapshot = value_hash(self.class.updatable_attributes)
end
# @return [Boolean] whether this Persistable has a {#snapshot}
def fetched?
!!@snapshot
end
# Merges the other domain object non-domain attribute values into this domain object's snapshot,
# An existing snapshot value is replaced by the corresponding other attribute value.
#
# @param [Jinx::Resource] other the source domain object
# @raise [ValidationError] if this domain object does not have a snapshot
def merge_into_snapshot(other)
if @snapshot.nil? then
raise ValidationError.new("Cannot merge #{other.qp} content into #{qp} snapshot, since #{qp} does not have a snapshot.")
end
# the non-domain attribute => [target value, other value] difference hash
delta = diff(other)
# the difference attribute => other value hash, excluding nil other values
dvh = delta.transform_value { |d| d.last }
return if dvh.empty?
logger.debug { "#{qp} differs from database content #{other.qp} as follows: #{delta.filter_on_key { |pa| dvh.has_key?(pa) }.qp}" }
logger.debug { "Setting #{qp} snapshot values from other #{other.qp} values to reflect the database state: #{dvh.qp}..." }
# update the snapshot from the other value to reflect the database state
@snapshot.merge!(dvh)
end
# Returns whether this Persistable either doesn't have a snapshot or has changed since the last snapshot.
# This is a conservative condition test that returns false if there is no snapshot for this Persistable
# and therefore no basis to determine whether the content changed. If the attribute parameter is given,
# then only that attribute is checked for a change. Otherwise, all attributes are checked.
#
# @param [Symbol, nil] attribute the optional attribute to check.
# @return [Boolean] whether this Persistable's content differs from its snapshot
def changed?(attribute=nil)
@snapshot.nil? or not snapshot_equal_content?(attribute)
end
# @return [<Symbol>] the attributes which differ between the {#snapshot} and current content
def changed_attributes
if @snapshot then
ovh = value_hash(self.class.updatable_attributes)
diff = @snapshot.diff(ovh) { |pa, v, ov| Jinx::Resource.value_equal?(v, ov) }
diff.keys
else
self.class.updatable_attributes
end
end
# Lazy loads the attributes. If a block is given to this method, then the attributes are determined
# by calling the block with this Persistable as a parameter. Otherwise, the default attributes
# are the unfetched domain attributes.
#
# Each of the attributes which does not already hold a non-nil or non-empty value
# will be loaded from the database on demand.
# This method injects attribute value initialization into each loadable attribute reader.
# The initializer is given by the loader Proc argument.
# The loader takes two arguments, the target object and the attribute to load.
# If this Persistable already has a lazy loader, then this method is a no-op.
#
# Lazy loading is disabled on an attribute after it is invoked on that attribute or when the
# attribute setter method is called.
#
# @param loader [LazyLoader] the lazy loader to add
def add_lazy_loader(loader, attributes=nil)
# guard against invalid call
if identifier.nil? then raise ValidationError.new("Cannot add lazy loader to an unfetched domain object: #{self}") end
# the attributes to lazy-load
attributes ||= loadable_attributes
return if attributes.empty?
# define the reader and writer method overrides for the missing attributes
pas = attributes.select { |pa| inject_lazy_loader(pa) }
logger.debug { "Lazy loader added to #{qp} attributes #{pas.to_series}." } unless pas.empty?
end
# Returns the attributes to load on demand. The base attribute list is given by the
# {Propertied#loadable_attributes} whose value is nil or empty.
# In addition, if this Persistable has more than one {Domain::Dependency#owner_attributes}
# and one is non-nil, then none of the owner attributes are loaded on demand,
# since there can be at most one owner and ownership cannot change.
#
# @return [<Symbol>] the attributes to load on demand
def loadable_attributes
pas = self.class.loadable_attributes.select { |pa| send(pa).nil_or_empty? }
ownr_attrs = self.class.owner_attributes
# If there is an owner, then variant owners are not loaded.
if ownr_attrs.size > 1 and ownr_attrs.any? { |pa| not send(pa).nil_or_empty? } then
pas - ownr_attrs
else
pas
end
end
# Disables lazy loading of the specified attribute. Lazy loading is disabled for all attributes
# if no attribute is specified. This method is a no-op if this Persistable does not have a lazy
# loader.
#
# @param [Symbol, nil] attribute the attribute to remove from the load list, or nil to remove all attributes
def remove_lazy_loader(attribute=nil)
if attribute.nil? then
return self.class.domain_attributes.each { |pa| remove_lazy_loader(pa) }
end
# the modified accessor method
reader, writer = self.class.property(attribute).accessors
# remove the reader override
disable_singleton_method(reader)
# remove the writer override
disable_singleton_method(writer)
end
# Wrap +Resource.dump+ to disable the lazy-loader while printing.
def dump
do_without_lazy_loader { super }
end
# Executes the given block with the database lazy loader disabled, if any.
#
# @yield the block to execute
def do_without_lazy_loader(&block)
if database then
database.lazy_loader.disable(&block)
else
yield
end
end
# Validates this domain object and its #{Propertied#unproxied_savable_template_attributes}
# for consistency and completeness prior to a database create operation.
# An object is valid if it contains a non-nil value for each mandatory attribute.
# Objects which have already been validated are skipped.
#
# A Persistable class should not override this method, but override the
# private {#validate_local} method instead.
#
# @return [Persistable] this domain object
# @raise [Jinx::ValidationError] if the object state is invalid
def validate(autogenerated=false)
if (identifier.nil? or autogenerated) and not @validated then
validate_local
@validated = true
end
self.class.unproxied_savable_template_attributes.each do |pa|
send(pa).enumerate { |dep| dep.validate }
end
self
end
# Sets the default attribute values for this auto-generated domain object.
def add_defaults_autogenerated
add_defaults_recursive
end
# @return [Boolean] whether this domain object has {#searchable_attributes}
def searchable?
not searchable_attributes.nil?
end
# Returns the attributes to use for a search using this domain object as a template, determined
# as follows:
# * If this domain object has a non-nil primary key, then the primary key is the search criterion.
# * Otherwise, if this domain object has a secondary key and each key attribute value is not nil,
# then the secondary key is the search criterion.
# * Otherwise, if this domain object has an alternate key and each key attribute value is not nil,
# then the alternate key is the search criterion.
#
# @return [<Symbol>] the attributes to use for a search on this domain object
def searchable_attributes
key_props = self.class.primary_key_attributes
return key_props if key_searchable?(key_props)
key_props = self.class.secondary_key_attributes
return key_props if key_searchable?(key_props)
key_props = self.class.alternate_key_attributes
return key_props if key_searchable?(key_props)
end
# Returns this domain object's attributes which must be fetched to reflect the database state.
# This default implementation returns the class's saved fetch attributes that already hold
# content when the save operation is a create or is auto-generated, and an empty array otherwise.
# Subclasses can override to relax or restrict the condition.
#
# @quirk caCORE the auto-generated criterion is a necessary but not sufficient condition
# to determine whether a save caCORE result reflects the database state. Example:
# * caTissue SCG event parameters are not auto-generated on SCG create if the SCG collection
# status is Pending, but are auto-generated on SCG update if the SCG status is changed
# to Complete. By contrast, the SCG specimens are auto-generated on SCG create, even if
# the status is +Pending+.
# The caBIG application can override this method in a Database subclass to fine-tune the
# fetch criteria. Adding a more restrictive {#fetch_saved?} condition will improve
# performance but not change functionality.
#
# @quirk caCORE a saved attribute which is cascaded but not fetched must be fetched in
# order to reflect the database identifier in the saved object.
#
# @param [Database::Operation] operation the save operation
# @return [<Symbol>] the attributes which must be fetched to reflect the database state
# Relaxes the {#saved_attributes_to_fetch} condition for a SCG as follows:
# * If the SCG status was updated from +Pending+ to +Collected+, then fetch the saved SCG event parameters.
#
# @param (see #saved_attributes_to_fetch)
# @return (see #saved_attributes_to_fetch)
def autogenerated?(operation)
operation == :update && status_changed_to_complete? ? EVENT_PARAM_ATTRS : super
end
def fetch_autogenerated?(operation)
# only fetch a create, not an update (note that subclasses can override this condition)
operation == :update
# Check for an attribute with a value that might need to be changed in order to
# reflect the auto-generated database content.
self.class.autogenerated_logical_dependent_attributes.select { |pa| not send(pa).nil_or_empty? }
end
# Returns whether this domain object must be fetched to reflect the database state.
# This default implementation returns whether this domain object was created and
# there are any autogenerated attributes. Subclasses can override to relax or restrict
# the condition.
#
# @quirk caCORE The auto-generated criterion is a necessary but not sufficient condition
# to determine whether a save caCORE result reflects the database state. Example:
# * caTissue SCG event parameters are not auto-generated on SCG create if the SCG collection
# status is Pending, but are auto-generated on SCG update if the SCG status is changed
# to Complete. By contrast, the SCG specimens are auto-generated on SCG create, even if
# the status is +Pending+.
#
# The caBIG application can override this method in a Database subclass to fine-tune the
# fetch criteria. Adding a more restrictive {#fetch_saved?} condition will improve
# performance but not change functionality.
#
# @quirk caCORE A saved attribute which is cascaded but not fetched must be fetched in
# order to reflect the database identifier in the saved object.
#
# TODO - this method is no longer used. Should it be? If not, remove here and in catissue
# subclasses.
#
# @return [Boolean] whether this domain object must be fetched to reflect the database state
def fetch_saved?
# only fetch a create, not an update (note that subclasses can override this condition)
return false if identifier
# Check for an attribute with a value that might need to be changed in order to
# reflect the auto-generated database content.
ag_attrs = self.class.autogenerated_attributes
return false if ag_attrs.empty?
ag_attrs.any? { |pa| not send(pa).nil_or_empty? }
end
# Sets the {Propertied#volatile_nondomain_attributes} to the other fetched value,
# if different.
#
# @param [Jinx::Resource] other the fetched domain object reflecting the database state
def copy_volatile_attributes(other)
pas = self.class.volatile_nondomain_attributes
return if pas.empty?
pas.each do |pa|
val = send(pa)
oval = other.send(pa)
if val.nil? then
# Overwrite the current attribute value.
set_property_value(pa, oval)
logger.debug { "Set #{qp} volatile #{pa} to the fetched #{other.qp} database value #{oval.qp}." }
elsif oval != val and pa == :identifier then
# If this error occurs, then there is a serious match-merge flaw.
raise DatabaseError.new("Can't copy #{other} to #{self} with different identifier")
end
end
logger.debug { "Merged auto-generated attribute values #{pas.to_series} from #{other.qp} into #{self}..." }
end
private
# @return [Boolean] whether the given list of key attributes is non-empty and each attribute in the key has a non-nil value
def key_searchable?(attributes)
not (attributes.empty? or attributes.any? { |pa| send(pa).nil? })
end
# Returns whether the {#snapshot} and current content are equal.
# The attribute values _v_ and _ov_ of the snapshot and current content, resp., are
# compared with equality determined by {Jinx::Resource.value_equal?}.
#
# @param (see #changed?)
# @return [Boolean] whether the {#snapshot} and current content are equal
def snapshot_equal_content?(attribute=nil)
if attribute then
value = send(attribute)
ssval = @snapshot[attribute]
eq = Jinx::Resource.value_equal?(value, ssval)
unless eq then
logger.debug { "#{qp} #{attribute} snapshot value #{ssval.qp} differs from the current value #{value.qp}." }
end
return eq
end
vh = value_hash(self.class.updatable_attributes)
# KLUDGE TODO - confirm this is still a problem and fix.
# In the Galena frozen migration example, the SpecimenPosition snapshot doesn't include the identifier.
# This bug could be related to the problem of an abstract DomainObject not being added as a domain module class.
# Work around this here by setting the snapshot identifier.
# See the ClinicalTrials::Jinx::Resource rubydoc for more info.
if vh[:identifier] and not @snapshot[:identifier] then
@snapshot[:identifier] = vh[:identifier]
end
# END OF KLUDGE
if @snapshot.size < vh.size then
pa, pv = vh.detect { |a, v| not @snapshot.has_key?(a) }
logger.debug { "#{qp} is missing snapshot #{pa} compared to the current value #{pv.qp}." }
false
elsif @snapshot.size > vh.size then
pa, value = @snapshot.detect { |a, v| not vh.has_key?(a) }
logger.debug { "#{qp} has snapshot #{pa} value #{value.qp} not found in current content." }
false
else
@snapshot.all? do |pa, ssval|
pv = vh[pa]
eq = Jinx::Resource.value_equal?(pv, ssval)
unless eq then
logger.debug { "#{qp} #{pa} snapshot value #{ssval.qp} differs from the current value #{pv.qp}." }
end
eq
end
end
end
# Adds this Persistable lazy loader to the given attribute unless the attribute already holds a
# fetched reference.
#
# @param [Symbol] attribute the attribute to modify
# @return [Boolean] whether a loader was added to the attribute
def inject_lazy_loader(attribute)
# bail if there is already a value
return false if attribute_loaded?(attribute)
# the accessor methods to modify
reader, writer = self.class.property(attribute).accessors
# The singleton attribute reader method loads the reference once and thenceforth calls the
# standard reader.
instance_eval "def #{reader}; load_reference(:#{attribute}); end"
# The singleton attribute writer method removes the lazy loader once and thenceforth calls
# the standard writer.
instance_eval "def #{writer}(value); remove_lazy_loader(:#{attribute}); super; end"
true
end
# @param (see #inject_lazy_loader)
# @return [Boolean] whether the attribute references one or more domain objects, and each
# referenced object has an identifier
def attribute_loaded?(attribute)
value = transient_value(attribute)
return false if value.nil_or_empty?
Enumerable === value ? value.all? { |ref| ref.identifier } : value.identifier
end
# Loads the reference attribute database value into this Persistable.
#
# @param [Symbol] attribute the attribute to load
# @return the attribute value merged from the database value
def load_reference(attribute)
ldr = database.lazy_loader
# bypass the singleton method and call the class instance method if the lazy loader is disabled
return transient_value(attribute) unless ldr.enabled?
# First disable lazy loading for the attribute, since the reader method is called by the loader.
remove_lazy_loader(attribute)
# load the fetched value
merged = ldr.load(self, attribute)
# update dependent snapshots if necessary
pa = self.class.property(attribute)
if pa.dependent? then
# the owner attribute
oattr = pa.inverse
if oattr then
# update dependent snapshot with the owner, since the owner snapshot is taken when fetched but the
# owner might be set when the fetched dependent is merged into the owner dependent attribute.
merged.enumerate do |dep|
if dep.fetched? then
dep.snapshot[oattr] = self
logger.debug { "Updated the #{qp} fetched #{attribute} dependent #{dep.qp} snapshot with #{oattr} value #{qp}." }
end
end
end
end
merged
end
# @param (see #load_reference)
# @return the in-memory attribute value, without invoking the lazy loader
def transient_value(attribute)
self.class.instance_method(attribute).bind(self).call
end
# Disables the given singleton attribute accessor method.
#
# @param [String, Symbol] name_or_sym the accessor method to disable
def disable_singleton_method(name_or_sym)
return unless singleton_methods.include?(name_or_sym.to_s)
# dissociate the method from this instance
method = self.method(name_or_sym.to_sym)
method.unbind
# Method#unbind doesn't work in JRuby 1.1.6. In that case, redefine the singleton method to delegate
# to the class instance method.
if singleton_methods.include?(name_or_sym.to_s) then
args = (1..method.arity).map { |argnum| "arg#{argnum}" }.join(', ')
instance_eval "def #{name_or_sym}(#{args}); super; end"
end
end
end
|
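The fetch decision in `saved_attributes_to_fetch` can be illustrated outside caRuby. A standalone sketch of the same selection rule, using hypothetical attribute names and plain `OpenStruct` stand-ins rather than caRuby's API:

```ruby
require 'ostruct'

# Hypothetical fetchable attributes for a Specimen-like class.
SAVED_FETCH_ATTRIBUTES = [:position, :events, :label]

# On a :create (or auto-generated) save, keep only the attributes that
# already hold content; otherwise nothing needs to be fetched.
def saved_attributes_to_fetch(obj, operation)
  return [] unless operation.type == :create || operation.autogenerated
  SAVED_FETCH_ATTRIBUTES.reject do |pa|
    value = obj.send(pa)
    value.nil? || (value.respond_to?(:empty?) && value.empty?)
  end
end

specimen  = OpenStruct.new(position: nil, events: [], label: 'SPC-001')
operation = OpenStruct.new(type: :create, autogenerated: false)
p saved_attributes_to_fetch(specimen, operation) # => [:label]
```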
marcbowes/UsingYAML | lib/using_yaml.rb | UsingYAML.ClassMethods.using_yaml_file | ruby | def using_yaml_file(filename)
# Define a reader for filename such that the corresponding
# YAML file is loaded. Example: using_yaml_file(:foo) will look
# for foo.yml in the specified path.
define_method(filename) do
# Work out the absolute path for the filename and get a handle
# on the cachestore for that file.
pathname = using_yaml_path.join("#{filename}.yml").expand_path
yaml = (@using_yaml_cache ||= {})[pathname]
# If the yaml exists in our cache, then we don't need to hit
# the disk.
return yaml if @using_yaml_cache.has_key? pathname
# Safe disk read which either reads and parses a YAML object
# (and caches it against future reads) or graciously ignores
# the file's existence. Note that an error will appear on
# stderr to avoid typos (or similar) from causing unexpected
# behavior. See +UsingYAML.squelch!+ if you wish to hide the
# error.
begin
@using_yaml_cache[pathname] = UsingYAML.add_extensions(YAML.load_file(pathname), pathname)
rescue Exception => e
$stderr.puts "(UsingYAML) Could not load #{filename}: #{e.message}" unless UsingYAML.squelched?
@using_yaml_cache[pathname] = UsingYAML.add_extensions(nil, pathname)
end
end
# Define a writer for filename such that the incoming object is
# treated as a UsingYAML-ized Hash (magical properties). Be
# aware that the incoming object will not be saved to disk
# unless you explicitly do so.
define_method("#{filename}=".to_sym) do |object|
# Work out the absolute path for the filename and get a handle
# on the cachestore for that file.
pathname = using_yaml_path.join("#{filename}.yml").expand_path
(@using_yaml_cache ||= {})[pathname] = UsingYAML.add_extensions(object, pathname)
end
end | Special attr_accessor for the supplied +filename+ such that
files are intelligently loaded/written to disk.
using_yaml_file(:foo) # => attr_accessor(:foo) + some magic
If class Example is set up with the above, then:
example = Example.new
example.foo # => loads from foo.yml
example.foo.bar # => equivalent to example.foo['bar']
example.foo.save # => serialises to foo.yml | train | https://github.com/marcbowes/UsingYAML/blob/4485476ad0ad14850d41c8ed61673f7b08b9f007/lib/using_yaml.rb#L133-L171 | module ClassMethods
# Used to configure UsingYAML for a class by defining what files
# should be loaded and from where.
#
# include UsingYAML
# using_yaml :foo, :bar, :path => "/some/where"
#
# +args+ can contain either filenames or a hash which specifies a
# path which contains the corresponding files.
#
# The value of :path must either be a string or Proc (see
# +using_yaml_path+ for more information on overriding paths).
def using_yaml(*args)
# Include the instance methods which provide accessors and
# mutators for reading/writing from/to the YAML objects.
include InstanceMethods
# Each argument is either a filename or a :path option
args.each do |arg|
case arg
when Symbol, String
# Define accessors for this file
using_yaml_file(arg.to_s)
when Hash
# Currently only accepts { :path => ... }
next unless arg.size == 1 && arg.keys.first == :path
# Take note of the path
UsingYAML.path = [self.inspect, arg.values.first]
end
end
end
# Special attr_accessor for the supplied +filename+ such that
# files are intelligently loaded/written to disk.
#
# using_yaml_file(:foo) # => attr_accessor(:foo) + some magic
#
# If class Example is set up with the above, then:
#
# example = Example.new
# example.foo # => loads from foo.yml
# example.foo.bar # => equivalent to example.foo['bar']
# example.foo.save # => serialises to foo.yml
#
end
|
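A hypothetical end-to-end use of the accessors that `using_yaml_file` defines (class name, path, and keys are illustrative; assumes /etc/myapp/settings.yml exists and that the :path option wires up using_yaml_path):

```ruby
require 'using_yaml'

class Settings
  include UsingYAML
  using_yaml :settings, :path => '/etc/myapp'
end

s = Settings.new
s.settings                       # loads /etc/myapp/settings.yml once, then caches
s.settings['timeout']            # plain Hash access on the parsed YAML
s.settings = { 'timeout' => 30 } # replaces the cached object only
s.settings.save                  # explicit write back to disk, per the docs above
```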
koraktor/metior | lib/metior/repository.rb | Metior.Repository.file_stats | ruby | def file_stats(range = current_branch)
support! :file_stats
stats = {}
commits(range).each_value do |commit|
commit.added_files.each do |file|
stats[file] = { :modifications => 0 } unless stats.key? file
stats[file][:added_date] = commit.authored_date
stats[file][:modifications] += 1
end
commit.modified_files.each do |file|
stats[file] = { :modifications => 0 } unless stats.key? file
stats[file][:last_modified_date] = commit.authored_date
stats[file][:modifications] += 1
end
commit.deleted_files.each do |file|
stats[file] = { :modifications => 0 } unless stats.key? file
stats[file][:deleted_date] = commit.authored_date
end
end
stats
end | This evaluates basic statistics about the files in a given commit range.
@example
repo.file_stats
=> {
'a_file.rb' => {
:added_date => Tue Mar 29 16:13:47 +0200 2011,
:deleted_date => Sun Jun 05 12:56:18 +0200 2011,
:last_modified_date => Thu Apr 21 20:08:00 +0200 2011,
:modifications => 9
}
}
@param [String, Range] range The range of commits for which the file
stats should be retrieved. This may be given as a string
(`'master..development'`), a range (`'master'..'development'`) or
as a single ref (`'master'`). A single ref name means all commits
reachable from that ref.
@return [Hash<String, Hash<Symbol, Object>>] Each file is returned as a
key in this hash. The value of this key is another hash
containing the stats for this file. Depending on the state of the
file this includes `:added_date`, `:deleted_date`,
`:last_modified_date` and `:modifications`.
@see Commit#added_files
@see Commit#deleted_files
@see Commit#modified_files | train | https://github.com/koraktor/metior/blob/02da0f330774c91e1a7325a5a7edbe696f389f95/lib/metior/repository.rb#L179-L201 | class Repository
include AutoIncludeAdapter
# @return [String] The file system path of this repository
attr_reader :path
# Creates a new repository instance with the given file system path
#
# @param [String] path The file system path of the repository
def initialize(path)
@actors = {}
@commits = {}
@description = nil
@name = nil
@path = path
@refs = {}
end
# Returns a single VCS specific actor object from the raw data of the actor
# provided by the VCS implementation
#
# The actor object is either created from the given raw data or retrieved
# from the cache using the VCS specific unique identifier of the actor.
#
# @param [Object] actor The raw data of the actor provided by the VCS
# @return [Actor] A object representing the actor
# @see Actor.id_for
def actor(actor)
id = self.class::Actor.id_for(actor)
@actors[id] ||= self.class::Actor.new(self, actor)
end
# Returns all authors from the given commit range in a hash where the IDs
# of the authors are the keys and the authors are the values
#
# This will call `commits(range)` if the authors for the commit range are
# not known yet.
#
# @param [String, Range] range The range of commits for which the authors
# should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @return [ActorCollection] All authors from the given commit range
# @see #commits
def authors(range = current_branch)
commits(range).authors
end
alias_method :contributors, :authors
# Returns the names of all branches of this repository
#
# @return [Array<String>] The names of all branches
def branches
load_branches.each { |name, id| @refs[name] = id }.keys.sort
end
# Loads all commits including their committers and authors from the given
# commit range
#
# @param [String, Range] range The range of commits for which the commits
# should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @return [CommitCollection] All commits from the given commit range
def commits(range = current_branch)
range = parse_range range
commits = cached_commits range
if commits.empty?
base_commit, raw_commits = load_commits(range)
commits = build_commits raw_commits
unless base_commit.nil?
base_commit = self.class::Commit.new(self, base_commit)
base_commit.add_child commits.last.id
@commits[base_commit.id] = base_commit
end
else
if range.first == ''
unless commits.last.parents.empty?
raw_commits = load_commits(''..commits.last.id).last
commits += build_commits raw_commits[0..-2]
end
else
if commits.first.id != range.last
raw_commits = load_commits(commits.first.id..range.last).last
commits = build_commits(raw_commits) + commits
end
unless commits.last.parents.include? range.first
raw_commits = load_commits(range.first..commits.last.id).last
commits += build_commits raw_commits
end
end
end
CommitCollection.new commits, range
end
# Returns all committers from the given commit range in a hash where the
# IDs of the committers are the keys and the committers are the values
#
# This will call `commits(range)` if the committers for the commit range
# are not known yet.
#
# @param [String, Range] range The range of commits for which the
# committers should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @return [ActorCollection] All committers from the given commit range
# @see #commits
def committers(range = current_branch)
commits(range).committers
end
alias_method :collaborators, :committers
# Returns the current branch of the repository
#
# @abstract Has to be implemented by VCS specific subclasses
# @return [String] The name of the current branch
def current_branch
raise NotImplementedError
end
# Returns the description of the project contained in the repository
#
# This will load the description through a VCS specific mechanism if
# required.
#
# @return [String] The description of the project in the repository
# @see #load_description
def description
load_description if @description.nil?
@description
end
# This evaluates basic statistics about the files in a given commit range.
#
# @example
# repo.file_stats
# => {
# 'a_file.rb' => {
# :added_date => Tue Mar 29 16:13:47 +0200 2011,
# :deleted_date => Sun Jun 05 12:56:18 +0200 2011,
# :last_modified_date => Thu Apr 21 20:08:00 +0200 2011,
# :modifications => 9
# }
# }
# @param [String, Range] range The range of commits for which the file
# stats should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @return [Hash<String, Hash<Symbol, Object>>] Each file is returned as a
# key in this hash. The value of this key is another hash
# containing the stats for this file. Depending on the state of the
# file this includes `:added_date`, `:deleted_date`,
# `:last_modified_date` and `:modifications`.
# @see Commit#added_files
# @see Commit#deleted_files
# @see Commit#modified_files
# Returns the unique identifier for the commit the given reference – like a
# branch name – is pointing to
#
# @abstract Has to be implemented by VCS subclasses
# @param [String] ref A symbolic reference name
# @return [Object] The unique identifier of the commit the reference is
# pointing to
def id_for_ref(ref)
raise NotImplementedError
end
# This evaluates the changed lines in each commit of the given commit
# range.
#
# For easier use, the values are stored in separate arrays where each
# number represents the number of changed (i.e. added or deleted) lines in
# one commit.
#
# @example
# repo.line_history
# => { :additions => [10, 5, 0], :deletions => [0, -2, -1] }
# @param [String, Range] range The range of commits for which the commit
# stats should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @return [Hash<Symbol, Array>] Added lines are returned in an `Array`
# assigned to key `:additions`, deleted lines are assigned to
# `:deletions`
# @see CommitCollection#line_history
def line_history(range = current_branch)
commits(range).line_history
end
# Loads the line stats for the commits given by a set of commit IDs
#
# @param [Array<String>] ids The IDs of the commits to load line stats for
# @return [Hash<String, Array<Fixnum>] An array of two number (line
# additions and deletions) for each of the given commit IDs
# @abstract Has to be implemented by VCS specific subclasses
def load_line_stats(ids)
raise NotImplementedError
end
# Returns the name of the project contained in the repository
#
# This will load the name through a VCS specific mechanism if required.
#
# @return [String] The name of the project in the repository
# @see #load_name
def name
load_name if @name.nil?
@name
end
# Create a new report instance for the given report name and commit range
# of this repository
#
# @param [String, Symbol] name The name of the report to load and
# initialize
# @param [String, Range] range The commit range to analyze
# @return [Report] The requested report
def report(name = :default, range = current_branch)
Report.create name, self, range
end
# Returns a list of authors with the biggest impact on the repository, i.e.
# changing the most code
#
# @param [String, Range] range The range of commits for which the authors
# should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @param [Fixnum] count The number of authors to return
# @raise [UnsupportedError] if the VCS does not support `:line_stats`
# @return [Array<Actor>] An array of the given number of the most
# significant authors in the given commit range
def significant_authors(range = current_branch, count = 3)
authors(range).most_significant(count)
end
alias_method :significant_contributors, :significant_authors
# Returns a list of commits with the biggest impact on the repository, i.e.
# changing the most code
#
# @param [String, Range] range The range of commits for which the commits
# should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @param [Fixnum] count The number of commits to return
# @raise [UnsupportedError] if the VCS does not support `:line_stats`
# @return [Array<Actor>] An array of the given number of the most
# significant commits in the given commit range
def significant_commits(range = current_branch, count = 10)
commits(range).most_significant(count)
end
# Returns the names of all tags of this repository
#
# @return [Array<String>] The names of all tags
def tags
load_tags.each { |name, id| @refs[name] = id }.keys.sort
end
# Returns a list of top contributors in the given commit range
#
# This will first have to load all authors (and i.e. commits) from the
# given commit range.
#
# @param [String, Range] range The range of commits for which the top
# contributors should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @param [Fixnum] count The number of contributors to return
# @return [Array<Actor>] An array of the given number of top contributors
# in the given commit range
# @see #authors
def top_authors(range = current_branch, count = 3)
authors(range).top(count)
end
alias_method :top_contributors, :top_authors
private
# Builds VCS specific commit objects for each given commit's raw data that
# is provided by the VCS implementation
#
# The raw data will be transformed into commit objects that will also be
# saved into the commit cache. Authors and committers of the given commits
# will be created and stored into the cache or loaded from the cache if
# they already exist. Additionally this method will establish an
# association between the commits and their children.
#
# @param [Array<Object>] raw_commits The commits' raw data provided by the
# VCS implementation
# @return [Array<Commit>] The commit objects representing the given commits
# @see Commit
# @see Commit#add_child
def build_commits(raw_commits)
child_commit_id = nil
raw_commits.map do |commit|
commit = self.class::Commit.new(self, commit)
commit.add_child child_commit_id unless child_commit_id.nil?
child_commit_id = commit.id
@commits[commit.id] = commit
commit
end
end
# Tries to retrieve as many commits as possible in the given commit range
# from the commit cache
#
# This method calls itself recursively to walk the given commit range
# either from the start to the end or vice versa depending on which commit
# could be found in the cache.
#
# @param [Range] range The range of commits which should be retrieved from
# the cache. This may be given a range of commit IDs
# (`'master'..'development'`).
# @return [Array<Commit>] A list of commit objects that could be retrieved
# from the cache
# @see Commit#children
def cached_commits(range)
commits = []
direction = nil
if @commits.key? range.last
current_commits = [@commits[range.last]]
direction = :parents
elsif @commits.key? range.first
current_commits = [@commits[range.first]]
direction = :children
end
unless direction.nil?
while !current_commits.empty? do
new_commits = []
current_commits.each do |commit|
new_commits += commit.send direction
commits << commit if commit.id != range.first
if direction == :parents && new_commits.include?(range.first)
new_commits = []
break
end
end
unless new_commits.include? range.first
current_commits = new_commits.uniq.map do |commit|
commit = @commits[commit]
commits.include?(commit) ? nil : commit
end.compact
end
end
end
commits.sort_by { |c| c.committed_date }.reverse
end
# Loads all branches and the corresponding commit IDs of this repository
#
# @abstract Has to be implemented by VCS specific subclasses
# @return [Hash<String, Object>] The names of all branches and the
# corresponding commit IDs
def load_branches
raise NotImplementedError
end
# Loads all commits from the given commit range
#
# @abstract Has to be implemented by VCS specific subclasses
# @param [String, Range] range The range of commits for which the commits
# should be retrieved. This may be given as a string
# (`'master..development'`), a range (`'master'..'development'`) or
# as a single ref (`'master'`). A single ref name means all commits
# reachable from that ref.
# @return [Array<Commit>] All commits from the given commit range
def load_commits(range = current_branch)
raise NotImplementedError
end
# Loads the description of the project contained in the repository
#
# @abstract Has to be implemented by VCS specific subclasses
# @see #description
def load_description
raise NotImplementedError
end
# Loads the name of the project contained in the repository
#
# @abstract Has to be implemented by VCS specific subclasses
# @see #description
def load_name
raise NotImplementedError
end
# Loads all tags and the corresponding commit IDs of this repository
#
# @abstract Has to be implemented by VCS specific subclasses
# @return [Hash<String, Object>] The names of all tags and the
# corresponding commit IDs
def load_tags
raise NotImplementedError
end
# Parses a string or range of commit IDs or ref names into the coresponding
# range of unique commit IDs
#
# @param [String, Range] range The string that should be parsed for a range
# or an existing range
# @return [Range] The range of commit IDs parsed from the given parameter
# @see #id_for_ref
def parse_range(range)
unless range.is_a? Range
range = range.to_s.split '..'
range = ((range.size == 1) ? '' : range.first)..range.last
end
range = id_for_ref(range.first)..range.last if range.first != ''
range.first..id_for_ref(range.last)
end
end
|
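A usage sketch for `Repository#file_stats` above (assumes the metior gem; the `Metior::Git::Repository` adapter class and the repository path are assumptions):

```ruby
require 'metior'

repo  = Metior::Git::Repository.new('/path/to/repo') # adapter name is an assumption
stats = repo.file_stats('master')

# Print the five most frequently modified files.
stats.sort_by { |_, s| -s[:modifications] }.first(5).each do |file, s|
  puts format('%-40s %3d modifications', file, s[:modifications])
end
```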
iyuuya/jkf | lib/jkf/parser/csa.rb | Jkf::Parser.Csa.parse_moves | ruby | def parse_moves
s0 = @current_pos
s1 = parse_firstboard
if s1 != :failed
s2 = []
s3 = parse_move
while s3 != :failed
s2 << s3
s3 = parse_move
end
parse_comments
@reported_pos = s0
s0 = s2.unshift(s1)
else
@current_pos = s0
s0 = :failed
end
s0
end | moves : firstboard move* comment* | train | https://github.com/iyuuya/jkf/blob/4fd229c50737cab7b41281238880f1414e55e061/lib/jkf/parser/csa.rb#L440-L458 | class Csa < Base
protected
# kifu : csa2 | csa1
def parse_root
@input += "\n" unless @input[-1] =~ /\n|\r|,/ # FIXME
s0 = parse_csa2
s0 = parse_csa1 if s0 == :failed
s0
end
# csa2 : version22 information? initialboard moves?
def parse_csa2
s0 = @current_pos
if parse_version22 != :failed
s1 = parse_information
s1 = nil if s1 == :failed
s2 = parse_initial_board
if s2 != :failed
s3 = parse_moves
s3 = nil if s3 == :failed
@reported_pos = s0
s0 = -> (info, ini, ms) do
ret = { "header" => info["header"], "initial" => ini, "moves" => ms }
if info && info["players"]
ret["header"]["先手"] = info["players"][0] if info["players"][0]
ret["header"]["後手"] = info["players"][1] if info["players"][1]
end
ret
end.call(s1, s2, s3)
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# version22 : comment* "V2.2" nl
def parse_version22
s0 = @current_pos
s1 = parse_comments
s2 = match_str("V2.2")
if s2 != :failed
s3 = parse_nl
if s3 != :failed
s0 = [s1, s2, s3]
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# information : players? headers
def parse_information
s0 = @current_pos
s1 = parse_players
s1 = nil if s1 == :failed
s2 = parse_headers
if s2 != :failed
@reported_pos = s0
s0 = { "players" => s1, "header" => s2 }
else
@current_pos = s0
s0 = :failed
end
s0
end
# headers : header*
def parse_headers
s0 = @current_pos
s1 = []
s2 = parse_header
while s2 != :failed
s1 << s2
s2 = parse_header
end
@reported_pos = s0
s0 = -> (header) do
ret = {}
header.each do |data|
ret[normalize_header_key(data["k"])] = data["v"]
end
ret
end.call(s1)
s0
end
# header : comment* "$" [^:]+ ":" nonls nl
def parse_header
s0 = @current_pos
parse_comments
if match_str("$") != :failed
s4 = match_regexp(/^[^:]/)
if s4 != :failed
s3 = []
while s4 != :failed
s3 << s4
s4 = match_regexp(/^[^:]/)
end
else
s3 = :failed
end
if s3 != :failed
if match_str(":") != :failed
s4 = parse_nonls
if parse_nl != :failed
@reported_pos = s0
s0 = { "k" => s3.join, "v" => s4.join }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# csa1 : players? initialboard? moves
def parse_csa1
s0 = @current_pos
s1 = parse_players
s1 = nil if s1 == :failed
s2 = parse_initial_board
s2 = nil if s2 == :failed
s3 = parse_moves
if s3 != :failed
@reported_pos = s0
s0 = -> (ply, ini, ms) do
ret = { "header" => {}, "initial" => ini, "moves" => ms }
if ply
ret["header"]["先手"] = ply[0] if ply[0]
ret["header"]["後手"] = ply[1] if ply[1]
end
ret
end.call(s1, s2, s3)
else
@current_pos = s0
s0 = :failed
end
s0
end
# players : comment* ("N+" nonls nl)? comment* ("N-" nonls nl)?
def parse_players
s0 = @current_pos
parse_comments
s2 = @current_pos
if match_str("N+") != :failed
s4 = parse_nonls
if parse_nl != :failed
@reported_pos = s2
s2 = s4
else
@current_pos = s2
s2 = :failed
end
else
@current_pos = s2
s2 = :failed
end
s2 = nil if s2 == :failed
parse_comments
s4 = @current_pos
if match_str("N-") != :failed
s6 = parse_nonls
if parse_nl != :failed
@reported_pos = s4
s4 = s6
else
@current_pos = s4
s4 = :failed
end
else
@current_pos = s4
s4 = :failed
end
s4 = nil if s4 == :failed
@reported_pos = s0
s0 = [(s2 ? s2.join : nil), (s4 ? s4.join : nil)]
s0
end
# initialboard : comment* (hirate | ikkatsu | "") komabetsu comment* teban nl
def parse_initial_board
s0 = @current_pos
parse_comments
s2 = parse_hirate
if s2 == :failed
s2 = parse_ikkatsu
if s2 == :failed
s2 = @current_pos
s3 = match_str("")
if s3 != :failed
@reported_pos = s2
s3 = "NO"
end
s2 = s3
end
end
if s2 != :failed
s3 = parse_komabetsu
if s3 != :failed
parse_comments
s5 = parse_teban
if s5 != :failed
if parse_nl != :failed
@reported_pos = s0
-> (data, koma, teban) do
if data == "NO"
data = koma
else
data["data"]["hands"] = koma["data"]["hands"]
end
data["data"]["color"] = teban
data
end.call(s2, s3, s5)
else
@current_pos = s0
:failed
end
else
@current_pos = s0
:failed
end
else
@current_pos = s0
:failed
end
else
@current_pos = s0
:failed
end
end
# hirate : "PI" xypiece* nl
def parse_hirate
s0 = @current_pos
if match_str("PI") != :failed
s2 = []
s3 = parse_xy_piece
while s3 != :failed
s2 << s3
s3 = parse_xy_piece
end
if parse_nl != :failed
@reported_pos = s0
s0 = -> (ps) do
ret = { "preset" => "OTHER", "data" => { "board" => get_hirate } }
ps.each do |piece|
ret["data"]["board"][piece["xy"]["x"] - 1][piece["xy"]["y"] - 1] = {}
end
ret
end.call(s2)
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# ikkatsu : ikkatsuline+
def parse_ikkatsu
s0 = @current_pos
s2 = parse_ikkatsu_line
if s2 != :failed
s1 = []
while s2 != :failed
s1 << s2
s2 = parse_ikkatsu_line
end
else
s1 = :failed
end
if s1 != :failed
@reported_pos = s0
s1 = -> (lines) do
board = []
9.times do |i|
line = []
9.times do |j|
line << lines[j][8 - i]
end
board << line
end
{ "preset" => "OTHER", "data" => { "board" => board } }
end.call(s1)
end
s0 = s1
s0
end
# ikkatsuline : "P" [1-9] masu+ nl
def parse_ikkatsu_line
s0 = @current_pos
if match_str("P") != :failed
if match_digit != :failed
s4 = parse_masu
if s4 != :failed
s3 = []
while s4 != :failed
s3 << s4
s4 = parse_masu
end
else
s3 = :failed
end
if s3 != :failed
s4 = parse_nl
if s4 != :failed
@reported_pos = s0
s0 = s3
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# masu : teban piece | " * "
def parse_masu
s0 = @current_pos
s1 = parse_teban
if s1 != :failed
s2 = parse_piece
if s2 != :failed
@reported_pos = s0
s0 = { "color" => s1, "kind" => s2 }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
if s0 == :failed
s0 = @current_pos
if match_str(" * ") != :failed
@reported_pos = s0
s1 = {}
end
s0 = s1
end
s0
end
# komabetsu : komabetsuline*
def parse_komabetsu
s0 = @current_pos
s1 = []
s2 = parse_komabetsu_line
while s2 != :failed
s1 << s2
s2 = parse_komabetsu_line
end
@reported_pos = s0
transform_komabetsu_lines(s1)
end
# komabetsuline : "P" teban xypiece+ nl
def parse_komabetsu_line
s0 = @current_pos
if match_str("P") != :failed
s2 = parse_teban
if s2 != :failed
s4 = parse_xy_piece
if s4 != :failed
s3 = []
while s4 != :failed
s3 << s4
s4 = parse_xy_piece
end
else
s3 = :failed
end
if s3 != :failed
if parse_nl != :failed
@reported_pos = s0
s0 = { "teban" => s2, "pieces" => s3 }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# moves : firstboard move* comment*
# firstboard : comment*
def parse_firstboard
s0 = @current_pos
s1 = parse_comments
@reported_pos = s0
s1.empty? ? {} : { "comments" => s1 }
end
# move : (normalmove | specialmove) time? comment*
def parse_move
s0 = @current_pos
s1 = parse_normal_move
s1 = parse_special_move if s1 == :failed
if s1 != :failed
s2 = parse_time
s2 = nil if s2 == :failed
s3 = parse_comments
@reported_pos = s0
s0 = -> (move, time, comments) do
ret = {}
ret["comments"] = comments if !comments.empty?
ret["time"] = time if time
if move["special"]
ret["special"] = move["special"]
else
ret["move"] = move
end
ret
end.call(s1, s2, s3)
else
@current_pos = s0
s0 = :failed
end
s0
end
# normalmove : teban xy xy piece nl
def parse_normal_move
s0 = @current_pos
s1 = parse_teban
if s1 != :failed
s2 = parse_xy
if s2 != :failed
s3 = parse_xy
if s3 != :failed
s4 = parse_piece
if s4 != :failed
if parse_nl != :failed
@reported_pos = s0
s0 = -> (color, from, to, piece) do
ret = { "color" => color, "to" => to, "piece" => piece }
ret["from"] = from if from["x"] != 0
ret
end.call(s1, s2, s3, s4)
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# specialmove : "%" [-+_A-Z]+ nl
def parse_special_move
s0 = @current_pos
s1 = match_str("%")
if s1 != :failed
s3 = match_regexp(/^[\-+_A-Z]/)
if s3 != :failed
s2 = []
while s3 != :failed
s2 << s3
s3 = match_regexp(/^[\-+_A-Z]/)
end
else
s2 = :failed
end
if s2 != :failed
if parse_nl != :failed
@reported_pos = s0
s0 = { "special" => s2.join }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# teban : "+" | "-"
def parse_teban
s0 = @current_pos
s1 = match_str("+")
if s1 != :failed
@reported_pos = s0
s1 = 0
end
s0 = s1
if s0 == :failed
s0 = @current_pos
s1 = match_str("-")
if s1 != :failed
@reported_pos = s0
s1 = 1
end
s0 = s1
end
s0
end
# comment : "'" nonls nl
def parse_comment
s0 = @current_pos
if match_str("'") != :failed
s2 = parse_nonls
if parse_nl != :failed
@reported_pos = s0
s2.join
else
@current_pos = s0
:failed
end
else
@current_pos = s0
:failed
end
end
# comments : comment*
def parse_comments
stack = []
matched = parse_comment
while matched != :failed
stack << matched
matched = parse_comment
end
stack
end
# time : "T" [0-9]* nl
def parse_time
s0 = @current_pos
if match_str("T") != :failed
s2 = match_digits
if parse_nl != :failed
@reported_pos = s0
s0 = { "now" => sec2time(s2.join.to_i) }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# xy : [0-9] [0-9]
def parse_xy
s0 = @current_pos
s1 = match_digit
if s1 != :failed
s2 = match_digit
if s2 != :failed
@reported_pos = s0
s0 = { "x" => s1.to_i, "y" => s2.to_i }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# piece : [A-Z] [A-Z]
def parse_piece
s0 = @current_pos
s1 = match_regexp(/^[A-Z]/)
if s1 != :failed
s2 = match_regexp(/^[A-Z]/)
if s2 != :failed
@reported_pos = s0
s0 = s1 + s2
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# xypiece : xy piece
def parse_xy_piece
s0 = @current_pos
s1 = parse_xy
if s1 != :failed
s2 = parse_piece
if s2 != :failed
@reported_pos = s0
s0 = { "xy" => s1, "piece" => s2 }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# nl : ("\r"? "\n") | " "* ","
def parse_nl
s0 = @current_pos
s1 = match_str("\r")
s1 = nil if s1 == :failed
s2 = match_str("\n")
if s2 != :failed
s0 = [s1, s2]
else
@current_pos = s0
s0 = :failed
end
if s0 == :failed
s0 = @current_pos
s1 = match_spaces
s2 = match_str(",")
if s2 != :failed
s0 = [s1, s2]
else
@current_pos = s0
s0 = :failed
end
end
s0
end
# nonl : [^\r\n]
def parse_nonl
match_regexp(/^[^\r\n]/)
end
# nonls : nonl*
def parse_nonls
stack = []
matched = parse_nonl
while matched != :failed
stack << matched
matched = parse_nonl
end
stack
end
# lines to jkf
def transform_komabetsu_lines(lines)
board = generate_empty_board
hands = [
{ "FU" => 0, "KY" => 0, "KE" => 0, "GI" => 0, "KI" => 0, "KA" => 0, "HI" => 0 },
{ "FU" => 0, "KY" => 0, "KE" => 0, "GI" => 0, "KI" => 0, "KA" => 0, "HI" => 0 }
]
all = { "FU" => 18, "KY" => 4, "KE" => 4, "GI" => 4, "KI" => 4, "KA" => 2, "HI" => 2 }
lines.each do |line|
line["pieces"].each do |piece|
xy = piece["xy"]
if xy["x"] == 0
if piece["piece"] == "AL"
hands[line["teban"]] = all
return { "preset" => "OTHER", "data" => { "board" => board, "hands" => hands } }
end
obj = hands[line["teban"]]
obj[piece["piece"]] += 1
else
board[xy["x"] - 1][xy["y"] - 1] = { "color" => line["teban"],
"kind" => piece["piece"] }
end
all[piece["piece"]] -= 1 if piece["piece"] != "OU"
end
end
{ "preset" => "OTHER", "data" => { "board" => board, "hands" => hands } }
end
# return empty board jkf
def generate_empty_board
board = []
9.times do |_i|
line = []
9.times do |_j|
line << {}
end
board << line
end
board
end
# sec to time(m, s)
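# e.g. sec2time(125) #=> { "m" => 2, "s" => 5 }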
def sec2time(sec)
s = sec % 60
m = (sec - s) / 60
{ "m" => m, "s" => s }
end
# return hirate board jkf
def get_hirate
[
[{ "color" => 1, "kind" => "KY" }, {}, { "color" => 1, "kind" => "FU" }, {}, {}, {},
{ "color" => 0, "kind" => "FU" }, {}, { "color" => 0, "kind" => "KY" }],
[{ "color" => 1, "kind" => "KE" }, { "color" => 1, "kind" => "KA" },
{ "color" => 1, "kind" => "FU" }, {}, {}, {}, { "color" => 0, "kind" => "FU" },
{ "color" => 0, "kind" => "HI" }, { "color" => 0, "kind" => "KE" }],
[{ "color" => 1, "kind" => "GI" }, {}, { "color" => 1, "kind" => "FU" }, {}, {}, {},
{ "color" => 0, "kind" => "FU" }, {}, { "color" => 0, "kind" => "GI" }],
[{ "color" => 1, "kind" => "KI" }, {}, { "color" => 1, "kind" => "FU" }, {}, {}, {},
{ "color" => 0, "kind" => "FU" }, {}, { "color" => 0, "kind" => "KI" }],
[{ "color" => 1, "kind" => "OU" }, {}, { "color" => 1, "kind" => "FU" }, {}, {}, {},
{ "color" => 0, "kind" => "FU" }, {}, { "color" => 0, "kind" => "OU" }],
[{ "color" => 1, "kind" => "KI" }, {}, { "color" => 1, "kind" => "FU" }, {}, {}, {},
{ "color" => 0, "kind" => "FU" }, {}, { "color" => 0, "kind" => "KI" }],
[{ "color" => 1, "kind" => "GI" }, {}, { "color" => 1, "kind" => "FU" }, {}, {}, {},
{ "color" => 0, "kind" => "FU" }, {}, { "color" => 0, "kind" => "GI" }],
[{ "color" => 1, "kind" => "KE" }, { "color" => 1, "kind" => "HI" },
{ "color" => 1, "kind" => "FU" }, {}, {}, {}, { "color" => 0, "kind" => "FU" },
{ "color" => 0, "kind" => "KA" }, { "color" => 0, "kind" => "KE" }],
[{ "color" => 1, "kind" => "KY" }, {}, { "color" => 1, "kind" => "FU" }, {}, {}, {},
{ "color" => 0, "kind" => "FU" }, {}, { "color" => 0, "kind" => "KY" }]
]
end
# normalize a CSA header key to its Japanese JKF equivalent
# (EVENT: event name, SITE: venue, START_TIME/END_TIME: start/end
# date-time, TIME_LIMIT: time allotment); unknown keys pass through
def normalize_header_key(key)
{
"EVENT" => "棋戦",
"SITE" => "場所",
"START_TIME" => "開始日時",
"END_TIME" => "終了日時",
"TIME_LIMIT" => "持ち時間"
}[key] || key
end
end
|
intridea/hashie | lib/hashie/mash.rb | Hashie.Mash.custom_reader | ruby | def custom_reader(key)
default_proc.call(self, key) if default_proc && !key?(key)
value = regular_reader(convert_key(key))
yield value if block_given?
value
end | Retrieves an attribute set in the Mash. Will convert
any key passed in to a string before retrieving. | train | https://github.com/intridea/hashie/blob/da9fd39a0e551e09c1441cb7453c969a4afbfd7f/lib/hashie/mash.rb#L144-L149 | class Mash < Hash
include Hashie::Extensions::PrettyInspect
include Hashie::Extensions::RubyVersionCheck
ALLOWED_SUFFIXES = %w[? ! = _].freeze
class CannotDisableMashWarnings < StandardError
def initialize
super(
'You cannot disable warnings on the base Mash class. ' \
'Please subclass the Mash and disable it in the subclass.'
)
end
end
# Disable the logging of warnings based on keys conflicting keys/methods
#
# @api semipublic
# @return [void]
def self.disable_warnings
raise CannotDisableMashWarnings if self == Hashie::Mash
@disable_warnings = true
end
# Checks whether this class disables warnings for conflicting keys/methods
#
# @api semipublic
# @return [Boolean]
def self.disable_warnings?
@disable_warnings ||= false
end
# Inheritance hook that sets class configuration when inherited.
#
# @api semipublic
# @return [void]
def self.inherited(subclass)
super
subclass.disable_warnings if disable_warnings?
end
def self.load(path, options = {})
@_mashes ||= new
return @_mashes[path] if @_mashes.key?(path)
raise ArgumentError, "The following file doesn't exist: #{path}" unless File.file?(path)
parser = options.fetch(:parser) { Hashie::Extensions::Parsers::YamlErbParser }
@_mashes[path] = new(parser.perform(path, options.except(:parser))).freeze
end
def to_module(mash_method_name = :settings)
mash = self
Module.new do |m|
m.send :define_method, mash_method_name.to_sym do
mash
end
end
end
def with_accessors!
extend Hashie::Extensions::Mash::DefineAccessors
end
alias to_s inspect
# If you pass in an existing hash, it will
# convert it to a Mash including recursively
# descending into arrays and hashes, converting
# them as well.
def initialize(source_hash = nil, default = nil, &blk)
deep_update(source_hash) if source_hash
default ? super(default) : super(&blk)
end
class << self; alias [] new; end
alias regular_reader []
alias regular_writer []=
# Retrieves an attribute set in the Mash. Will convert
# any key passed in to a string before retrieving.
# Sets an attribute in the Mash. Key will be converted to
# a string before it is set, and Hashes will be converted
# into Mashes for nesting purposes.
def custom_writer(key, value, convert = true) #:nodoc:
key_as_symbol = (key = convert_key(key)).to_sym
log_built_in_message(key_as_symbol) if log_collision?(key_as_symbol)
regular_writer(key, convert ? convert_value(value) : value)
end
alias [] custom_reader
alias []= custom_writer
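# Keys are indifferent, e.g.:
#   mash = Hashie::Mash.new("name" => "Alice")
#   mash[:name]  #=> "Alice"
#   mash.name    #=> "Alice"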
# This is the bang-method reader; it returns (and stores) a new Mash
# if there isn't a value already assigned to the key requested.
def initializing_reader(key)
ck = convert_key(key)
regular_writer(ck, self.class.new) unless key?(ck)
regular_reader(ck)
end
# This is the underbang-method reader; it returns a temporary new Mash
# if there isn't a value already assigned to the key requested.
def underbang_reader(key)
ck = convert_key(key)
if key?(ck)
regular_reader(ck)
else
self.class.new
end
end
def fetch(key, *args)
super(convert_key(key), *args)
end
def delete(key)
super(convert_key(key))
end
def values_at(*keys)
super(*keys.map { |key| convert_key(key) })
end
alias regular_dup dup
# Duplicates the current mash as a new mash.
def dup
self.class.new(self, default, &default_proc)
end
alias regular_key? key?
def key?(key)
super(convert_key(key))
end
alias has_key? key?
alias include? key?
alias member? key?
# Performs a deep_update on a duplicate of the
# current mash.
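# e.g. Mash.new(a: { b: 1 }).deep_merge(a: { c: 2 })
#      #=> { "a" => { "b" => 1, "c" => 2 } } (keys are stringified)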
def deep_merge(other_hash, &blk)
dup.deep_update(other_hash, &blk)
end
alias merge deep_merge
# Recursively merges this mash with the passed
# in hash, merging each hash in the hierarchy.
def deep_update(other_hash, &blk)
other_hash.each_pair do |k, v|
key = convert_key(k)
if v.is_a?(::Hash) && key?(key) && regular_reader(key).is_a?(Mash)
custom_reader(key).deep_update(v, &blk)
else
value = convert_value(v, true)
value = convert_value(yield(key, self[k], value), true) if blk && key?(k)
custom_writer(key, value, false)
end
end
self
end
alias deep_merge! deep_update
alias update deep_update
alias merge! update
# Assigns a value to a key
def assign_property(name, value)
self[name] = value
end
# Performs a shallow_update on a duplicate of the current mash
def shallow_merge(other_hash)
dup.shallow_update(other_hash)
end
# Merges (non-recursively) the hash from the argument,
# changing the receiving hash
def shallow_update(other_hash)
other_hash.each_pair do |k, v|
regular_writer(convert_key(k), convert_value(v, true))
end
self
end
def replace(other_hash)
(keys - other_hash.keys).each { |key| delete(key) }
other_hash.each { |key, value| self[key] = value }
self
end
def respond_to_missing?(method_name, *args)
return true if key?(method_name)
suffix = method_suffix(method_name)
if suffix
true
else
super
end
end
def prefix_method?(method_name)
method_name = method_name.to_s
method_name.end_with?(*ALLOWED_SUFFIXES) && key?(method_name.chop)
end
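# Suffix semantics: "foo=" assigns, "foo?" tests truthiness,
# "foo!" creates and stores a nested Mash, "foo_" returns a
# temporary Mash without storing it.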
def method_missing(method_name, *args, &blk) # rubocop:disable Style/MethodMissing
return self.[](method_name, &blk) if key?(method_name)
name, suffix = method_name_and_suffix(method_name)
case suffix
when '='.freeze
assign_property(name, args.first)
when '?'.freeze
!!self[name]
when '!'.freeze
initializing_reader(name)
when '_'.freeze
underbang_reader(name)
else
self[method_name]
end
end
# play nice with ActiveSupport Array#extract_options!
def extractable_options?
true
end
# another ActiveSupport method, see issue #270
def reverse_merge(other_hash)
self.class.new(other_hash).merge(self)
end
with_minimum_ruby('2.3.0') do
def dig(*keys)
super(*keys.map { |key| convert_key(key) })
end
end
protected
def method_name_and_suffix(method_name)
method_name = method_name.to_s
if method_name.end_with?(*ALLOWED_SUFFIXES)
[method_name[0..-2], method_name[-1]]
else
[method_name[0..-1], nil]
end
end
def method_suffix(method_name)
method_name = method_name.to_s
method_name[-1] if method_name.end_with?(*ALLOWED_SUFFIXES)
end
def convert_key(key) #:nodoc:
key.to_s
end
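# NB: inside the Hashie namespace, bare Hash/Array match Hashie::Hash and
# Hashie::Array, while ::Hash/::Array match the Ruby core classes.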
def convert_value(val, duping = false) #:nodoc:
case val
when self.class
val.dup
when Hash
duping ? val.dup : val
when ::Hash
val = val.dup if duping
self.class.new(val)
when Array
val.map { |e| convert_value(e) }
when ::Array
Array.new(val.map { |e| convert_value(e) })
else
val
end
end
private
def log_built_in_message(method_key)
return if self.class.disable_warnings?
method_information = Hashie::Utils.method_information(method(method_key))
Hashie.logger.warn(
'You are setting a key that conflicts with a built-in method ' \
"#{self.class}##{method_key} #{method_information}. " \
'This can cause unexpected behavior when accessing the key as a ' \
'property. You can still access the key via the #[] method.'
)
end
def log_collision?(method_key)
respond_to?(method_key) && !self.class.disable_warnings? &&
!(regular_key?(method_key) || regular_key?(method_key.to_s))
end
end
|
kmuto/review | lib/review/latexbuilder.rb | ReVIEW.LATEXBuilder.inline_list | ruby | def inline_list(id)
chapter, id = extract_chapter_id(id)
if get_chap(chapter).nil?
macro('reviewlistref', I18n.t('format_number_without_chapter', [chapter.list(id).number]))
else
macro('reviewlistref', I18n.t('format_number', [get_chap(chapter), chapter.list(id).number]))
end
rescue KeyError
error "unknown list: #{id}"
end | FIXME: use TeX native label/ref. | train | https://github.com/kmuto/review/blob/77d1273e671663f05db2992281fd891b776badf0/lib/review/latexbuilder.rb#L934-L943 | class LATEXBuilder < Builder
include LaTeXUtils
include TextUtils
%i[dtp hd_chap].each do |e|
Compiler.definline(e)
end
Compiler.defsingle(:latextsize, 1)
def extname
'.tex'
end
def builder_init_file
@chapter.book.image_types = %w[.ai .eps .pdf .tif .tiff .png .bmp .jpg .jpeg .gif]
@blank_needed = false
@latex_tsize = nil
@tsize = nil
@table_caption = nil
@cellwidth = nil
@ol_num = nil
@first_line_num = nil
@sec_counter = SecCounter.new(5, @chapter)
@foottext = {}
setup_index
initialize_metachars(@book.config['texcommand'])
end
private :builder_init_file
def setup_index
@index_db = {}
@index_mecab = nil
return true unless @book.config['pdfmaker']['makeindex']
if @book.config['pdfmaker']['makeindex_dic']
@index_db = load_idxdb(@book.config['pdfmaker']['makeindex_dic'])
end
return true unless @book.config['pdfmaker']['makeindex_mecab']
begin
begin
require 'MeCab'
rescue LoadError
require 'mecab'
end
require 'nkf'
@index_mecab = MeCab::Tagger.new(@book.config['pdfmaker']['makeindex_mecab_opts'])
rescue LoadError
error 'MeCab not found'
end
end
def load_idxdb(file)
table = {}
File.foreach(file) do |line|
key, value = *line.strip.split(/\t+/, 2)
table[key] = value
end
table
end
def blank
@blank_needed = true
end
private :blank
def print(*s)
if @blank_needed
@output.puts
@blank_needed = false
end
super
end
private :print
def puts(*s)
if @blank_needed
@output.puts
@blank_needed = false
end
super
end
private :puts
def result
if @chapter.is_a?(ReVIEW::Book::Part) && !@book.config.check_version('2', exception: false)
puts '\end{reviewpart}'
end
@output.string
end
HEADLINE = {
1 => 'chapter',
2 => 'section',
3 => 'subsection',
4 => 'subsubsection',
5 => 'paragraph',
6 => 'subparagraph'
}.freeze
def headline(level, label, caption)
_, anchor = headline_prefix(level)
headline_name = HEADLINE[level]
if @chapter.is_a?(ReVIEW::Book::Part)
if @book.config.check_version('2', exception: false)
headline_name = 'part'
elsif level == 1
headline_name = 'part'
puts '\begin{reviewpart}'
end
end
prefix = ''
if level > @book.config['secnolevel'] || (@chapter.number.to_s.empty? && level > 1)
prefix = '*'
end
blank unless @output.pos == 0
@doc_status[:caption] = true
puts macro(headline_name + prefix, compile_inline(caption))
@doc_status[:caption] = nil
if prefix == '*' && level <= @book.config['toclevel'].to_i
puts "\\addcontentsline{toc}{#{headline_name}}{#{compile_inline(caption)}}"
end
if level == 1
puts macro('label', chapter_label)
else
puts macro('label', sec_label(anchor))
puts macro('label', label) if label
end
rescue
error "unknown level: #{level}"
end
def nonum_begin(level, _label, caption)
blank unless @output.pos == 0
@doc_status[:caption] = true
puts macro(HEADLINE[level] + '*', compile_inline(caption))
@doc_status[:caption] = nil
puts macro('addcontentsline', 'toc', HEADLINE[level], compile_inline(caption))
end
def nonum_end(level)
end
def notoc_begin(level, _label, caption)
blank unless @output.pos == 0
@doc_status[:caption] = true
puts macro(HEADLINE[level] + '*', compile_inline(caption))
@doc_status[:caption] = nil
end
def notoc_end(level)
end
def nodisp_begin(level, _label, caption)
if @output.pos != 0
blank
else
puts macro('clearpage')
end
puts macro('addcontentsline', 'toc', HEADLINE[level], compile_inline(caption))
# FIXME: headings
end
def nodisp_end(level)
end
def column_begin(level, label, caption)
blank
@doc_status[:column] = true
target = nil
if label
target = "\\hypertarget{#{column_label(label)}}{}"
else
target = "\\hypertarget{#{column_label(caption)}}{}"
end
@doc_status[:caption] = true
if @book.config.check_version('2', exception: false)
puts '\\begin{reviewcolumn}'
puts target
puts macro('reviewcolumnhead', nil, compile_inline(caption))
else
# ver.3
print '\\begin{reviewcolumn}'
puts "[#{compile_inline(caption)}#{target}]"
end
@doc_status[:caption] = nil
if level <= @book.config['toclevel'].to_i
puts "\\addcontentsline{toc}{#{HEADLINE[level]}}{#{compile_inline(caption)}}"
end
end
def column_end(_level)
puts '\\end{reviewcolumn}'
blank
@doc_status[:column] = nil
end
def captionblock(type, lines, caption)
if @book.config.check_version('2', exception: false)
type = 'minicolumn'
end
print "\\begin{review#{type}}"
@doc_status[:caption] = true
if @book.config.check_version('2', exception: false)
puts
if caption.present?
puts "\\reviewminicolumntitle{#{compile_inline(caption)}}"
end
else
if caption.present?
print "[#{compile_inline(caption)}]"
end
puts
end
@doc_status[:caption] = nil
blocked_lines = split_paragraph(lines)
puts blocked_lines.join("\n\n")
puts "\\end{review#{type}}"
end
def box(lines, caption = nil)
blank
puts macro('reviewboxcaption', compile_inline(caption)) if caption.present?
puts '\begin{reviewbox}'
lines.each do |line|
puts detab(line)
end
puts '\end{reviewbox}'
blank
end
def ul_begin
blank
puts '\begin{itemize}'
end
def ul_item(lines)
str = lines.join
str.sub!(/\A(\[)/) { '\lbrack{}' }
puts '\item ' + str
end
def ul_end
puts '\end{itemize}'
blank
end
def ol_begin
blank
puts '\begin{enumerate}'
return true unless @ol_num
puts "\\setcounter{enumi}{#{@ol_num - 1}}"
@ol_num = nil
end
def ol_item(lines, _num)
str = lines.join
str.sub!(/\A(\[)/) { '\lbrack{}' }
puts '\item ' + str
end
def ol_end
puts '\end{enumerate}'
blank
end
def dl_begin
blank
puts '\begin{description}'
end
def dt(str)
str.sub!(/\[/) { '\lbrack{}' }
str.sub!(/\]/) { '\rbrack{}' }
puts '\item[' + str + '] \mbox{} \\\\'
end
def dd(lines)
puts lines.join
end
def dl_end
puts '\end{description}'
blank
end
def paragraph(lines)
blank
lines.each do |line|
puts line
end
blank
end
def parasep
puts '\\parasep'
end
def read(lines)
latex_block 'quotation', lines
end
alias_method :lead, :read
def highlight_listings?
@book.config['highlight'] && @book.config['highlight']['latex'] == 'listings'
end
private :highlight_listings?
def emlist(lines, caption = nil, lang = nil)
blank
if highlight_listings?
common_code_block_lst(nil, lines, 'reviewemlistlst', 'title', caption, lang)
else
common_code_block(nil, lines, 'reviewemlist', caption, lang) { |line, _idx| detab(line) + "\n" }
end
end
def emlistnum(lines, caption = nil, lang = nil)
blank
first_line_num = line_num
if highlight_listings?
common_code_block_lst(nil, lines, 'reviewemlistnumlst', 'title', caption, lang, first_line_num: first_line_num)
else
common_code_block(nil, lines, 'reviewemlist', caption, lang) { |line, idx| detab((idx + first_line_num).to_s.rjust(2) + ': ' + line) + "\n" }
end
end
## override Builder#list
def list(lines, id, caption, lang = nil)
if highlight_listings?
common_code_block_lst(id, lines, 'reviewlistlst', 'caption', caption, lang)
else
common_code_block(id, lines, 'reviewlist', caption, lang) { |line, _idx| detab(line) + "\n" }
end
end
## override Builder#listnum
def listnum(lines, id, caption, lang = nil)
first_line_num = line_num
if highlight_listings?
common_code_block_lst(id, lines, 'reviewlistnumlst', 'caption', caption, lang, first_line_num: first_line_num)
else
common_code_block(id, lines, 'reviewlist', caption, lang) { |line, idx| detab((idx + first_line_num).to_s.rjust(2) + ': ' + line) + "\n" }
end
end
def cmd(lines, caption = nil, lang = nil)
if highlight_listings?
common_code_block_lst(nil, lines, 'reviewcmdlst', 'title', caption, lang)
else
blank
common_code_block(nil, lines, 'reviewcmd', caption, lang) { |line, _idx| detab(line) + "\n" }
end
end
def common_code_block(id, lines, command, caption, _lang)
@doc_status[:caption] = true
unless @book.config.check_version('2', exception: false)
puts '\\begin{reviewlistblock}'
end
if caption.present?
if command =~ /emlist/ || command =~ /cmd/ || command =~ /source/
puts macro(command + 'caption', compile_inline(caption))
else
begin
if get_chap.nil?
puts macro('reviewlistcaption', "#{I18n.t('list')}#{I18n.t('format_number_header_without_chapter', [@chapter.list(id).number])}#{I18n.t('caption_prefix')}#{compile_inline(caption)}")
else
puts macro('reviewlistcaption', "#{I18n.t('list')}#{I18n.t('format_number_header', [get_chap, @chapter.list(id).number])}#{I18n.t('caption_prefix')}#{compile_inline(caption)}")
end
rescue KeyError
error "no such list: #{id}"
end
end
end
@doc_status[:caption] = nil
body = ''
lines.each_with_index do |line, idx|
body.concat(yield(line, idx))
end
puts macro('begin', command)
print body
puts macro('end', command)
unless @book.config.check_version('2', exception: false)
puts '\\end{reviewlistblock}'
end
blank
end
def common_code_block_lst(_id, lines, command, title, caption, lang, first_line_num: 1)
if title == 'title' && caption.blank? && @book.config.check_version('2', exception: false)
print '\vspace{-1.5em}'
end
body = lines.inject('') { |i, j| i + detab(unescape(j)) + "\n" }
args = make_code_block_args(title, caption, lang, first_line_num: first_line_num)
puts %Q(\\begin{#{command}}[#{args}])
print body
puts %Q(\\end{#{command}})
blank
end
def make_code_block_args(title, caption, lang, first_line_num: 1)
caption_str = compile_inline((caption || ''))
if title == 'title' && caption_str == '' && @book.config.check_version('2', exception: false)
caption_str = '\relax' ## dummy character to remove lstname
end
lexer = if @book.config['highlight'] && @book.config['highlight']['lang']
@book.config['highlight']['lang'] # default setting
else
''
end
lexer = lang if lang.present?
args = "language={#{lexer}}"
if title == 'title' && caption_str == ''
# ignore
else
args = "#{title}={#{caption_str}}," + args
end
if first_line_num != 1
args << ",firstnumber=#{first_line_num}"
end
args
end
def source(lines, caption = nil, lang = nil)
if highlight_listings?
common_code_block_lst(nil, lines, 'reviewsourcelst', 'title', caption, lang)
else
common_code_block(nil, lines, 'reviewsource', caption, lang) { |line, _idx| detab(line) + "\n" }
end
end
def image_header(id, caption)
end
def handle_metric(str)
if @book.config['image_scale2width'] && str =~ /\Ascale=([\d.]+)\Z/
return "width=#{$1}\\maxwidth"
end
str
end
def result_metric(array)
array.join(',')
end
def image_image(id, caption, metric)
metrics = parse_metric('latex', metric)
# image is always bound here
puts "\\begin{reviewimage}%%#{id}"
if metrics.present?
puts "\\includegraphics[#{metrics}]{#{@chapter.image(id).path}}"
else
puts "\\includegraphics[width=\\maxwidth]{#{@chapter.image(id).path}}"
end
@doc_status[:caption] = true
if @book.config.check_version('2', exception: false)
puts macro('caption', compile_inline(caption)) if caption.present?
else
puts macro('reviewimagecaption', compile_inline(caption)) if caption.present?
end
@doc_status[:caption] = nil
puts macro('label', image_label(id))
puts '\end{reviewimage}'
end
def image_dummy(id, caption, lines)
warn "image not bound: #{id}"
puts '\begin{reviewdummyimage}'
# path = @chapter.image(id).path
puts "--[[path = #{id} (#{existence(id)})]]--"
lines.each do |line|
puts detab(line.rstrip)
end
puts macro('label', image_label(id))
@doc_status[:caption] = true
if @book.config.check_version('2', exception: false)
puts macro('caption', compile_inline(caption)) if caption.present?
else
puts macro('reviewimagecaption', compile_inline(caption)) if caption.present?
end
@doc_status[:caption] = nil
puts '\end{reviewdummyimage}'
end
def existence(id)
@chapter.image(id).bound? ? 'exist' : 'not exist'
end
private :existence
def image_label(id, chapter = nil)
chapter ||= @chapter
"image:#{chapter.id}:#{id}"
end
private :image_label
def chapter_label
"chap:#{@chapter.id}"
end
private :chapter_label
def sec_label(sec_anchor)
"sec:#{sec_anchor}"
end
private :sec_label
def table_label(id, chapter = nil)
chapter ||= @chapter
"table:#{chapter.id}:#{id}"
end
private :table_label
def bib_label(id)
"bib:#{id}"
end
private :bib_label
def column_label(id, chapter = @chapter)
filename = chapter.id
num = chapter.column(id).number
"column:#{filename}:#{num}"
end
private :column_label
def indepimage(lines, id, caption = nil, metric = nil)
metrics = parse_metric('latex', metric)
if @chapter.image(id).path
puts "\\begin{reviewimage}%%#{id}"
if metrics.present?
puts "\\includegraphics[#{metrics}]{#{@chapter.image(id).path}}"
else
puts "\\includegraphics[width=\\maxwidth]{#{@chapter.image(id).path}}"
end
else
warn "image not bound: #{id}"
puts '\begin{reviewdummyimage}'
puts "--[[path = #{id} (#{existence(id)})]]--"
lines.each do |line|
puts detab(line.rstrip)
end
end
@doc_status[:caption] = true
if caption.present?
puts macro('reviewindepimagecaption',
%Q(#{I18n.t('numberless_image')}#{I18n.t('caption_prefix')}#{compile_inline(caption)}))
end
@doc_status[:caption] = nil
if @chapter.image(id).path
puts '\end{reviewimage}'
else
puts '\end{reviewdummyimage}'
end
end
alias_method :numberlessimage, :indepimage
def table(lines, id = nil, caption = nil)
rows = []
sepidx = nil
lines.each_with_index do |line, idx|
if /\A[\=\{\-\}]{12}/ =~ line
# just ignore
# error "too many table separator" if sepidx
sepidx ||= idx
next
end
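# cells are tab-separated; a leading "." escapes a cell (e.g. an
# empty-looking cell or one that should begin with a literal dot)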
rows.push(line.strip.split(/\t+/).map { |s| s.sub(/\A\./, '') })
end
rows = adjust_n_cols(rows)
begin
table_header(id, caption) if caption.present?
rescue KeyError
error "no such table: #{id}"
end
return if rows.empty?
table_begin(rows.first.size)
if sepidx
sepidx.times do
cno = -1
tr(rows.shift.map do |s|
cno += 1
th(s, @cellwidth[cno])
end)
end
rows.each do |cols|
cno = -1
tr(cols.map do |s|
cno += 1
td(s, @cellwidth[cno])
end)
end
else
rows.each do |cols|
h, *cs = *cols
cno = 0
tr([th(h, @cellwidth[0])] +
cs.map do |s|
cno += 1
td(s, @cellwidth[cno])
end)
end
end
table_end
end
def table_header(id, caption)
if id.nil?
if caption.present?
@table_caption = true
@doc_status[:caption] = true
if @book.config.check_version('2', exception: false)
puts "\\begin{table}[h]%%#{id}"
else
puts "\\begin{table}%%#{id}"
end
puts macro('reviewtablecaption*', compile_inline(caption))
@doc_status[:caption] = nil
end
else
if caption.present?
@table_caption = true
@doc_status[:caption] = true
if @book.config.check_version('2', exception: false)
puts "\\begin{table}[h]%%#{id}"
else
puts "\\begin{table}%%#{id}"
end
puts macro('reviewtablecaption', compile_inline(caption))
@doc_status[:caption] = nil
end
puts macro('label', table_label(id))
end
end
def table_begin(ncols)
if @latex_tsize
@tsize = @latex_tsize
end
if @tsize
if @tsize =~ /\A[\d., ]+\Z/
@cellwidth = @tsize.split(/\s*,\s*/)
@cellwidth.collect! { |i| "p{#{i}mm}" }
puts macro('begin', 'reviewtable', '|' + @cellwidth.join('|') + '|')
else
@cellwidth = separate_tsize(@tsize)
puts macro('begin', 'reviewtable', @tsize)
end
else
puts macro('begin', 'reviewtable', (['|'] * (ncols + 1)).join('l'))
@cellwidth = ['l'] * ncols
end
puts '\\hline'
end
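# Split a LaTeX column spec into per-column tokens, e.g.
#   separate_tsize("|l|p{10mm}|r|") #=> ["l", "p{10mm}", "r"]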
def separate_tsize(size)
ret = []
s = ''
brace = nil
size.split('').each do |ch|
case ch
when '|'
next
when '{'
brace = true
s << ch
when '}'
brace = nil
s << ch
ret << s
s = ''
else
if brace
s << ch
else
if s.empty?
s << ch
else
ret << s
s = ch
end
end
end
end
unless s.empty?
ret << s
end
ret
end
def table_separator
# puts '\hline'
end
def th(s, cellwidth = 'l')
if /\\\\/ =~ s
if !@book.config.check_version('2', exception: false) && cellwidth =~ /\{/
macro('reviewth', s.gsub("\\\\\n", '\\newline{}'))
else
## use shortstack for @<br>
macro('reviewth', macro('shortstack[l]', s))
end
else
macro('reviewth', s)
end
end
def td(s, cellwidth = 'l')
if /\\\\/ =~ s
if !@book.config.check_version('2', exception: false) && cellwidth =~ /\{/
s.gsub("\\\\\n", '\\newline{}')
else
## use shortstack for @<br>
macro('shortstack[l]', s)
end
else
s
end
end
def tr(rows)
print rows.join(' & ')
puts ' \\\\ \hline'
end
def table_end
puts macro('end', 'reviewtable')
puts '\end{table}' if @table_caption
@table_caption = nil
@tsize = nil
@latex_tsize = nil
@cellwidth = nil
blank
end
def emtable(lines, caption = nil)
table(lines, nil, caption)
end
def imgtable(lines, id, caption = nil, metric = nil)
unless @chapter.image(id).bound?
warn "image not bound: #{id}"
image_dummy id, caption, lines
return
end
begin
if caption.present?
@table_caption = true
@doc_status[:caption] = true
puts "\\begin{table}[h]%%#{id}"
puts macro('reviewimgtablecaption', compile_inline(caption))
@doc_status[:caption] = nil
end
puts macro('label', table_label(id))
rescue ReVIEW::KeyError
error "no such table: #{id}"
end
imgtable_image(id, caption, metric)
puts '\end{table}' if @table_caption
@table_caption = nil
blank
end
def imgtable_image(id, _caption, metric)
metrics = parse_metric('latex', metric)
# image is always bound here
puts "\\begin{reviewimage}%%#{id}"
if metrics.present?
puts "\\includegraphics[#{metrics}]{#{@chapter.image(id).path}}"
else
puts "\\includegraphics[width=\\maxwidth]{#{@chapter.image(id).path}}"
end
puts '\end{reviewimage}'
end
def quote(lines)
latex_block 'quote', lines
end
def center(lines)
latex_block 'center', lines
end
alias_method :centering, :center
def flushright(lines)
latex_block 'flushright', lines
end
def texequation(lines, id = nil, caption = '')
blank
if id
puts macro('begin', 'reviewequationblock')
if get_chap.nil?
puts macro('reviewequationcaption', "#{I18n.t('equation')}#{I18n.t('format_number_header_without_chapter', [@chapter.equation(id).number])}#{I18n.t('caption_prefix')}#{compile_inline(caption)}")
else
puts macro('reviewequationcaption', "#{I18n.t('equation')}#{I18n.t('format_number_header', [get_chap, @chapter.equation(id).number])}#{I18n.t('caption_prefix')}#{compile_inline(caption)}")
end
end
puts macro('begin', 'equation*')
lines.each do |line|
puts unescape(line)
end
puts macro('end', 'equation*')
if id
puts macro('end', 'reviewequationblock')
end
blank
end
def latex_block(type, lines)
blank
puts macro('begin', type)
blocked_lines = split_paragraph(lines)
puts blocked_lines.join("\n\n")
puts macro('end', type)
blank
end
private :latex_block
def direct(lines, fmt)
return unless fmt == 'latex'
lines.each do |line|
puts line
end
end
def comment(lines, comment = nil)
return true unless @book.config['draft']
lines ||= []
unless comment.blank?
lines.unshift escape(comment)
end
str = lines.join('\par ')
puts macro('pdfcomment', str)
end
def hr
puts '\hrule'
end
def label(id)
puts macro('label', id)
end
def pagebreak
puts '\pagebreak'
end
def blankline
puts '\vspace*{\baselineskip}'
end
def noindent
print '\noindent'
end
def inline_chapref(id)
title = super
if @book.config['chapterlink']
"\\hyperref[chap:#{id}]{#{title}}"
else
title
end
rescue KeyError
error "unknown chapter: #{id}"
nofunc_text("[UnknownChapter:#{id}]")
end
def inline_chap(id)
if @book.config['chapterlink']
"\\hyperref[chap:#{id}]{#{@book.chapter_index.number(id)}}"
else
@book.chapter_index.number(id)
end
rescue KeyError
error "unknown chapter: #{id}"
nofunc_text("[UnknownChapter:#{id}]")
end
def inline_title(id)
title = super
if @book.config['chapterlink']
"\\hyperref[chap:#{id}]{#{title}}"
else
title
end
rescue KeyError
error "unknown chapter: #{id}"
nofunc_text("[UnknownChapter:#{id}]")
end
def inline_pageref(id)
"\\pageref{#{id}}"
end
# FIXME: use TeX native label/ref.
def inline_table(id)
chapter, id = extract_chapter_id(id)
if get_chap(chapter).nil?
macro('reviewtableref', I18n.t('format_number_without_chapter', [chapter.table(id).number]), table_label(id, chapter))
else
macro('reviewtableref', I18n.t('format_number', [get_chap(chapter), chapter.table(id).number]), table_label(id, chapter))
end
rescue KeyError
error "unknown table: #{id}"
end
def inline_img(id)
chapter, id = extract_chapter_id(id)
if get_chap(chapter).nil?
macro('reviewimageref', I18n.t('format_number_without_chapter', [chapter.image(id).number]), image_label(id, chapter))
else
macro('reviewimageref', I18n.t('format_number', [get_chap(chapter), chapter.image(id).number]), image_label(id, chapter))
end
rescue KeyError
error "unknown image: #{id}"
end
def inline_eq(id)
chapter, id = extract_chapter_id(id)
if get_chap(chapter).nil?
macro('reviewequationref', I18n.t('format_number_without_chapter', [chapter.equation(id).number]))
else
macro('reviewequationref', I18n.t('format_number', [get_chap(chapter), chapter.equation(id).number]))
end
rescue KeyError
error "unknown equation: #{id}"
end
def footnote(id, content)
if @book.config['footnotetext'] || @foottext[id]
puts macro("footnotetext[#{@chapter.footnote(id).number}]", compile_inline(content.strip))
end
end
def inline_fn(id)
if @book.config['footnotetext']
macro("footnotemark[#{@chapter.footnote(id).number}]", '')
elsif @doc_status[:caption] || @doc_status[:table] || @doc_status[:column]
@foottext[id] = @chapter.footnote(id).number
macro('protect\\footnotemark', '')
else
macro('footnote', compile_inline(@chapter.footnote(id).content.strip))
end
rescue KeyError
error "unknown footnote: #{id}"
end
BOUTEN = '・'.freeze
def inline_bou(str)
str.split(//).map { |c| macro('ruby', escape(c), macro('textgt', BOUTEN)) }.join('\allowbreak')
end
def compile_ruby(base, ruby)
macro('ruby', escape(base), escape(ruby).gsub('\\textbar{}', '|'))
end
# math
def inline_m(str)
if @book.config.check_version('2', exception: false)
" $#{str}$ "
else
"$#{str}$"
end
end
# hidden index
def inline_hi(str)
index(str)
end
# index -> italic
def inline_i(str)
if @book.config.check_version('2', exception: false)
macro('textit', escape(str))
else
macro('reviewit', escape(str))
end
end
# index
def inline_idx(str)
escape(str) + index(str)
end
# hidden index
def inline_hidx(str)
index(str)
end
# bold
def inline_b(str)
if @book.config.check_version('2', exception: false)
macro('textbf', escape(str))
else
macro('reviewbold', escape(str))
end
end
# line break
def inline_br(_str)
"\\\\\n"
end
def inline_dtp(_str)
# ignore
''
end
## @<code> is same as @<tt>
def inline_code(str)
if @book.config.check_version('2', exception: false)
macro('texttt', escape(str))
else
macro('reviewcode', escape(str))
end
end
def nofunc_text(str)
escape(str)
end
def inline_tt(str)
if @book.config.check_version('2', exception: false)
macro('texttt', escape(str))
else
macro('reviewtt', escape(str))
end
end
def inline_del(str)
macro('reviewstrike', escape(str))
end
def inline_tti(str)
if @book.config.check_version('2', exception: false)
macro('texttt', macro('textit', escape(str)))
else
macro('reviewtti', escape(str))
end
end
def inline_ttb(str)
if @book.config.check_version('2', exception: false)
macro('texttt', macro('textbf', escape(str)))
else
macro('reviewttb', escape(str))
end
end
def inline_bib(id)
macro('reviewbibref', "[#{@chapter.bibpaper(id).number}]", bib_label(id))
end
def inline_hd_chap(chap, id)
n = chap.headline_index.number(id)
if n.present? && chap.number && over_secnolevel?(n)
str = I18n.t('hd_quote', [chap.headline_index.number(id), compile_inline(chap.headline(id).caption)])
else
str = I18n.t('hd_quote_without_number', compile_inline(chap.headline(id).caption))
end
if @book.config['chapterlink']
anchor = n.gsub(/\./, '-')
macro('reviewsecref', str, sec_label(anchor))
else
str
end
end
def inline_column_chap(chapter, id)
macro('reviewcolumnref',
I18n.t('column', compile_inline(chapter.column(id).caption)),
column_label(id, chapter))
rescue KeyError
error "unknown column: #{id}"
end
def inline_raw(str)
super(str)
end
def inline_sub(str)
macro('textsubscript', escape(str))
end
def inline_sup(str)
macro('textsuperscript', escape(str))
end
def inline_em(str)
macro('reviewem', escape(str))
end
def inline_strong(str)
macro('reviewstrong', escape(str))
end
def inline_u(str)
macro('reviewunderline', escape(str))
end
def inline_ami(str)
macro('reviewami', escape(str))
end
def inline_icon(id)
if @chapter.image(id).path
macro('includegraphics', @chapter.image(id).path)
else
warn "image not bound: #{id}"
"\\verb|--[[path = #{id} (#{existence(id)})]]--|"
end
end
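# @<uchar>: the argument is a hex Unicode codepoint,
# e.g. "2460" renders "①" (U+2460)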
def inline_uchar(str)
if @texcompiler && @texcompiler.start_with?('platex')
# with otf package
macro('UTF', escape(str))
else
# passthrough
[str.to_i(16)].pack('U')
end
end
def inline_comment(str)
if @book.config['draft']
macro('pdfcomment', escape(str))
else
''
end
end
def inline_tcy(str)
macro('rensuji', escape(str))
end
def inline_balloon(str)
macro('reviewballoon', escape(str))
end
def bibpaper_header(id, caption)
puts "[#{@chapter.bibpaper(id).number}] #{compile_inline(caption)}"
puts macro('label', bib_label(id))
end
def bibpaper_bibpaper(_id, _caption, lines)
print split_paragraph(lines).join
puts ''
end
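# Build a LaTeX \index entry: "<<>>" separates index levels (joined with
# "!"), and non-ASCII terms get a "reading@term" prefix (taken from the
# user dictionary or MeCab) so makeindex sorts them by pronunciation.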
def index(str)
sa = str.split('<<>>')
sa.map! do |item|
if @index_db[item]
escape_index(escape(@index_db[item])) + '@' + escape_index(escape(item))
else
if item =~ /\A[[:ascii:]]+\Z/ || @index_mecab.nil?
esc_item = escape_index(escape(item))
if esc_item != item
"#{escape_index(item)}@#{esc_item}"
else
esc_item
end
else
yomi = NKF.nkf('-w --hiragana', @index_mecab.parse(item).force_encoding('UTF-8').chomp)
escape_index(escape(yomi)) + '@' + escape_index(escape(item))
end
end
end
"\\index{#{sa.join('!')}}"
end
def compile_kw(word, alt)
if alt
macro('reviewkw', escape(word)) + "(#{escape(alt.strip)})"
else
macro('reviewkw', escape(word))
end
end
def compile_href(url, label)
if /\A[a-z]+:/ =~ url
if label
macro('href', escape_url(url), escape(label))
else
macro('url', escape_url(url))
end
else
macro('ref', url)
end
end
def latextsize(str)
@latex_tsize = str
end
def image_ext
'pdf'
end
def olnum(num)
@ol_num = num.to_i
end
end
|
visoft/ruby_odata | lib/ruby_odata/service.rb | OData.Service.find_id_metadata | ruby | def find_id_metadata(collection_name)
collection_data = @collections.fetch(collection_name)
class_metadata = @class_metadata.fetch(collection_data[:type].to_s)
key = class_metadata.select{|k,h| h.is_key }.collect{|k,h| h.name }[0]
class_metadata[key]
end | Finds the metadata associated with the given collection's first id property
Remarks: This is used for single item lookup queries using the ID, e.g. Products(1), not complex primary keys
@param [String] collection_name the name of the collection | train | https://github.com/visoft/ruby_odata/blob/ca3d441494aa2f745c7f7fb2cd90173956f73663/lib/ruby_odata/service.rb#L199-L204 | class Service
attr_reader :classes, :class_metadata, :options, :collections, :edmx, :function_imports, :response
# Creates a new instance of the Service class
#
# @param [String] service_uri the root URI of the OData service
# @param [Hash] options the options to pass to the service
# @option options [String] :username for http basic auth
# @option options [String] :password for http basic auth
# @option options [Object] :verify_ssl false if no verification, otherwise mode (OpenSSL::SSL::VERIFY_PEER is default)
# @option options [Hash] :rest_options a hash of rest-client options that will be passed to all OData::Resource.new calls
# @option options [Hash] :additional_params a hash of query string params that will be passed on all calls
# @option options [Boolean, true] :eager_partial true if queries should consume partial feeds until the feed is complete, false if explicit calls to next must be performed
def initialize(service_uri, options = {})
@uri = service_uri.gsub!(/\/?$/, '')
set_options! options
default_instance_vars!
set_namespaces
build_collections_and_classes
end
# Handles the dynamic `AddTo<EntityName>` methods as well as the collections on the service
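# e.g., for a service exposing a Products collection:
#   svc.Products(1)      # builds a query for the entity with key 1
#   svc.AddToProducts(p) # queues p for insertion (see save_changes)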
def method_missing(name, *args)
# Queries
if @collections.include?(name.to_s)
@query = build_collection_query_object(name,@additional_params, *args)
return @query
# Adds
elsif name.to_s =~ /^AddTo(.*)/
type = $1
if @collections.include?(type)
@save_operations << Operation.new("Add", $1, args[0])
else
super
end
elsif @function_imports.include?(name.to_s)
execute_import_function(name.to_s, args)
else
super
end
end
# Queues an object for deletion. To actually remove it from the server, you must call save_changes as well.
#
# @param [Object] obj the object to mark for deletion
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
def delete_object(obj)
type = obj.class.to_s
if obj.respond_to?(:__metadata) && !obj.send(:__metadata).nil?
@save_operations << Operation.new("Delete", type, obj)
else
raise OData::NotSupportedError.new "You cannot delete a non-tracked entity"
end
end
# Queues an object for update. To actually update it on the server, you must call save_changes as well.
#
# @param [Object] obj the object to queue for update
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
def update_object(obj)
type = obj.class.to_s
if obj.respond_to?(:__metadata) && !obj.send(:__metadata).nil?
@save_operations << Operation.new("Update", type, obj)
else
raise OData::NotSupportedError.new "You cannot update a non-tracked entity"
end
end
# Performs save operations (Create/Update/Delete) against the server
def save_changes
return nil if @save_operations.empty?
result = nil
begin
if @save_operations.length == 1
result = single_save(@save_operations[0])
else
result = batch_save(@save_operations)
end
# TODO: We should probably perform a check here
# to make sure everything worked before clearing it out
@save_operations.clear
return result
rescue Exception => e
handle_exception(e)
end
end
# Performs query operations (Read) against the server.
# Typically this returns an array of record instances, except in the case of count queries
# @raise [ServiceError] if there is an error when talking to the service
def execute
begin
@response = OData::Resource.new(build_query_uri, @rest_options).get
rescue Exception => e
handle_exception(e)
end
return Integer(@response.body) if @response.body =~ /\A\d+\z/
handle_collection_result(@response.body)
end
# Overridden to identify methods handled by method_missing
def respond_to?(method)
if @collections.include?(method.to_s)
return true
# Adds
elsif method.to_s =~ /^AddTo(.*)/
type = $1
if @collections.include?(type)
return true
else
super
end
# Function Imports
elsif @function_imports.include?(method.to_s)
return true
else
super
end
end
# Retrieves the next resultset of a partial result (if any). Does not honor the `:eager_partial` option.
def next
return if not partial?
handle_partial
end
# Does the most recent collection returned represent a partial collection? Will aways be false if a query hasn't executed, even if the query would have a partial
def partial?
@has_partial
end
# Lazy loads a navigation property on a model
#
# @param [Object] obj the object to fill
# @param [String] nav_prop the navigation property to fill
#
# @raise [NotSupportedError] if the `obj` isn't a tracked entity
# @raise [ArgumentError] if the `nav_prop` isn't a valid navigation property
def load_property(obj, nav_prop)
raise NotSupportedError, "You cannot load a property on an entity that isn't tracked" if obj.send(:__metadata).nil?
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property" unless obj.respond_to?(nav_prop.to_sym)
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property" unless @class_metadata[obj.class.to_s][nav_prop].nav_prop
results = OData::Resource.new(build_load_property_uri(obj, nav_prop), @rest_options).get
prop_results = build_classes_from_result(results.body)
obj.send "#{nav_prop}=", (singular?(nav_prop) ? prop_results.first : prop_results)
end
# Adds a child object to a parent object's collection
#
# @param [Object] parent the parent object
# @param [String] nav_prop the name of the navigation property to add the child to
# @param [Object] child the child object
# @raise [NotSupportedError] if the `parent` isn't a tracked entity
# @raise [ArgumentError] if the `nav_prop` isn't a valid navigation property
# @raise [NotSupportedError] if the `child` isn't a tracked entity
def add_link(parent, nav_prop, child)
raise NotSupportedError, "You cannot add a link on an entity that isn't tracked (#{parent.class})" if parent.send(:__metadata).nil?
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property for #{parent.class}" unless parent.respond_to?(nav_prop.to_sym)
raise ArgumentError, "'#{nav_prop}' is not a valid navigation property for #{parent.class}" unless @class_metadata[parent.class.to_s][nav_prop].nav_prop
raise NotSupportedError, "You cannot add a link on a child entity that isn't tracked (#{child.class})" if child.send(:__metadata).nil?
@save_operations << Operation.new("AddLink", nav_prop, parent, child)
end
private
# Constructs a QueryBuilder instance for a collection using the arguments provided.
#
# @param [String] name the name of the collection
# @param [Hash] additional_parameters the additional parameters
# @param [Array] args the arguments to use for query
def build_collection_query_object(name, additional_parameters, *args)
root = "/#{name.to_s}"
if args.empty?
#nothing to add
elsif args.size == 1
if args.first.to_s =~ /\d+/
id_metadata = find_id_metadata(name.to_s)
root << build_id_path(args.first, id_metadata)
else
root << "(#{args.first})"
end
else
root << "(#{args.join(',')})"
end
QueryBuilder.new(root, additional_parameters)
end
# Finds the metadata associated with the given collection's first id property
# Remarks: This is used for single item lookup queries using the ID, e.g. Products(1), not complex primary keys
#
# @param [String] collection_name the name of the collection
# Builds the ID expression of a given id for query
#
# @param [Object] id_value the actual value to be used
# @param [PropertyMetadata] id_metadata the property metadata object for the id
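#   e.g. an Edm.Int64 key of 5 yields "(5L)"; other types yield "(5)"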
def build_id_path(id_value, id_metadata)
if id_metadata.type == "Edm.Int64"
"(#{id_value}L)"
else
"(#{id_value})"
end
end
def set_options!(options)
@options = options
if @options[:eager_partial].nil?
@options[:eager_partial] = true
end
@rest_options = { :verify_ssl => get_verify_mode, :user => @options[:username], :password => @options[:password] }
@rest_options.merge!(options[:rest_options] || {})
@additional_params = options[:additional_params] || {}
@namespace = options[:namespace]
@json_type = options[:json_type] || 'application/json'
end
def default_instance_vars!
@collections = {}
@function_imports = {}
@save_operations = []
@has_partial = false
@next_uri = nil
end
def set_namespaces
@edmx = Nokogiri::XML(OData::Resource.new(build_metadata_uri, @rest_options).get.body)
@ds_namespaces = {
"m" => "http://schemas.microsoft.com/ado/2007/08/dataservices/metadata",
"edmx" => "http://schemas.microsoft.com/ado/2007/06/edmx",
"ds" => "http://schemas.microsoft.com/ado/2007/08/dataservices",
"atom" => "http://www.w3.org/2005/Atom"
}
# Get the edm namespace from the edmx
edm_ns = @edmx.xpath("edmx:Edmx/edmx:DataServices/*", @ds_namespaces).first.namespaces['xmlns'].to_s
@ds_namespaces.merge! "edm" => edm_ns
end
# Gets ssl certificate verification mode, or defaults to verify_peer
def get_verify_mode
if @options[:verify_ssl].nil?
return OpenSSL::SSL::VERIFY_PEER
else
return @options[:verify_ssl]
end
end
# Build the classes required by the metadata
def build_collections_and_classes
@classes = Hash.new
@class_metadata = Hash.new # This is used to store property information about a class
# Build complex types first, these will be used for entities
complex_types = @edmx.xpath("//edm:ComplexType", @ds_namespaces) || []
complex_types.each do |c|
name = qualify_class_name(c['Name'])
props = c.xpath(".//edm:Property", @ds_namespaces)
methods = props.collect { |p| p['Name'] } # Standard Properties
@classes[name] = ClassBuilder.new(name, methods, [], self, @namespace).build unless @classes.keys.include?(name)
end
entity_types = @edmx.xpath("//edm:EntityType", @ds_namespaces)
entity_types.each do |e|
next if e['Abstract'] == "true"
klass_name = qualify_class_name(e['Name'])
methods = collect_properties(klass_name, e, @edmx)
nav_props = collect_navigation_properties(klass_name, e, @edmx)
@classes[klass_name] = ClassBuilder.new(klass_name, methods, nav_props, self, @namespace).build unless @classes.keys.include?(klass_name)
end
# Fill in the collections instance variable
collections = @edmx.xpath("//edm:EntityContainer/edm:EntitySet", @ds_namespaces)
collections.each do |c|
entity_type = c["EntityType"]
@collections[c["Name"]] = { :edmx_type => entity_type, :type => convert_to_local_type(entity_type) }
end
build_function_imports
end
# Parses the function imports and fills the @function_imports collection
def build_function_imports
# Fill in the function imports
functions = @edmx.xpath("//edm:EntityContainer/edm:FunctionImport", @ds_namespaces)
functions.each do |f|
http_method_attribute = f.xpath("@m:HttpMethod", @ds_namespaces).first # HttpMethod is no longer required http://www.odata.org/2011/10/actions-in-odata/
is_side_effecting_attribute = f.xpath("@edm:IsSideEffecting", @ds_namespaces).first
http_method = 'POST' # default to POST
if http_method_attribute
http_method = http_method_attribute.content
elsif is_side_effecting_attribute
is_side_effecting = is_side_effecting_attribute.content
http_method = is_side_effecting ? 'POST' : 'GET'
end
return_type = f["ReturnType"]
inner_return_type = nil
unless return_type.nil?
return_type = (return_type =~ /^Collection/) ? Array : convert_to_local_type(return_type)
if f["ReturnType"] =~ /\((.*)\)/
inner_return_type = convert_to_local_type($~[1])
end
end
params = f.xpath("edm:Parameter", @ds_namespaces)
parameters = nil
if params.length > 0
parameters = {}
params.each do |p|
parameters[p["Name"]] = p["Type"]
end
end
@function_imports[f["Name"]] = {
:http_method => http_method,
:return_type => return_type,
:inner_return_type => inner_return_type,
:parameters => parameters }
end
end
# Converts the EDMX model type to the local model type
def convert_to_local_type(edmx_type)
return edm_to_ruby_type(edmx_type) if edmx_type =~ /^Edm/
klass_name = qualify_class_name(edmx_type.split('.').last)
klass_name.camelize.constantize
end
# Converts a class name to its fully qualified name (if applicable) and returns the new name
def qualify_class_name(klass_name)
unless @namespace.nil? || @namespace.blank? || klass_name.include?('::')
namespaces = @namespace.split(/\.|::/)
namespaces << klass_name
klass_name = namespaces.join '::'
end
klass_name.camelize
end
# Builds the metadata need for each property for things like feed customizations and navigation properties
def build_property_metadata(props, keys=[])
metadata = {}
props.each do |property_element|
prop_meta = PropertyMetadata.new(property_element)
prop_meta.is_key = keys.include?(prop_meta.name)
# If this is a navigation property, we need to add the association to the property metadata
prop_meta.association = Association.new(property_element, @edmx) if prop_meta.nav_prop
metadata[prop_meta.name] = prop_meta
end
metadata
end
# Handle parsing of OData Atom result and return an array of Entry classes
def handle_collection_result(result)
results = build_classes_from_result(result)
while partial? && @options[:eager_partial]
results.concat handle_partial
end
results
end
# Handles errors from the OData service
def handle_exception(e)
raise e unless defined?(e.response) && e.response != nil
code = e.response[:status]
error = Nokogiri::XML(e.response[:body])
message = if error.xpath("m:error/m:message", @ds_namespaces).first
error.xpath("m:error/m:message", @ds_namespaces).first.content
else
"Server returned error but no message."
end
raise ServiceError.new(code), message
end
# Loops through the standard properties (non-navigation) for a given class and returns the appropriate list of methods
def collect_properties(klass_name, element, doc)
props = element.xpath(".//edm:Property", @ds_namespaces)
key_elements = element.xpath(".//edm:Key//edm:PropertyRef", @ds_namespaces)
keys = key_elements.collect { |k| k['Name'] }
@class_metadata[klass_name] = build_property_metadata(props, keys)
methods = props.collect { |p| p['Name'] }
unless element["BaseType"].nil?
base = element["BaseType"].split(".").last()
baseType = doc.xpath("//edm:EntityType[@Name=\"#{base}\"]", @ds_namespaces).first()
props = baseType.xpath(".//edm:Property", @ds_namespaces)
@class_metadata[klass_name].merge!(build_property_metadata(props))
methods = methods.concat(props.collect { |p| p['Name']})
end
methods
end
# Similar to +collect_properties+, but handles the navigation properties
def collect_navigation_properties(klass_name, element, doc)
nav_props = element.xpath(".//edm:NavigationProperty", @ds_namespaces)
@class_metadata[klass_name].merge!(build_property_metadata(nav_props))
nav_props.collect { |p| p['Name'] }
end
# Helper to loop through a result and create an instance for each entity in the results
def build_classes_from_result(result)
doc = Nokogiri::XML(result)
is_links = doc.at_xpath("/ds:links", @ds_namespaces)
return parse_link_results(doc) if is_links
entries = doc.xpath("//atom:entry[not(ancestor::atom:entry)]", @ds_namespaces)
extract_partial(doc)
results = []
entries.each do |entry|
results << entry_to_class(entry)
end
return results
end
# Converts an XML Entry into a class
def entry_to_class(entry)
# Retrieve the class name from the fully qualified name (the last string after the last dot)
klass_name = entry.xpath("./atom:category/@term", @ds_namespaces).to_s.split('.')[-1]
# Is the category missing? See if there is a title that we can use to build the class
if klass_name.nil?
title = entry.xpath("./atom:title", @ds_namespaces).first
return nil if title.nil?
klass_name = title.content.to_s
end
return nil if klass_name.nil?
properties = entry.xpath("./atom:content/m:properties/*", @ds_namespaces)
klass = @classes[qualify_class_name(klass_name)].new
# Fill metadata
meta_id = entry.xpath("./atom:id", @ds_namespaces)[0].content
klass.send :__metadata=, { :uri => meta_id }
# Fill properties
for prop in properties
prop_name = prop.name
klass.send "#{prop_name}=", parse_value_xml(prop)
end
# Fill properties represented outside of the properties collection
@class_metadata[qualify_class_name(klass_name)].select { |k,v| v.fc_keep_in_content == false }.each do |k, meta|
if meta.fc_target_path == "SyndicationTitle"
title = entry.xpath("./atom:title", @ds_namespaces).first
klass.send "#{meta.name}=", title.content
elsif meta.fc_target_path == "SyndicationSummary"
summary = entry.xpath("./atom:summary", @ds_namespaces).first
klass.send "#{meta.name}=", summary.content
end
end
inline_links = entry.xpath("./atom:link[m:inline]", @ds_namespaces)
for link in inline_links
# TODO: Use the metadata's associations to determine the multiplicity instead of this "hack"
property_name = link.attributes['title'].to_s
if singular?(property_name)
inline_entry = link.xpath("./m:inline/atom:entry", @ds_namespaces).first
inline_klass = build_inline_class(klass, inline_entry, property_name)
klass.send "#{property_name}=", inline_klass
else
inline_classes, inline_entries = [], link.xpath("./m:inline/atom:feed/atom:entry", @ds_namespaces)
for inline_entry in inline_entries
# Build the class
inline_klass = entry_to_class(inline_entry)
# Add the property to the temp collection
inline_classes << inline_klass
end
# Assign the array of classes to the property
property_name = link.xpath("@title", @ds_namespaces)
klass.send "#{property_name}=", inline_classes
end
end
klass
end
# Tests for and extracts the next href of a partial
def extract_partial(doc)
next_links = doc.xpath('//atom:link[@rel="next"]', @ds_namespaces)
@has_partial = next_links.any?
if @has_partial
uri = Addressable::URI.parse(next_links[0]['href'])
uri.query_values = uri.query_values.merge @additional_params unless @additional_params.empty?
@next_uri = uri.to_s
end
end
def handle_partial
if @next_uri
result = OData::Resource.new(@next_uri, @rest_options).get
results = handle_collection_result(result.body)
end
results
end
# Handle link results
def parse_link_results(doc)
uris = doc.xpath("/ds:links/ds:uri", @ds_namespaces)
results = []
uris.each do |uri_el|
link = uri_el.content
results << URI.parse(link)
end
results
end
# Build URIs
def build_metadata_uri
uri = "#{@uri}/$metadata"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_query_uri
"#{@uri}#{@query.query}"
end
def build_save_uri(operation)
uri = "#{@uri}/#{operation.klass_name}"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_add_link_uri(operation)
uri = operation.klass.send(:__metadata)[:uri].dup
uri << "/$links/#{operation.klass_name}"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_resource_uri(operation)
uri = operation.klass.send(:__metadata)[:uri].dup
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_batch_uri
uri = "#{@uri}/$batch"
uri << "?#{@additional_params.to_query}" unless @additional_params.empty?
uri
end
def build_load_property_uri(obj, property)
uri = obj.__metadata[:uri].dup
uri << "/#{property}"
uri
end
def build_function_import_uri(name, params)
uri = "#{@uri}/#{name}"
params.merge! @additional_params
uri << "?#{params.to_query}" unless params.empty?
uri
end
def build_inline_class(klass, entry, property_name)
# Build the class
inline_klass = entry_to_class(entry)
# Add the property
klass.send "#{property_name}=", inline_klass
end
# Used to link a child object to its parent and vice-versa after an add_link operation
def link_child_to_parent(operation)
child_collection = operation.klass.send("#{operation.klass_name}") || []
child_collection << operation.child_klass
operation.klass.send("#{operation.klass_name}=", child_collection)
# Attach the parent to the child
parent_meta = @class_metadata[operation.klass.class.to_s][operation.klass_name]
child_meta = @class_metadata[operation.child_klass.class.to_s]
# Find the matching relationship on the child object
child_properties = Helpers.normalize_to_hash(
child_meta.select { |k, prop|
prop.nav_prop &&
prop.association.relationship == parent_meta.association.relationship })
child_property_to_set = child_properties.keys.first # There should be only one match
# TODO: Handle many to many scenarios where the child property is an enumerable
operation.child_klass.send("#{child_property_to_set}=", operation.klass)
end
def single_save(operation)
if operation.kind == "Add"
save_uri = build_save_uri(operation)
json_klass = operation.klass.to_json(:type => :add)
post_result = OData::Resource.new(save_uri, @rest_options).post json_klass, {:content_type => @json_type}
return build_classes_from_result(post_result.body)
elsif operation.kind == "Update"
update_uri = build_resource_uri(operation)
json_klass = operation.klass.to_json
update_result = OData::Resource.new(update_uri, @rest_options).put json_klass, {:content_type => @json_type}
return (update_result.status == 204)
elsif operation.kind == "Delete"
delete_uri = build_resource_uri(operation)
delete_result = OData::Resource.new(delete_uri, @rest_options).delete
return (delete_result.status == 204)
elsif operation.kind == "AddLink"
save_uri = build_add_link_uri(operation)
json_klass = operation.child_klass.to_json(:type => :link)
post_result = OData::Resource.new(save_uri, @rest_options).post json_klass, {:content_type => @json_type}
# Attach the child to the parent
link_child_to_parent(operation) if (post_result.status == 204)
return(post_result.status == 204)
end
end
# Batch Saves
def generate_guid
rand(36**12).to_s(36).insert(4, "-").insert(9, "-")
end
def batch_save(operations)
batch_num = generate_guid
changeset_num = generate_guid
batch_uri = build_batch_uri
body = build_batch_body(operations, batch_num, changeset_num)
result = OData::Resource.new( batch_uri, @rest_options).post body, {:content_type => "multipart/mixed; boundary=batch_#{batch_num}"}
# TODO: More result validation needs to be done.
# The result returns HTTP 202 even if there is an error in the batch
return (result.status == 202)
end
def build_batch_body(operations, batch_num, changeset_num)
# Header
body = "--batch_#{batch_num}\n"
body << "Content-Type: multipart/mixed;boundary=changeset_#{changeset_num}\n\n"
# Operations
operations.each do |operation|
body << build_batch_operation(operation, changeset_num)
body << "\n"
end
# Footer
body << "\n\n--changeset_#{changeset_num}--\n"
body << "--batch_#{batch_num}--"
return body
end
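# Rough shape of the generated multipart payload (illustrative only; the
# real boundary markers come from generate_guid):
#
#   --batch_8f0d-3c1a
#   Content-Type: multipart/mixed;boundary=changeset_77ab-90ce
#
#   --changeset_77ab-90ce
#   Content-Type: application/http
#   Content-Transfer-Encoding: binary
#
#   POST http://example.com/Service.svc/Products HTTP/1.1
#   ...
#
#   --changeset_77ab-90ce--
#   --batch_8f0d-3c1a--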
def build_batch_operation(operation, changeset_num)
accept_headers = "Accept-Charset: utf-8\n"
accept_headers << "Content-Type: application/json;charset=utf-8\n" unless operation.kind == "Delete"
accept_headers << "\n"
content = "--changeset_#{changeset_num}\n"
content << "Content-Type: application/http\n"
content << "Content-Transfer-Encoding: binary\n\n"
if operation.kind == "Add"
save_uri = "#{@uri}/#{operation.klass_name}"
json_klass = operation.klass.to_json(:type => :add)
content << "POST #{save_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
elsif operation.kind == "Update"
update_uri = operation.klass.send(:__metadata)[:uri]
json_klass = operation.klass.to_json
content << "PUT #{update_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
elsif operation.kind == "Delete"
delete_uri = operation.klass.send(:__metadata)[:uri]
content << "DELETE #{delete_uri} HTTP/1.1\n"
content << accept_headers
elsif operation.kind == "AddLink"
save_uri = build_add_link_uri(operation)
json_klass = operation.child_klass.to_json(:type => :link)
content << "POST #{save_uri} HTTP/1.1\n"
content << accept_headers
content << json_klass
link_child_to_parent(operation)
end
return content
end
# Complex Types
def complex_type_to_class(complex_type_xml)
type = Helpers.get_namespaced_attribute(complex_type_xml, 'type', 'm')
is_collection = false
# Extract the class name in case this is a Collection
if type =~ /\(([^)]*)\)/m
type = $~[1]
is_collection = true
collection = []
end
klass_name = qualify_class_name(type.split('.')[-1])
if is_collection
# extract the elements from the collection
elements = complex_type_xml.xpath(".//d:element", @namespaces)
elements.each do |e|
if type.match(/^Edm/)
collection << parse_value(e.content, type)
else
element = @classes[klass_name].new
fill_complex_type_properties(e, element)
collection << element
end
end
return collection
else
klass = @classes[klass_name].new
# Fill in the properties
fill_complex_type_properties(complex_type_xml, klass)
return klass
end
end
# Helper method for complex_type_to_class
def fill_complex_type_properties(complex_type_xml, klass)
properties = complex_type_xml.xpath(".//*")
properties.each do |prop|
klass.send "#{prop.name}=", parse_value_xml(prop)
end
end
# Field Converters
# Handles parsing datetimes from a string
def parse_date(sdate)
# Assume this is UTC if no timezone is specified
sdate = sdate + "Z" unless sdate.match(/(Z|[+-]\d{2}:\d{2})$/)
# This is to handle older versions of Ruby (e.g. ruby 1.8.7 (2010-12-23 patchlevel 330) [i386-mingw32])
# See http://makandra.com/notes/1017-maximum-representable-value-for-a-ruby-time-object
# In recent versions of Ruby, Time has a much larger range
begin
result = Time.parse(sdate)
rescue ArgumentError
result = DateTime.parse(sdate)
end
return result
end
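# Illustrative behavior (not from the original source): naive timestamps
# are assumed to be UTC, and dates outside the platform Time range fall
# back to DateTime on older 32-bit Rubies.
#
#   parse_date("2012-06-01T10:00:00")  # "Z" appended, parsed as UTC Time
#   parse_date("0001-01-01T00:00:00Z") # DateTime where Time cannot represent it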
# Parses a value into the proper type based on an xml property element
def parse_value_xml(property_xml)
property_type = Helpers.get_namespaced_attribute(property_xml, 'type', 'm')
property_null = Helpers.get_namespaced_attribute(property_xml, 'null', 'm')
if property_type.nil? || (property_type && property_type.match(/^Edm/))
return parse_value(property_xml.content, property_type, property_null)
end
complex_type_to_class(property_xml)
end
def parse_value(content, property_type = nil, property_null = nil)
# Handle anything marked as null
return nil if !property_null.nil? && property_null == "true"
# Handle a nil property type, this is a string
return content if property_type.nil?
# Handle integers
return content.to_i if property_type.match(/^Edm.Int/)
# Handle decimals
return content.to_d if property_type.match(/Edm.Decimal/)
# Handle DateTimes
# return Time.parse(property_xml.content) if property_type.match(/Edm.DateTime/)
return parse_date(content) if property_type.match(/Edm.DateTime/)
# If we can't parse the value, just return the element's content
content
end
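# Example conversions (illustrative):
#
#   parse_value("42", "Edm.Int32")               # => 42
#   parse_value("19.99", "Edm.Decimal")          # => BigDecimal("19.99")
#   parse_value("anything", "Edm.Int32", "true") # => nil (the null flag wins)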
# Parses a value into the proper type based on a specified return type
def parse_primative_type(value, return_type)
return value.to_i if return_type == Fixnum
return value.to_d if return_type == Float
return parse_date(value.to_s) if return_type == Time
return value.to_s
end
# Converts an edm type (string) to a ruby type
def edm_to_ruby_type(edm_type)
return String if edm_type =~ /Edm.String/
return Fixnum if edm_type =~ /^Edm.Int/
return Float if edm_type =~ /Edm.Decimal/
return Time if edm_type =~ /Edm.DateTime/
return String
end
# Method Missing Handlers
# Executes an import function
def execute_import_function(name, *args)
func = @function_imports[name]
# Check the args making sure that more weren't passed in than the function needs
param_count = func[:parameters].nil? ? 0 : func[:parameters].count
arg_count = args.empty? || args[0].nil? ? 0 : args[0].count
if arg_count > param_count
raise ArgumentError, "wrong number of arguments (#{arg_count} for #{param_count})"
end
# Convert the parameters to a hash
params = {}
func[:parameters].keys.each_with_index { |key, i| params[key] = args[0][i] } unless func[:parameters].nil?
function_uri = build_function_import_uri(name, params)
result = OData::Resource.new(function_uri, @rest_options).send(func[:http_method].downcase, {})
# Is this a 204 (No content) result?
return true if result.status == 204
# No? Then we need to parse the results. There are 4 kinds...
if func[:return_type] == Array
# a collection of entites
return build_classes_from_result(result.body) if @classes.include?(func[:inner_return_type].to_s)
# a collection of native types
elements = Nokogiri::XML(result.body).xpath("//ds:element", @ds_namespaces)
results = []
elements.each do |e|
results << parse_primative_type(e.content, func[:inner_return_type])
end
return results
end
# a single entity
if @classes.include?(func[:return_type].to_s)
entry = Nokogiri::XML(result.body).xpath("atom:entry[not(ancestor::atom:entry)]", @ds_namespaces)
return entry_to_class(entry)
end
# or a single native type
unless func[:return_type].nil?
e = Nokogiri::XML(result.body).xpath("/*").first
return parse_primative_type(e.content, func[:return_type])
end
# Nothing could be parsed, so just return if we got a 200 or not
return (result.status == 200)
end
# Helpers
def singular?(value)
value.singularize == value
end
end
|
PierreRambaud/gemirro | lib/gemirro/cache.rb | Gemirro.Cache.cache | ruby | def cache(key)
key_hash = key2hash(key)
read(key_hash) || (write(key_hash, yield) if block_given?)
end | Cache data
@param [String] key
@return [Mixed] | train | https://github.com/PierreRambaud/gemirro/blob/5c6b5abb5334ed3beb256f6764bc336e2cf2dc21/lib/gemirro/cache.rb#L54-L57 | class Cache
attr_reader :root_path
##
# Initialize cache root path
#
# @param [String] path
#
def initialize(path)
@root_path = path
create_root_path
end
##
# Create root path
#
def create_root_path
FileUtils.mkdir_p(@root_path)
end
##
# Flush cache directory
#
def flush
FileUtils.rm_rf(@root_path)
create_root_path
end
##
# Flush key
#
# @param [String] key
#
def flush_key(key)
path = key_path(key2hash(key))
FileUtils.rm_f(path)
end
##
# Cache data
#
# @param [String] key
#
# @return [Mixed]
#
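# @example Memoize an expensive lookup (illustrative; +fetch_gem_list+ is hypothetical)
#   cache = Gemirro::Cache.new('/tmp/gemirro')
#   cache.cache('gems') { fetch_gem_list } # computed and written on first call
#   cache.cache('gems')                    # read back from disk afterwards
#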
private
##
# Convert key to hash
#
# @param [String] key
#
# @return [String]
#
def key2hash(key)
Digest::MD5.hexdigest(key)
end
##
# Path from key hash
#
# @param [String] key_hash
#
# @return [String]
#
def key_path(key_hash)
File.join(@root_path, key_hash)
end
##
# Read cache
#
# @param [String] key_hash
#
# @return [Mixed]
#
def read(key_hash)
path = key_path(key_hash)
Marshal.load(File.open(path)) if File.exist?(path)
end
##
# write cache
#
# @param [String] key_hash
# @param [Mixed] value
#
# @return [Mixed]
#
def write(key_hash, value)
return value if value.nil? || value.empty?
File.open(key_path(key_hash), 'wb') do |f|
Marshal.dump(value, f)
end
value
end
end
|
kontena/kontena | server/app/services/docker/streaming_executor.rb | Docker.StreamingExecutor.start | ruby | def start(ws)
@ws = ws
@ws.on(:message) do |event|
on_websocket_message(event.data)
end
@ws.on(:error) do |exc|
warn exc
end
@ws.on(:close) do |event|
on_websocket_close(event.code, event.reason)
end
started!
end | Does not raise.
@param ws [Faye::Websocket] | train | https://github.com/kontena/kontena/blob/5cb5b4457895985231ac88e78c8cbc5a8ffb5ec7/server/app/services/docker/streaming_executor.rb#L137-L153 | class StreamingExecutor
include Logging
# @param [Container] container
# @param [Boolean] shell
# @param [Boolean] interactive
# @param [Boolean] tty
def initialize(container, shell: false, interactive: false, tty: false)
@container = container
@shell = shell
@interactive = interactive
@tty = tty
@rpc_client = container.host_node.rpc_client
@exec_session = nil
@subscription = nil
@started = false
end
# @return [Boolean]
def interactive?
!!@interactive
end
# @return [Boolean]
def tty?
!!@tty
end
def started!
@started = true
end
# start() was successful
# @return [Boolean]
def started?
@started
end
def running!
@running = true
end
# exec is running, and ready to accept input/tty_resize
#
# @return [Boolean]
def running?
@running
end
# Valid after setup()
#
# @return [String] container exec RPC UUID
def exec_id
@exec_session['id']
end
# Setup RPC state
def setup
@exec_session = exec_create
@subscription = subscribe_to_exec(@exec_session['id'])
end
# @return [Hash{:id => String}]
def exec_create
@rpc_client.request('/containers/create_exec', @container.container_id).tap do |session|
debug { "exec create: #{session.inspect}" }
end
end
# @param cmd [Array<String>]
# @param shell [Boolean] wrap the command in /bin/sh -c
# @param tty [Boolean]
# @param stdin [Boolean]
def exec_run(cmd, shell: false, tty: false, stdin: false)
if shell
cmd = ['/bin/sh', '-c', cmd.join(' ')]
end
debug { "exec #{self.exec_id} run with shell=#{shell} tty=#{tty} stdin=#{stdin}: #{cmd.inspect}" }
@rpc_client.notify('/containers/run_exec', self.exec_id, cmd, tty, stdin)
running!
end
# @param width [Integer]
# @param height [Integer]
def exec_resize(width, height)
raise ArgumentError, "width must be integer" unless Integer === width
raise ArgumentError, "height must be integer" unless Integer === height
raise ArgumentError, "width and height must be integers > 0" unless width >= 0 && height >= 0
tty_size = { 'width' => width, 'height' => height }
debug { "exec #{self.exec_id} resize: #{tty_size.inspect}" }
@rpc_client.notify('/containers/tty_resize', self.exec_id, tty_size)
end
# @param stdin [String]
def exec_input(stdin)
debug { "exec #{self.exec_id} input: #{stdin.inspect}" }
@rpc_client.notify('/containers/tty_input', self.exec_id, stdin)
end
def exec_terminate
debug { "exec #{self.exec_id} terminate" }
@rpc_client.notify('/containers/terminate_exec', self.exec_id)
end
# @return [MongoPubsub::Subscription]
def subscribe_to_exec(id)
MongoPubsub.subscribe("container_exec:#{id}") do |data|
debug { "subscribe exec #{id}: #{data.inspect}" }
if data.has_key?('error')
websocket_write(error: data['error'])
websocket_close(4000)
elsif data.has_key?('exit')
websocket_write(exit: data['exit'])
websocket_close(1000)
elsif data.has_key?('stream')
websocket_write(stream: data['stream'], chunk: data['chunk'])
else
error "invalid container exec #{id} RPC: #{data.inspect}"
end
end
end
# Does not raise.
#
# @param data [Hash] JSON-serializable frame to write to the client
def websocket_write(data)
debug { "websocket write: #{data.inspect}" }
msg = JSON.dump(data)
EventMachine.schedule {
@ws.send(msg)
}
end
# @param code [Integer]
# @param reason [String]
def websocket_close(code, reason = nil)
debug { "websocket close with code #{code}: #{reason}"}
EventMachine.schedule {
@ws.close(code, reason)
}
end
def on_websocket_message(msg)
data = JSON.parse(msg)
debug { "websocket message: #{data.inspect}"}
if data.has_key?('cmd')
fail "unexpected cmd: already running" if running?
exec_run(data['cmd'], shell: @shell, tty: @tty, stdin: @interactive)
end
if data.has_key?('stdin')
fail "unexpected stdin: not interactive" unless interactive?
fail "unexpected stdin: not running" unless running?
exec_input(data['stdin'])
end
if data.has_key?('tty_size')
fail "unexpected tty_size: not a tty" unless tty?
fail "unexpected tty_size: not running" unless running?
exec_resize(data['tty_size']['width'], data['tty_size']['height'])
end
rescue JSON::ParserError => exc
warn "invalid websocket JSON: #{exc}"
abort exc
rescue => exc
error exc
abort exc
end
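# Client frames this handler accepts (illustrative, derived from the
# branches above; keys are the JSON message fields):
#
#   {"cmd": ["/bin/bash"]}                      # start the exec
#   {"stdin": "echo hello\n"}                   # forward input (interactive only)
#   {"tty_size": {"width": 120, "height": 40}}  # resize (tty only)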
# @param code [Integer]
# @param reason [String]
def on_websocket_close(code, reason)
debug "websocket closed with code #{code}: #{reason}"
self.teardown
end
# Abort exec on error.
#
# Closes client websocket, terminates the exec RPC.
#
# @param exc [Exception]
def abort(exc)
websocket_close(4000, "#{exc.class}: #{exc}")
self.teardown
end
# Release resources from #setup()
#
# Can be called multiple times (abort -> on_websocket_close)
def teardown
if @subscription
@subscription.terminate
@subscription = nil
end
if @exec_session
exec_terminate
@exec_session = nil
end
end
end
|
tbpgr/tudu | lib/tudu_dsl.rb | Tudu.Dsl.target_type | ruby | def target_type(target_type)
return if target_type.nil?
return unless [String, Symbol].include?(target_type.class)
target_type = target_type.to_sym if target_type.instance_of? String
return unless TARGET_TYPES.include? target_type
@_target_type = target_type
end | == set notice target type
=== Params
- target_type: target notice type | train | https://github.com/tbpgr/tudu/blob/4098054b836c0d0b18f89ae71a449e2fe26a0647/lib/tudu_dsl.rb#L27-L33 | class Dsl
# == TARGET_TYPES
# notice target types
# === types
#- none: no notice
#- mail: mail notice
TARGET_TYPES = { none: :none, mail: :mail }
# == notice target type
attr_accessor :_target_type
# == notice targets
attr_accessor :_targets
# == initialize Dsl
def initialize
@_target_type = TARGET_TYPES[:none]
@_targets = []
end
# == set notice targets
# === Params
#- target_type: array of notice targets
def targets(target_type)
return if target_type.nil?
return unless target_type.instance_of? Array
@_targets = target_type
end
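# Usage sketch (illustrative; a Tudufile is typically evaluated against
# this DSL):
#
#   dsl = Tudu::Dsl.new
#   dsl.target_type :mail
#   dsl.targets ['dev1@example.com', 'dev2@example.com']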
end
|
kikonen/capybara-ng | lib/angular/dsl.rb | Angular.DSL.ng_repeater_column | ruby | def ng_repeater_column(repeater, binding, opt = {})
opt[:root_selector] ||= ng_root_selector
row = ng.row(opt)
ng_repeater_columns(repeater, binding, opt)[row]
end | Node for column binding value in row
@param opt
- :row
- :root_selector
- :wait
@return nth node | train | https://github.com/kikonen/capybara-ng/blob/a24bc9570629fe2bb441763803dd8aa0d046d46d/lib/angular/dsl.rb#L309-L313 | module DSL
def ng
Capybara.current_session.ng
end
#
# Get or set selector to find ng-app for current capybara test session
#
# TIP: try using '[ng-app]', which will find ng-app as attribute anywhere.
#
# @param root_selector if nil then return current value without change
# @return test specific selector to find ng-app,
# by default global ::Angular.root_selector is used.
#
def ng_root_selector(root_selector = nil)
opt = ng.page.ng_session_options
if root_selector
opt[:root_selector] = root_selector
end
opt[:root_selector] || ::Angular.root_selector
end
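# Example (illustrative):
#
#   ng_root_selector('[ng-app]') # match ng-app used as an attribute anywhere
#   ng_root_selector             # => '[ng-app]' for the rest of this session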
#
# Setup AngularJS test hooks in web page. In normal usage there is no need
# to use this
#
def ng_install
ng.install
end
#
# Wait that AngularJS is ready
#
def ng_wait
ng.ng_wait
end
#
# @param opt
# - :root_selector
# - :wait
# @return current location absolute url
#
def ng_location_abs(opt = {})
selector = opt.delete(:root_selector) || ng_root_selector
ng.make_call :getLocationAbsUrl, [selector], opt
end
#
# @param opt
# - :root_selector
# - :wait
# @return current location absolute url
#
def ng_location(opt = {})
selector = opt.delete(:root_selector) || ng_root_selector
ng.make_call :getLocation, [selector], opt
end
#
# @param opt
# - :root_selector
# - :wait
# @return current location
#
def ng_set_location(url, opt = {})
selector = opt.delete(:root_selector) || ng_root_selector
ng.make_call :setLocation, [selector, url], opt
end
#
# @param opt
# - :root_selector
# - :wait
# @return eval result
#
def ng_eval(expr, opt = {})
selector = opt.delete(:root_selector) || ng_root_selector
ng.make_call :evaluate, [selector, expr], opt
end
#
# Does binding exist
#
# @param opt
# - :exact
# - :root_selector
# - :wait
# @return true | false
#
def has_ng_binding?(binding, opt = {})
ng_bindings(binding, opt)
true
rescue NotFound
false
end
#
# Node for nth binding match
#
# @param opt
# - :row
# - :exact
# - :root_selector
# - :wait
# @return nth node
#
def ng_binding(binding, opt = {})
opt[:root_selector] ||= ng_root_selector
row = ng.row(opt)
ng_bindings(binding, opt)[row]
end
#
# All nodes matching binding
#
# @param opt
# - :exact
# - :root_selector
# - :wait
# @return [node, ...]
#
def ng_bindings(binding, opt = {})
opt[:root_selector] ||= ng_root_selector
ng.get_nodes_2 :findBindingsIds, [binding, opt[:exact] == true], opt
end
#
# Does model exist
#
# @param opt
# - :root_selector
# - :wait
# @return true | false
#
def has_ng_model?(model, opt = {})
ng_models(model, opt)
true
rescue NotFound
false
end
#
# Does model not exist
#
# @param opt
# - :root_selector
# - :wait
# @return true | false
#
def has_no_ng_model?(model, opt = {})
!has_ng_model?(model, opt)
end
#
# Node for nth model match
#
# @param opt
# - :row
# - :root_selector
# - :wait
# @return nth node
#
def ng_model(model, opt = {})
opt[:root_selector] ||= ng_root_selector
row = ng.row(opt)
ng_models(model, opt)[row]
end
#
# All nodes matching model
#
# @param opt
# - :root_selector
# - :wait
# @return [node, ...]
#
def ng_models(model, opt = {})
opt[:root_selector] ||= ng_root_selector
ng.get_nodes_2 :findByModelIds, [model], opt
end
#
# Does option exist
#
# @param opt
# - :root_selector
# - :wait
# @return true | false
#
def has_ng_options?(options, opt = {})
opt[:root_selector] ||= ng_root_selector
ng_options(options, opt)
true
rescue NotFound
false
end
#
# Does option not exist
#
# @param opt
# - :root_selector
# - :wait
# @return true | false
#
def has_no_ng_options?(options, opt = {})
!has_ng_options?(options, opt)
end
#
# Node for nth option
#
# @param opt
# - :row
# - :root_selector
# - :wait
# @return nth node
#
def ng_option(options, opt = {})
opt[:root_selector] ||= ng_root_selector
row = ng.row(opt)
ng_options(options, opt)[row]
end
#
# All option values matching option
#
# @param opt
# - :root_selector
# - :wait
# @return [node, ...]
#
def ng_options(options, opt = {})
opt[:root_selector] ||= ng_root_selector
ng.get_nodes_2(:findByOptionsIds, [options], opt)
end
#
# Does row exist
#
# @param opt
# - :root_selector
# - :wait
# @return true | false
#
def has_ng_repeater_row?(repeater, opt = {})
ng_repeater_row(repeater, opt)
true
rescue NotFound
false
end
#
# Does row not exist
#
# @param opt
# - :root_selector
# - :wait
# @return true | false
#
def has_no_ng_repeater_row?(repeater, opt = {})
!has_ng_repeater_row?(repeater, opt)
end
#
# Node for nth repeater row
#
# @param opt
# - :row
# - :root_selector
# - :wait
# @return nth node
#
def ng_repeater_row(repeater, opt = {})
opt[:root_selector] ||= ng_root_selector
row = ng.row(opt)
data = ng.get_nodes_2(:findRepeaterRowsIds, [repeater, row], opt)
data.first
end
#
# All nodes matching repeater
#
# @param opt
# - :root_selector
# - :wait
# @return [node, ...]
#
def ng_repeater_rows(repeater, opt = {})
opt[:root_selector] ||= ng_root_selector
ng.get_nodes_2 :findAllRepeaterRowsIds, [repeater], opt
end
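# Example (illustrative, assumes a template using ng-repeat="user in users"):
#
#   ng_repeater_rows('user in users').size   # number of rendered rows
#   ng_repeater_row('user in users', row: 2) # node for the third row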
#
# Node for column binding value in row
#
# @param opt
# - :row
# - :root_selector
# - :wait
# @return nth node
#
#
# Node for column binding value in all rows
#
# @param opt
# - :root_selector
# - :wait
# @return [node, ...]
#
def ng_repeater_columns(repeater, binding, opt = {})
opt[:root_selector] ||= ng_root_selector
ng.get_nodes_2 :findRepeaterColumnIds, [repeater, binding], opt
end
#
# @param opt
# - :row
# - :root_selector
# - :wait
# @return nth node
#
def ng_repeater_element(repeater, index, binding, opt = {})
opt[:root_selector] ||= ng_root_selector
row = ng.row(opt)
ng_repeater_elements(repeater, index, binding, opt)[row]
end
#
# @param opt
# - :root_selector
# - :wait
# @return [node, ...]
#
def ng_repeater_elements(repeater, index, binding, opt = {})
opt[:root_selector] ||= ng_root_selector
ng.get_nodes_2 :findRepeaterElementIds, [repeater, index, binding], opt
end
end
|
xi-livecode/xi | lib/xi/pattern.rb | Xi.Pattern.each_event | ruby | def each_event(cycle=0)
return enum_for(__method__, cycle) unless block_given?
EventEnumerator.new(self, cycle).each { |v, s, d, i| yield v, s, d, i }
end | Calls the given block once for each event, passing its value, start
position, duration and iteration as parameters.
+cycle+ can be any number, even if there is no event that starts exactly
at that moment. It will start from the next event.
If no block is given, an enumerator is returned instead.
Enumeration loops forever, and starts yielding events based on pattern's
delta and from the +cycle+ position, which is by default 0.
@example block yields value, start, duration and iteration
Pattern.new([1, 2], delta: 0.25).each_event.take(4)
# => [[1, 0.0, 0.25, 0],
# [2, 0.25, 0.25, 0],
# [1, 0.5, 0.25, 1],
# [2, 0.75, 0.25, 1]]
@example +cycle+ is used to start iterating from that moment in time
Pattern.new([:a, :b, :c], delta: 1/2).each_event(42).take(4)
# => [[:a, (42/1), (1/2), 28],
# [:b, (85/2), (1/2), 28],
# [:c, (43/1), (1/2), 28],
# [:a, (87/2), (1/2), 29]]
@example +cycle+ can also be a fractional number
Pattern.new([:a, :b, :c]).each_event(0.97).take(3)
# => [[:b, 1, 1, 0],
# [:c, 2, 1, 0],
# [:a, 3, 1, 1]]
@param cycle [Numeric]
@yield [v, s, d, i] value, start, duration and iteration
@return [Enumerator] | train | https://github.com/xi-livecode/xi/blob/215dfb84899b3dd00f11089ae3eab0febf498e95/lib/xi/pattern.rb#L194-L197 | class Pattern
extend Generators
include Transforms
# Array or Proc that produces values or events
attr_reader :source
# Event delta in terms of cycles (default: 1)
attr_reader :delta
# Hash that contains metadata related to pattern usage
attr_reader :metadata
# Size of pattern
attr_reader :size
# Duration of pattern
attr_reader :duration
# Creates a new Pattern given either a +source+ or a +block+ that yields
# events.
#
# If a block is given, +yielder+ parameter must yield +value+ and +start+
# (optional) for each event.
#
# @example Pattern from an Array
# Pattern.new(['a', 'b', 'c']).take(5)
# # => [['a', 0, 1, 0],
# # ['b', 1, 1, 0],
# # ['c', 2, 1, 0],
# # ['a', 3, 1, 1], # starts cycling...
# # ['b', 4, 1, 1]]
#
# @example Pattern from a block that yields only values.
# Pattern.new { |y| y << rand(100) }.take(5)
# # => [[52, 0, 1, 0],
# # [8, 1, 1, 0],
# # [83, 2, 1, 0],
# # [25, 3, 1, 0],
# # [3, 4, 1, 0]]
#
# @param source [Array]
# @param size [Integer] number of events per iteration
# @param delta [Numeric, Array<Numeric>, Pattern<Numeric>] event delta
# @param metadata [Hash]
# @yield [yielder, delta] yielder and event delta
# @yieldreturn [value, start, duration]
# @return [Pattern]
#
def initialize(source=nil, size: nil, delta: nil, **metadata, &block)
if source.nil? && block.nil?
fail ArgumentError, 'must provide source or block'
end
if delta && delta.respond_to?(:size) && !(delta.size < Float::INFINITY)
fail ArgumentError, 'delta cannot be infinite'
end
# If delta is an array of 1 or 0 values, flatten array
delta = delta.first if delta.is_a?(Array) && delta.size <= 1
# Block takes precedence as source, even though +source+ can be used to
# infer attributes
@source = block || source
# Infer attributes from +source+ if it is a pattern
if source.is_a?(Pattern)
@delta = source.delta
@size = source.size
@metadata = source.metadata
else
@delta = 1
@size = (source.respond_to?(:size) ? source.size : nil) ||
Float::INFINITY
@metadata = {}
end
# Flatten source if it is a pattern
@source = @source.source if @source.is_a?(Pattern)
# Override or merge custom attributes if they were specified
@size = size if size
@delta = delta if delta
@metadata.merge!(metadata)
# Flatten delta values to an array, if it is an enumerable or pattern
@delta = @delta.to_a if @delta.respond_to?(:to_a)
# Set duration based on delta values
@duration = delta_values.reduce(:+) || 0
end
# Create a new Pattern given an array of +args+
#
# @see Pattern#initialize
#
# @param args [Array]
# @param kwargs [Hash]
# @return [Pattern]
#
def self.[](*args, **kwargs)
new(args, **kwargs)
end
# Returns a new Pattern with the same +source+, but with +delta+ overriden
# and +metadata+ merged.
#
# @param delta [Array<Numeric>, Pattern<Numeric>, Numeric]
# @param metadata [Hash]
# @return [Pattern]
#
def p(*delta, **metadata)
delta = delta.compact.empty? ? @delta : delta
Pattern.new(@source, delta: delta, size: @size, **@metadata.merge(metadata))
end
# Returns true if pattern is infinite
#
# A Pattern is infinite if it was created from a Proc or another infinite
# pattern, and size was not specified.
#
# @return [Boolean]
# @see #finite?
#
def infinite?
@size == Float::INFINITY
end
# Returns true if pattern is finite
#
# A pattern is finite if it has a finite size.
#
# @return [Boolean]
# @see #infinite?
#
def finite?
!infinite?
end
# Calls the given block once for each event, passing its value, start
# position, duration and iteration as parameters.
#
# +cycle+ can be any number, even if there is no event that starts exactly
# at that moment. It will start from the next event.
#
# If no block is given, an enumerator is returned instead.
#
# Enumeration loops forever, and starts yielding events based on pattern's
# delta and from the +cycle+ position, which is by default 0.
#
# @example block yields value, start, duration and iteration
# Pattern.new([1, 2], delta: 0.25).each_event.take(4)
# # => [[1, 0.0, 0.25, 0],
# # [2, 0.25, 0.25, 0],
# # [1, 0.5, 0.25, 1],
# # [2, 0.75, 0.25, 1]]
#
# @example +cycle+ is used to start iterating from that moment in time
# Pattern.new([:a, :b, :c], delta: 1/2).each_event(42).take(4)
# # => [[:a, (42/1), (1/2), 28],
# # [:b, (85/2), (1/2), 28],
# # [:c, (43/1), (1/2), 28],
# # [:a, (87/2), (1/2), 29]]
#
# @example +cycle+ can also be a fractional number
# Pattern.new([:a, :b, :c]).each_event(0.97).take(3)
# # => [[:b, 1, 1, 0],
# # [:c, 2, 1, 0],
# # [:a, 3, 1, 1]]
#
# @param cycle [Numeric]
# @yield [v, s, d, i] value, start, duration and iteration
# @return [Enumerator]
#
# Calls the given block passing the delta of each value in pattern
#
# This method is used internally by {#each_event} to calculate when each
# event in pattern occurs in time. If no block is given, an Enumerator is
# returned instead.
#
# @param index [Numeric]
# @yield [d] duration
# @return [Enumerator]
#
def each_delta(index=0)
return enum_for(__method__, index) unless block_given?
delta = @delta
if delta.is_a?(Array)
size = delta.size
return if size == 0
start = index.floor
i = start % size
loop do
yield delta[i]
i = (i + 1) % size
start += 1
end
elsif delta.is_a?(Pattern)
delta.each_event(index) { |v, _| yield v }
else
loop { yield delta }
end
end
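# Illustrative sketch: delta values cycle independently of the source, so a
# two-element delta list simply repeats against any pattern size.
#
#   Pattern.new([:a, :b, :c], delta: [1, 2]).each_delta.take(4)
#   # => [1, 2, 1, 2]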
# Calls the given block once for each value in source
#
# @example
# Pattern.new([1, 2, 3]).each.to_a
# # => [1, 2, 3]
#
# @return [Enumerator]
# @yield [Object] value
#
def each
return enum_for(__method__) unless block_given?
each_event { |v, _, _, i|
break if i > 0
yield v
}
end
# Same as {#each} but in reverse order
#
# @example
# Pattern.new([1, 2, 3]).reverse_each.to_a
# # => [3, 2, 1]
#
# @return [Enumerator]
# @yield [Object] value
#
def reverse_each
return enum_for(__method__) unless block_given?
each.to_a.reverse.each { |v| yield v }
end
# Returns an array of values from a single iteration of pattern
#
# @return [Array] values
# @see #to_events
#
def to_a
fail StandardError, 'pattern is infinite' if infinite?
each.to_a
end
# Returns an array of events (i.e. a tuple [value, start, duration,
# iteration]) from the first iteration.
#
# Only applies to finite patterns.
#
# @return [Array] events
# @see #to_a
#
def to_events
fail StandardError, 'pattern is infinite' if infinite?
each_event.take(size)
end
# Returns a new Pattern with the results of running +block+ once for every
# value in +self+
#
# If no block is given, an Enumerator is returned.
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [v, s, d] value, start (optional) and duration (optional)
# @return [Pattern]
#
def map
return enum_for(__method__) unless block_given?
Pattern.new(self) do |y, d|
each_event do |v, s, ed, i|
y << yield(v, s, ed, i)
end
end
end
alias_method :collect, :map
# Returns a Pattern containing all events of +self+ for which +block+ is
# true.
#
# If no block is given, an Enumerator is returned.
#
# @see Pattern#reject
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [Boolean] whether value is selected
# @return [Pattern]
#
def select
return enum_for(__method__) unless block_given?
Pattern.new(self) do |y, d|
each_event do |v, s, ed, i|
y << v if yield(v, s, ed, i)
end
end
end
alias_method :find_all, :select
# Returns a Pattern containing all events of +self+ for which +block+
# is false.
#
# If no block is given, an Enumerator is returned.
#
# @see Pattern#select
#
# @yield [v, s, d, i] value, start, duration and iteration
# @yieldreturn [Boolean] whether event is rejected
# @return [Pattern]
#
def reject
return enum_for(__method__) unless block_given?
select { |v, s, d, i| !yield(v, s, d, i) }
end
# Returns the first +n+ events from the pattern, starting from +cycle+
#
# @param n [Integer]
# @param cycle [Numeric]
# @return [Array] values
#
def take(n, cycle=0)
each_event(cycle).take(n)
end
# Returns the first +n+ values from +self+, starting from +cycle+.
#
# Only values are returned, start position and duration are ignored.
#
# @see #take
#
def take_values(*args)
take(*args).map(&:first)
end
# @see #take_values
def peek(n=10, *args)
take_values(n, *args)
end
# @see #take
def peek_events(n=10, cycle=0)
take(n, cycle)
end
# Returns the first element, or the first +n+ elements, of the pattern.
#
# If the pattern is empty, the first form returns nil, and the second form
# returns an empty array.
#
# @see #take
#
# @param n [Integer]
# @param args same arguments as {#take}
# @return [Object, Array]
#
def first(n=nil, *args)
res = take(n || 1, *args)
n.nil? ? res.first : res
end
# Returns a string containing a human-readable representation
#
# When source is not a Proc, this string can be evaluated to construct the
# same instance.
#
# @return [String]
#
def inspect
ss = if @source.respond_to?(:join)
@source.map(&:inspect).join(', ')
elsif @source.is_a?(Proc)
"?proc"
else
@source.inspect
end
ms = @metadata.reject { |_, v| v.nil? }
ms.merge!(delta: delta) if delta != 1
ms = ms.map { |k, v| "#{k}: #{v.inspect}" }.join(', ')
"P[#{ss}#{", #{ms}" unless ms.empty?}]"
end
alias_method :to_s, :inspect
# Returns pattern interation size or length
#
# This is usually calculated from the least-common multiple between the sum
# of delta values and the size of the pattern. If pattern is infinite,
# pattern size is assumed to be 1, so iteration size depends on delta
# values.
#
# @return [Integer]
#
def iteration_size
finite? ? delta_size.lcm(@size) : delta_size
end
# @private
def ==(o)
self.class == o.class &&
delta == o.delta &&
size == o.size &&
duration == o.duration &&
metadata == o.metadata &&
(finite? && to_a == o.to_a)
end
private
class EventEnumerator
def initialize(pattern, cycle)
@cycle = cycle
@source = pattern.source
@size = pattern.size
@iter_size = pattern.iteration_size
@iter = pattern.duration > 0 ? (cycle / pattern.duration).floor : 0
@delta_enum = pattern.each_delta(@iter * @iter_size)
@start = @iter * pattern.duration
@prev_ev = nil
@i = 0
end
def each(&block)
return enum_for(__method__, @cycle) unless block_given?
return if @size == 0
if @source.respond_to?(:call)
loop do
yielder = ::Enumerator::Yielder.new do |value|
each_block(value, &block)
end
@source.call(yielder, @delta_enum.peek)
end
elsif @source.respond_to?(:each_event)
@source.each_event(@start) do |value, _|
each_block(value, &block)
end
elsif @source.respond_to?(:[])
loop do
each_block(@source[@i % @size], &block)
end
else
fail StandardError, 'invalid source'
end
end
private
def each_block(value)
delta = @delta_enum.peek
if @start >= @cycle
if @prev_ev
yield @prev_ev if @start > @cycle
@prev_ev = nil
end
yield value, @start, delta, @iter
else
@prev_ev = [value, @start, delta, @iter]
end
@iter += 1 if @i + 1 == @iter_size
@i = (@i + 1) % @iter_size
@start += delta
@delta_enum.next
end
end
def delta_values
each_delta.take(iteration_size)
end
def delta_size
@delta.respond_to?(:each) && @delta.respond_to?(:size) ? @delta.size : 1
end
end
|
mongodb/mongoid | lib/mongoid/serializable.rb | Mongoid.Serializable.relation_options | ruby | def relation_options(inclusions, options, name)
if inclusions.is_a?(Hash)
inclusions[name]
else
{ except: options[:except], only: options[:only] }
end
end | Since the inclusions can be a hash, symbol, or array of symbols, this is
provided as a convenience to parse out the options.
@example Get the association options.
document.relation_names(:include => [ :addresses ])
@param [ Hash, Symbol, Array<Symbol> ] inclusions The inclusions.
@param [ Hash ] options The options.
@param [ Symbol ] name The name of the association.
@return [ Hash ] The options for the association.
@since 2.0.0.rc.6 | train | https://github.com/mongodb/mongoid/blob/56976e32610f4c2450882b0bfe14da099f0703f4/lib/mongoid/serializable.rb#L164-L170 | module Serializable
extend ActiveSupport::Concern
# We need to redefine where the JSON configuration is getting defined,
# similar to +ActiveRecord+.
included do
undef_method :include_root_in_json
delegate :include_root_in_json, to: ::Mongoid
end
# Gets the document as a serializable hash, used by ActiveModel's JSON
# serializer.
#
# @example Get the serializable hash.
# document.serializable_hash
#
# @example Get the serializable hash with options.
# document.serializable_hash(:include => :addresses)
#
# @param [ Hash ] options The options to pass.
#
# @option options [ Symbol ] :include What associations to include.
# @option options [ Symbol ] :only Limit the fields to only these.
# @option options [ Symbol ] :except Dont include these fields.
# @option options [ Symbol ] :methods What methods to include.
#
# @return [ Hash ] The document, ready to be serialized.
#
# @since 2.0.0.rc.6
def serializable_hash(options = nil)
options ||= {}
attrs = {}
names = field_names(options)
method_names = Array.wrap(options[:methods]).map do |name|
name.to_s if respond_to?(name)
end.compact
(names + method_names).each do |name|
without_autobuild do
serialize_attribute(attrs, name, names, options)
end
end
serialize_relations(attrs, options) if options[:include]
attrs
end
private
# Get the names of all fields that will be serialized.
#
# @api private
#
# @example Get all the field names.
# document.send(:field_names)
#
# @return [ Array<String> ] The names of the fields.
#
# @since 3.0.0
def field_names(options)
names = (as_attributes.keys + attribute_names).uniq.sort
only = Array.wrap(options[:only]).map(&:to_s)
except = Array.wrap(options[:except]).map(&:to_s)
except |= ['_type'] unless Mongoid.include_type_for_serialization
if !only.empty?
names &= only
elsif !except.empty?
names -= except
end
names
end
# Serialize a single attribute. Handles associations, fields, and dynamic
# attributes.
#
# @api private
#
# @example Serialize the attribute.
# document.serialize_attribute({}, "id" , [ "id" ])
#
# @param [ Hash ] attrs The attributes.
# @param [ String ] name The attribute name.
# @param [ Array<String> ] names The names of all attributes.
# @param [ Hash ] options The options.
#
# @return [ Object ] The attribute.
#
# @since 3.0.0
def serialize_attribute(attrs, name, names, options)
if relations.key?(name)
value = send(name)
attrs[name] = value ? value.serializable_hash(options) : nil
elsif names.include?(name) && !fields.key?(name)
attrs[name] = read_raw_attribute(name)
elsif !attribute_missing?(name)
attrs[name] = send(name)
end
end
# For each of the provided include options, get the association needed and
# provide it in the hash.
#
# @example Serialize the included associations.
# document.serialize_relations({}, :include => :addresses)
#
# @param [ Hash ] attributes The attributes to serialize.
# @param [ Hash ] options The serialization options.
#
# @option options [ Symbol ] :include What associations to include
# @option options [ Symbol ] :only Limit the fields to only these.
# @option options [ Symbol ] :except Dont include these fields.
#
# @since 2.0.0.rc.6
def serialize_relations(attributes = {}, options = {})
inclusions = options[:include]
relation_names(inclusions).each do |name|
association = relations[name.to_s]
if association && relation = send(association.name)
attributes[association.name.to_s] =
relation.serializable_hash(relation_options(inclusions, options, name))
end
end
end
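# Illustrative call (hypothetical Person model with an addresses relation):
#
#   person.serializable_hash(include: { addresses: { only: [:street] } })
#   # => { "name" => "...", "addresses" => [{ "street" => "..." }] }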
# Since the inclusions can be a hash, symbol, or array of symbols, this is
# provided as a convenience to parse out the names.
#
# @example Get the association names.
# document.relation_names(:include => [ :addresses ])
#
# @param [ Hash, Symbol, Array<Symbol> ] inclusions The inclusions.
#
# @return [ Array<Symbol> ] The names of the included associations.
#
# @since 2.0.0.rc.6
def relation_names(inclusions)
inclusions.is_a?(Hash) ? inclusions.keys : Array.wrap(inclusions)
end
# Since the inclusions can be a hash, symbol, or array of symbols, this is
# provided as a convenience to parse out the options.
#
# @example Get the association options.
# document.relation_names(:include => [ :addresses ])
#
# @param [ Hash, Symbol, Array<Symbol> ] inclusions The inclusions.
# @param [ Hash ] options The options.
# @param [ Symbol ] name The name of the association.
#
# @return [ Hash ] The options for the association.
#
# @since 2.0.0.rc.6
end
|
dropofwill/rtasklib | lib/rtasklib/controller.rb | Rtasklib.Controller.count | ruby | def count ids: nil, tags: nil, dom: nil, active: true
f = Helpers.filter(ids: ids, tags: tags, dom: dom)
a = Helpers.pending_or_waiting(active)
Execute.task_popen3(*@override_a, f, a, "count") do |i, o, e, t|
return Integer(o.read)
end
end | Count the number of tasks that match a given filter. Faster than counting
an array returned by Controller#all or Controller#some.
@param ids [Array<Range, Fixnum, String>, String, Range, Fixnum]
@param tags [Array<String>, String]
@param dom [Array<String>, String]
@param active [Boolean] return only pending & waiting tasks
@api public | train | https://github.com/dropofwill/rtasklib/blob/c3a69a7188765e5d662d9d0d1fd5d4f87dc74d8c/lib/rtasklib/controller.rb#L124-L130 | module Controller
extend self
# Retrieves the current task list from the TaskWarrior database. Defaults
# to just show active (waiting & pending) tasks, which is usually what is
# exposed to the end user through the default reports. To see everything
# including completed, deleted, and parent recurring tasks, set
# `active: false`. For more granular control see Controller#some.
#
# @example
# tw.all.count #=> 200
# tw.all(active: true) #=> 200
# tw.all(active: false) #=> 578
#
# @param active [Boolean] return only pending & waiting tasks
# @return [Array<Models::TaskModel>]
# @api public
def all active: true
all = []
f = Helpers.pending_or_waiting(active)
Execute.task_popen3(*override_a, f, "export") do |i, o, e, t|
all = MultiJson.load(o.read).map do |x|
Rtasklib::Models::TaskModel.new(x)
end
end
return all
end
# Retrieves the current task list filtered by id, tag, or a dom query
#
# @example filter by an array of ids
# tw.some(ids: [1..2, 5])
# @example filter by tags
# tw.some(tags: ["+school", "or", "-work"]
# # You can also pass in a TW style string if you prefer
# tw.some(tags: "+school or -work"]
# @example filter by a dom query
# require "date"
# today = DateTime.now
# # note that queries with dots need to be Strings, as they would be
# # invalid Symbols
# tw.some(dom: {project: "Work", "due.before" => today})
# # You can also pass in a TW style string if you prefer
# tw.some(dom: "project:Work due.before:#{today}")
#
# @param ids [Array<Range, Fixnum, String>, String, Range, Fixnum]
# @param tags [Array<String>, String]
# @param dom [Array<String>, String]
# @param active [Boolean] return only pending & waiting tasks
# @return [Array<Models::TaskModel>]
# @api public
def some ids: nil, tags: nil, dom: nil, active: true
some = []
f = Helpers.filter(ids: ids, tags: tags, dom: dom)
a = Helpers.pending_or_waiting(active)
Execute.task_popen3(*@override_a, f, a, "export") do |i, o, e, t|
some = MultiJson.load(o.read).map do |x|
Rtasklib::Models::TaskModel.new(x)
end
end
return some
end
# Count the number of tasks that match a given filter. Faster than counting
# an array returned by Controller#all or Controller#some.
#
# @param ids [Array<Range, Fixnum, String>, String, Range, Fixnum]
# @param tags [Array<String>, String]
# @param dom [Array<String>, String]
# @param active [Boolean] return only pending & waiting tasks
# @api public
alias_method :size, :count
alias_method :length, :count
# Calls `task _show` with initial overrides returns a Taskrc object of the
# result
#
# @return [Rtasklib::Taskrc]
# @api public
def get_rc
res = []
Execute.task_popen3(*@override_a, "_show") do |i, o, e, t|
res = o.read.each_line.map { |l| l.chomp }
end
Taskrc.new(res, :array)
end
# Calls `task _version` and returns the result
#
# @return [String]
# @api public
def get_version
version = nil
Execute.task_popen3("_version") do |i, o, e, t|
version = Helpers.to_gem_version(o.read.chomp)
end
version
end
# Mark the filter of tasks as started
# Returns false if filter (ids:, tags:, dom:) is blank.
#
# @example
# tw.start!(ids: 1)
#
# @param ids [Array<Range, Fixnum, String>, String, Range, Fixnum]
# @param tags [Array<String>, String]
# @param dom [Array<String>, String]
# @param active [Boolean] return only pending & waiting tasks
# @return [Process::Status, False] the exit status of the thread or false
# if it exited early because filter was blank.
# @api public
def start! ids: nil, tags: nil, dom: nil, active: true
f = Helpers.filter(ids: ids, tags: tags, dom: dom)
a = Helpers.pending_or_waiting(active)
return false if f.blank?
Execute.task_popen3(*@override_a, f, a, "start") do |i, o, e, t|
return t.value
end
end
# Mark the filter of tasks as stopped
# Returns false if filter (ids:, tags:, dom:) is blank.
#
# @param ids [Array<Range, Fixnum, String>, String, Range, Fixnum]
# @param tags [Array<String>, String]
# @param dom [Array<String>, String]
# @param active [Boolean] return only pending & waiting tasks
# @return [Process::Status, False] the exit status of the thread or false
# if it exited early because filter was blank.
# @api public
def stop! ids: nil, tags: nil, dom: nil, active: true
f = Helpers.filter(ids: ids, tags: tags, dom: dom)
a = Helpers.pending_or_waiting(active)
return false if f.blank?
Execute.task_popen3(*@override_a, f, a, "stop") do |i, o, e, t|
return t.value
end
end
# Add a single task to the database w/required description and optional
# tags and dom queries (e.g. project:Work)
#
# @param description [String] the required desc of the task
# @param tags [Array<String>, String]
# @param dom [Array<String>, String]
# @return [Process::Status] the exit status of the thread
# @api public
def add! description, tags: nil, dom: nil
f = Helpers.filter(tags: tags, dom: dom)
d = Helpers.wrap_string(description)
Execute.task_popen3(*override_a, "add", d, f) do |i, o, e, t|
return t.value
end
end
# Modify the set of tasks that match the input filter with a single attr/value
# pair.
# Returns false if filter (ids:, tags:, dom:) is blank.
#
# @param attr [String]
# @param val [String]
# @param ids [Array<Range, Fixnum, String>, String, Range, Fixnum]
# @param tags [Array<String>, String]
# @param dom [Array<String>, String]
# @param active [Boolean] return only pending & waiting tasks
# @return [Process::Status] the exit status of the thread
# @api public
def modify! attr, val, ids: nil, tags: nil, dom: nil, active: true
f = Helpers.filter(ids: ids, tags: tags, dom: dom)
a = Helpers.pending_or_waiting(active)
return false if f.blank?
query = "#{f} #{a} modify #{attr}:#{val}"
Execute.task_popen3(*override_a, query) do |i, o, e, t|
return t.value
end
end
# Finishes the filtered tasks.
# Returns false if filter (ids:, tags:, dom:) is blank.
#
# @param ids [Array<Range, Fixnum, String>, String, Range, Fixnum]
# @param tags [Array<String>, String]
# @param dom [Array<String>, String]
# @param active [Boolean] return only pending & waiting tasks
# @return [Process::Status] the exit status of the thread
# @api public
def done! ids: nil, tags: nil, dom: nil, active: true
f = Helpers.filter(ids: ids, tags: tags, dom: dom)
a = Helpers.pending_or_waiting(active)
return false if f.blank?
Execute.task_popen3(*override_a, f, a, "done") do |i, o, e, t|
return t.value
end
end
# Deletes the filtered tasks.
# Returns false if filter is blank.
#
# @param ids [Array<Range, Fixnum, String>, String, Range, Fixnum]
# @param tags [Array<String>, String]
# @param dom [Array<String>, String]
# @param active [Boolean] return only pending & waiting tasks
# @return [Process::Status] the exit status of the thread
# @api public
def delete! ids: nil, tags: nil, dom: nil, active: true
f = Helpers.filter(ids: ids, tags: tags, dom: dom)
a = Helpers.pending_or_waiting(active)
return false if f.blank?
Execute.task_popen3(*override_a, f, a, "delete") do |i, o, e, t|
return t.value
end
end
# Directly call `task undo`, which only applies to edits to the task db
# not configuration changes
#
# @api public
def undo!
Execute.task_popen3(*override_a, "undo") do |i, o, e, t|
return t.value
end
end
# Retrieves a hash of hashes with info about the UDAs currently available
#
# @return [Hash{Symbol=>Hash}]
# @api public
def get_udas
udas = {}
taskrc.config.attributes
.select { |attr, val| Helpers.uda_attr? attr }
.sort
.chunk { |attr, val| Helpers.arbitrary_attr attr }
.each do |attr, arr|
uda = arr.map do |pair|
[Helpers.deep_attr(pair[0]), pair[1]]
end
udas[attr.to_sym] = Hash[uda]
end
return udas
end
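# Illustrative return shape (hypothetical "estimate" UDA; exact key types
# depend on Helpers.deep_attr):
#
#   tw.get_udas
#   # => { estimate: { "type" => "numeric", "label" => "Estimate" } }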
# Update a configuration variable in the .taskrc
#
# @param attr [String]
# @param val [String]
# @return [Process::Status] the exit status of the thread
# @api public
def update_config! attr, val
Execute.task_popen3(*override_a, "config #{attr} #{val}") do |i, o, e, t|
return t.value
end
end
# Add new found udas to our internal TaskModel
#
# @param uda_hash [Hash{Symbol=>Hash}]
# @param type [Class, nil]
# @param model [Models::TaskModel, Class]
# @api protected
def add_udas_to_model! uda_hash, type=nil, model=Models::TaskModel
uda_hash.each do |attr, val|
val.each do |k, v|
type = Helpers.determine_type(v) if type.nil?
model.attribute attr, type
end
end
end
protected :add_udas_to_model!
# Retrieve an array of the uda names
#
# @return [Array<String>]
# @api public
def get_uda_names
Execute.task_popen3(*@override_a, "_udas") do |i, o, e, t|
return o.read.each_line.map { |l| l.chomp }
end
end
# Checks if a given uda exists in the current task database
#
# @param uda_name [String] the uda name to check for
# @return [Boolean] whether it matches or not
# @api public
def uda_exists? uda_name
if get_udas.keys.any? { |uda| uda.to_s == uda_name.to_s } # get_udas is keyed by symbols
true
else
false
end
end
# Add a UDA to the users config/database
#
# @param name [String]
# @param type [String]
# @param label [String]
# @param values [String]
# @param default [String]
# @param urgency [String]
# @return [Boolean] success
# @api public
def create_uda! name, type: "string", label: nil, values: nil,
default: nil, urgency: nil
label = name if label.nil?
update_config!("uda.#{name}.type", type)
update_config!("uda.#{name}.label", label)
update_config!("uda.#{name}.values", values) unless values.nil?
update_config!("uda.#{name}.default", default) unless default.nil?
update_config!("uda.#{name}.urgency", urgency) unless urgency.nil?
end
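# Example (illustrative): define a severity UDA with a fixed value list.
#
#   tw.create_uda!('severity', type: 'string', values: 'low,medium,high')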
# Sync the local TaskWarrior database changes to the remote databases.
# Remotes need to be configured in the .taskrc.
#
# @example
# # make some local changes with add!, modify!, or the like
# tw.sync!
#
# @return [Process::Status] the exit status of the thread
# @api public
def sync!
Execute.task_popen3(*override_a, "sync") do |i, o, e, t|
return t.value
end
end
# TODO: implement and test convenience methods for modifying tasks
#
# def annotate
# end
#
# def denotate
# end
#
# def append
# end
#
# def prepend
# end
end
|
sunspot/sunspot | sunspot/lib/sunspot/text_field_setup.rb | Sunspot.TextFieldSetup.field | ruby | def field(name)
fields = @setup.text_fields(name)
if fields
if fields.length == 1
fields.first
else
raise(
Sunspot::UnrecognizedFieldError,
"The text field with name #{name} has incompatible configurations for the classes #{@setup.type_names.join(', ')}"
)
end
end
end | :nodoc:
Return a text field with the given name. Duck-type compatible with
Setup and CompositeSetup, but returns text fields instead. | train | https://github.com/sunspot/sunspot/blob/31dd76cd7a14a4ef7bd541de97483d8cd72ff685/sunspot/lib/sunspot/text_field_setup.rb#L15-L27 | class TextFieldSetup #:nodoc:
def initialize(setup)
@setup = setup
end
#
# Return a text field with the given name. Duck-type compatible with
# Setup and CompositeSetup, but returns text fields instead.
#
end
|
wvanbergen/request-log-analyzer | lib/request_log_analyzer/source/log_parser.rb | RequestLogAnalyzer::Source.LogParser.update_current_request | ruby | def update_current_request(request_data, &block) # :yields: request
if alternative_header_line?(request_data)
if @current_request
@current_request << request_data
else
@current_request = @file_format.request(request_data)
end
elsif header_line?(request_data)
if @current_request
case options[:parse_strategy]
when 'assume-correct'
handle_request(@current_request, &block)
@current_request = @file_format.request(request_data)
when 'cautious'
@skipped_lines += 1
warn(:unclosed_request, "Encountered header line (#{request_data[:line_definition].name.inspect}), but previous request was not closed!")
@current_request = nil # remove all data that was parsed, skip next request as well.
end
elsif footer_line?(request_data)
handle_request(@file_format.request(request_data), &block)
else
@current_request = @file_format.request(request_data)
end
else
if @current_request
@current_request << request_data
if footer_line?(request_data)
handle_request(@current_request, &block) # yield @current_request
@current_request = nil
end
else
@skipped_lines += 1
warn(:no_current_request, "Parseable line (#{request_data[:line_definition].name.inspect}) found outside of a request!")
end
end
end | Combines the different lines of a request into a single Request object. It will start a
new request when a header line is encountered and will emit the request when a footer line
is encountered.
Combining the lines is done using heuristics. Problems can occur in this process. The
current parse strategy defines how these cases are handled.
When using the 'assume-correct' parse strategy (default):
- Every line that is parsed before a header line is ignored as it cannot be included in
any request. It will emit a :no_current_request warning.
- If a header line is found before the previous request was closed, the previous request
will be yielded and a new request will be started.
When using the 'cautious' parse strategy:
- Every line that is parsed before a header line is ignored as it cannot be included in
any request. It will emit a :no_current_request warning.
- A header line that is parsed before a request is closed by a footer line is a sign of
an improperly ordered file. All data that is gathered for the request until then is
discarded and the next request is ignored as well. An :unclosed_request warning is
emitted.
<tt>request_data</tt>:: A hash of data that was parsed from the last line. | train | https://github.com/wvanbergen/request-log-analyzer/blob/b83865d440278583ac8e4901bb33878244fd7c75/lib/request_log_analyzer/source/log_parser.rb#L285-L320 | class LogParser < Base
include Enumerable
# The maximum number of bytes to read from a line.
DEFAULT_MAX_LINE_LENGTH = 8096
DEFAULT_LINE_DIVIDER = "\n"
# The default parse strategy that will be used to parse the input.
DEFAULT_PARSE_STRATEGY = 'assume-correct'
# All available parse strategies.
PARSE_STRATEGIES = ['cautious', 'assume-correct']
attr_reader :source_files, :current_file, :current_lineno, :processed_files
attr_reader :warnings, :parsed_lines, :parsed_requests, :skipped_lines, :skipped_requests
# Initializes the log file parser instance.
# It will apply the language specific FileFormat module to this instance. It will use the line
# definitions in this module to parse any input that it is given (see parse_io).
#
# <tt>format</tt>:: The current file format instance
# <tt>options</tt>:: A hash of options that are used by the parser
def initialize(format, options = {})
super(format, options)
@warnings = 0
@parsed_lines = 0
@parsed_requests = 0
@skipped_lines = 0
@skipped_requests = 0
@current_request = nil
@current_source = nil
@current_file = nil
@current_lineno = nil
@processed_files = []
@source_files = options[:source_files]
@progress_handler = nil
@warning_handler = nil
@options[:parse_strategy] ||= DEFAULT_PARSE_STRATEGY
unless PARSE_STRATEGIES.include?(@options[:parse_strategy])
fail "Unknown parse strategy: #{@options[@parse_strategy]}"
end
end
def max_line_length
file_format.max_line_length || DEFAULT_MAX_LINE_LENGTH
end
def line_divider
file_format.line_divider || DEFAULT_LINE_DIVIDER
end
# Reads the input, which can either be a file, sequence of files or STDIN to parse
# lines specified in the FileFormat. These lines will be combined into Request instances,
# that will be yielded. The actual parsing occurs in the parse_io method.
# <tt>options</tt>:: A Hash of options that will be passed to parse_io.
def each_request(options = {}, &block) # :yields: :request, request
case @source_files
when IO
if @source_files == $stdin
puts 'Parsing from the standard input. Press CTRL+C to finish.' # FIXME: not here
end
parse_stream(@source_files, options, &block)
when String
parse_file(@source_files, options, &block)
when Array
parse_files(@source_files, options, &block)
else
fail 'Unknown source provided'
end
end
# Make sure the Enumerable methods work as expected
alias_method :each, :each_request
# Parses a list of subsequent files of the same format, by calling parse_file for every
# file in the array.
# <tt>files</tt>:: The Array of files that should be parsed
# <tt>options</tt>:: A Hash of options that will be passed to parse_io.
def parse_files(files, options = {}, &block) # :yields: request
files.each { |file| parse_file(file, options, &block) }
end
# Check if a file has a compressed extension in the filename.
# If recognized, return the command string used to decompress the file
def decompress_file?(filename)
nice_command = 'nice -n 5'
return "#{nice_command} gunzip -c -d #{filename}" if filename.match(/\.tar.gz$/) || filename.match(/\.tgz$/) || filename.match(/\.gz$/)
return "#{nice_command} bunzip2 -c -d #{filename}" if filename.match(/\.bz2$/)
return "#{nice_command} unzip -p #{filename}" if filename.match(/\.zip$/)
''
end
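# Example mappings (illustrative):
#
#   decompress_file?('access.log.gz') # => "nice -n 5 gunzip -c -d access.log.gz"
#   decompress_file?('access.log')    # => "" (parse the file directly)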
# Parses a log file. Creates an IO stream for the provided file, and sends it to parse_io for
# further handling. This method supports progress updates that can be used to display a progressbar
#
# If the logfile is compressed, it is uncompressed to stdout and read.
# TODO: Check if IO.popen encounters problems with the given command line.
# TODO: Fix progress bar that is broken for IO.popen, as it returns a single string.
#
# <tt>file</tt>:: The file that should be parsed.
# <tt>options</tt>:: A Hash of options that will be passed to parse_io.
def parse_file(file, options = {}, &block)
if File.directory?(file)
parse_files(Dir["#{ file }/*"], options, &block)
return
end
@current_source = File.expand_path(file)
@source_changes_handler.call(:started, @current_source) if @source_changes_handler
if decompress_file?(file).empty?
@progress_handler = @dormant_progress_handler
@progress_handler.call(:started, file) if @progress_handler
File.open(file, 'rb') { |f| parse_io(f, options, &block) }
@progress_handler.call(:finished, file) if @progress_handler
@progress_handler = nil
@processed_files.push(@current_source.dup)
else
IO.popen(decompress_file?(file), 'rb') { |f| parse_io(f, options, &block) }
end
@source_changes_handler.call(:finished, @current_source) if @source_changes_handler
@current_source = nil
end
# Parses an IO stream. It will simply call parse_io. This function does not support progress updates
# because the length of a stream is not known.
# <tt>stream</tt>:: The IO stream that should be parsed.
# <tt>options</tt>:: A Hash of options that will be pased to parse_io.
def parse_stream(stream, options = {}, &block)
parse_io(stream, options, &block)
end
# Parses a string. It will simply call parse_io. This function does not support progress updates.
# <tt>string</tt>:: The string that should be parsed.
# <tt>options</tt>:: A Hash of options that will be pased to parse_io.
def parse_string(string, options = {}, &block)
parse_io(StringIO.new(string), options, &block)
end
# This method loops over each line of the input stream. It will try to parse this line as any of
# the lines that are defined by the current file format (see RequestLogAnalyzer::FileFormat).
# It will then combine these parsed lines into requests using heuristics. These requests (see
# RequestLogAnalyzer::Request) will then be yielded for further processing in the pipeline.
#
# - RequestLogAnalyzer::LineDefinition#matches is called to test if a line matches a line definition of the file format.
# - update_current_request is used to combine parsed lines into requests using heuristics.
# - The method will yield progress updates if a progress handler is installed using progress=
# - The method will yield parse warnings if a warning handler is installed using warning=
#
# This is a Ruby 1.9 specific version that offers memory protection.
#
# <tt>io</tt>:: The IO instance to use as source
# <tt>options</tt>:: A hash of options that can be used by the parser.
def parse_io_19(io, options = {}, &block) # :yields: request
@max_line_length = options[:max_line_length] || max_line_length
@line_divider = options[:line_divider] || line_divider
@current_lineno = 0
while line = io.gets(@line_divider, @max_line_length)
@current_lineno += 1
@progress_handler.call(:progress, io.pos) if @progress_handler && @current_lineno % 255 == 0
parse_line(line, &block)
end
warn(:unfinished_request_on_eof, 'End of file reached, but last request was not completed!') unless @current_request.nil?
@current_lineno = nil
end
# This method loops over each line of the input stream. It will try to parse this line as any of
# the lines that are defined by the current file format (see RequestLogAnalyzer::FileFormat).
# It will then combine these parsed lines into requests using heuristics. These requests (see
# RequestLogAnalyzer::Request) will then be yielded for further processing in the pipeline.
#
# - RequestLogAnalyzer::LineDefinition#matches is called to test if a line matches a line definition of the file format.
# - update_current_request is used to combine parsed lines into requests using heuristics.
# - The method will yield progress updates if a progress handler is installed using progress=
# - The method will yield parse warnings if a warning handler is installed using warning=
#
# This is a Ruby 1.8 specific version that doesn't offer memory protection.
#
# <tt>io</tt>:: The IO instance to use as source
# <tt>options</tt>:: A hash of options that can be used by the parser.
def parse_io_18(io, options = {}, &block) # :yields: request
@line_divider = options[:line_divider] || line_divider
@current_lineno = 0
while line = io.gets(@line_divider)
@current_lineno += 1
@progress_handler.call(:progress, io.pos) if @progress_handler && @current_lineno % 255 == 0
parse_line(line, &block)
end
warn(:unfinished_request_on_eof, 'End of file reached, but last request was not completed!') unless @current_request.nil?
@current_lineno = nil
end
alias_method :parse_io, RUBY_VERSION.to_f < 1.9 ? :parse_io_18 : :parse_io_19
# Parses a single line using the current file format. If successful, use the parsed
# information to build a request
# <tt>line</tt>:: The line to parse
# <tt>block</tt>:: The block to send fully parsed requests to.
def parse_line(line, &block) # :yields: request
if request_data = file_format.parse_line(line) { |wt, message| warn(wt, message) }
@parsed_lines += 1
update_current_request(request_data.merge(source: @current_source, lineno: @current_lineno), &block)
end
end
# Add a block to this method to install a progress handler while parsing.
# <tt>proc</tt>:: The proc that will be called to handle progress update messages
def progress=(proc)
@dormant_progress_handler = proc
end
# Add a block to this method to install a warning handler while parsing.
# <tt>proc</tt>:: The proc that will be called to handle parse warning messages
def warning=(proc)
@warning_handler = proc
end
# Add a block to this method to install a source change handler while parsing.
# <tt>proc</tt>:: The proc that will be called to handle source changes
def source_changes=(proc)
@source_changes_handler = proc
end
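# Sketch of installing the handlers above (hedged: the handler arities mirror
# the calls made in parse_file and warn; the bodies are illustrative):
#
#   parser.progress = lambda { |status, arg| print '.' if status == :progress }
#   parser.warning = lambda { |type, message, lineno| $stderr.puts "line #{lineno}: #{message}" }
#   parser.source_changes = lambda { |status, source| puts "#{status}: #{source}" }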
# This method is called by the parser if it encounteres any parsing problems.
# It will call the installed warning handler if any.
#
# By default, RequestLogAnalyzer::Controller will install a warning handler
# that will pass the warnings to each aggregator so they can do something useful
# with it.
#
# <tt>type</tt>:: The warning type (a Symbol)
# <tt>message</tt>:: A message explaining the warning
def warn(type, message)
@warnings += 1
@warning_handler.call(type, message, @current_lineno) if @warning_handler
end
protected
# Combines the different lines of a request into a single Request object. It will start a
# new request when a header line is encountered and will emit the request when a footer line
# is encountered.
#
# Combining the lines is done using heuristics. Problems can occur in this process. The
# current parse strategy defines how these cases are handled.
#
# When using the 'assume-correct' parse strategy (default):
# - Every line that is parsed before a header line is ignored as it cannot be included in
# any request. It will emit a :no_current_request warning.
# - If a header line is found before the previous request was closed, the previous request
# will be yielded and a new request will be started.
#
# When using the 'cautious' parse strategy:
# - Every line that is parsed before a header line is ignored as it cannot be included in
# any request. It will emit a :no_current_request warning.
# - A header line that is parsed before a request is closed by a footer line is a sign of
#   an improperly ordered file. All data that is gathered for the request until then is
# discarded and the next request is ignored as well. An :unclosed_request warning is
# emitted.
#
# <tt>request_data</tt>:: A hash of data that was parsed from the last line.
# Handles the parsed request by sending it into the pipeline.
#
# - It will call RequestLogAnalyzer::Request#validate on the request instance
# - It will send the request into the pipeline, checking whether it was accepted by all the filters.
# - It will update the parsed_requests and skipped_requests variables accordingly
#
# <tt>request</tt>:: The parsed request instance (RequestLogAnalyzer::Request)
def handle_request(request, &_block) # :yields: :request, request
@parsed_requests += 1
request.validate
accepted = block_given? ? yield(request) : true
@skipped_requests += 1 unless accepted
end
# Checks whether a given line hash is an alternative header line according to the current file format.
# <tt>hash</tt>:: A hash of data that was parsed from the line.
def alternative_header_line?(hash)
hash[:line_definition].header == :alternative
end
# Checks whether a given line hash is a header line according to the current file format.
# <tt>hash</tt>:: A hash of data that was parsed from the line.
def header_line?(hash)
hash[:line_definition].header == true
end
# Checks whether a given line hash is a footer line according to the current file format.
# <tt>hash</tt>:: A hash of data that was parsed from the line.
def footer_line?(hash)
hash[:line_definition].footer
end
end
|
forward3d/rbhive | lib/rbhive/t_c_l_i_connection.rb | RBHive.TCLIConnection.explain | ruby | def explain(query)
rows = []
fetch_in_batch("EXPLAIN " + query) do |batch|
rows << batch.map { |b| b[:Explain] }
end
ExplainResult.new(rows.flatten)
end | Performs an explain on the supplied query on the server, returns it as an ExplainResult.
(Only works on 0.12 if you have this patch - https://issues.apache.org/jira/browse/HIVE-5492) | train | https://github.com/forward3d/rbhive/blob/a630b57332f2face03501da3ecad2905c78056fa/lib/rbhive/t_c_l_i_connection.rb#L310-L316 | class TCLIConnection
attr_reader :client
def initialize(server, port = 10_000, options = {}, logger = StdOutLogger.new)
options ||= {} # backwards compatibility
raise "'options' parameter must be a hash" unless options.is_a?(Hash)
if options[:transport] == :sasl and options[:sasl_params].nil?
raise ":transport is set to :sasl, but no :sasl_params option was supplied"
end
# Defaults to buffered transport, Hive 0.10, 1800 second timeout
options[:transport] ||= :buffered
options[:hive_version] ||= 10
options[:timeout] ||= 1800
@options = options
# Look up the appropriate Thrift protocol version for the supplied Hive version
@thrift_protocol_version = thrift_hive_protocol(options[:hive_version])
@logger = logger
@transport = thrift_transport(server, port)
@protocol = Thrift::BinaryProtocol.new(@transport)
@client = Hive2::Thrift::TCLIService::Client.new(@protocol)
@session = nil
@logger.info("Connecting to HiveServer2 #{server} on port #{port}")
end
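# A hedged usage sketch (host, port, and query are illustrative):
#
#   conn = RBHive::TCLIConnection.new('hive.example.com', 10_000, transport: :buffered)
#   conn.open
#   conn.open_session
#   rows = conn.fetch('SELECT * FROM logs LIMIT 10')
#   conn.close_session
#   conn.close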
def thrift_hive_protocol(version)
HIVE_THRIFT_MAPPING[version] || raise("Invalid Hive version")
end
def thrift_transport(server, port)
@logger.info("Initializing transport #{@options[:transport]}")
case @options[:transport]
when :buffered
return Thrift::BufferedTransport.new(thrift_socket(server, port, @options[:timeout]))
when :sasl
return Thrift::SaslClientTransport.new(thrift_socket(server, port, @options[:timeout]),
parse_sasl_params(@options[:sasl_params]))
when :http
return Thrift::HTTPClientTransport.new("http://#{server}:#{port}/cliservice")
else
raise "Unrecognised transport type '#{transport}'"
end
end
def thrift_socket(server, port, timeout)
socket = Thrift::Socket.new(server, port)
socket.timeout = timeout
socket
end
# Processes SASL connection params and returns a hash with symbol keys, or nil
def parse_sasl_params(sasl_params)
# Symbolize keys in a hash
if sasl_params.kind_of?(Hash)
return sasl_params.inject({}) do |memo,(k,v)|
memo[k.to_sym] = v;
memo
end
end
return nil
end
def open
@transport.open
end
def close
@transport.close
end
def open_session
@session = @client.OpenSession(prepare_open_session(@thrift_protocol_version))
end
def close_session
@client.CloseSession prepare_close_session
@session = nil
end
def session
@session && @session.sessionHandle
end
def client
@client
end
def execute(query)
@logger.info("Executing Hive Query: #{query}")
req = prepare_execute_statement(query)
exec_result = client.ExecuteStatement(req)
raise_error_if_failed!(exec_result)
exec_result
end
def priority=(priority)
set("mapred.job.priority", priority)
end
def queue=(queue)
set("mapred.job.queue.name", queue)
end
def set(name,value)
@logger.info("Setting #{name}=#{value}")
self.execute("SET #{name}=#{value}")
end
# Async execute
def async_execute(query)
@logger.info("Executing query asynchronously: #{query}")
exec_result = @client.ExecuteStatement(
Hive2::Thrift::TExecuteStatementReq.new(
sessionHandle: @session.sessionHandle,
statement: query,
runAsync: true
)
)
raise_error_if_failed!(exec_result)
op_handle = exec_result.operationHandle
# Return handles to get hold of this query / session again
{
session: @session.sessionHandle,
guid: op_handle.operationId.guid,
secret: op_handle.operationId.secret
}
end
# Is the query complete?
def async_is_complete?(handles)
async_state(handles) == :finished
end
# Is the query actually running?
def async_is_running?(handles)
async_state(handles) == :running
end
# Has the query failed?
def async_is_failed?(handles)
async_state(handles) == :error
end
def async_is_cancelled?(handles)
async_state(handles) == :cancelled
end
def async_cancel(handles)
@client.CancelOperation(prepare_cancel_request(handles))
end
# Map states to symbols
def async_state(handles)
response = @client.GetOperationStatus(
Hive2::Thrift::TGetOperationStatusReq.new(operationHandle: prepare_operation_handle(handles))
)
case response.operationState
when Hive2::Thrift::TOperationState::FINISHED_STATE
return :finished
when Hive2::Thrift::TOperationState::INITIALIZED_STATE
return :initialized
when Hive2::Thrift::TOperationState::RUNNING_STATE
return :running
when Hive2::Thrift::TOperationState::CANCELED_STATE
return :cancelled
when Hive2::Thrift::TOperationState::CLOSED_STATE
return :closed
when Hive2::Thrift::TOperationState::ERROR_STATE
return :error
when Hive2::Thrift::TOperationState::UKNOWN_STATE # sic: matches the misspelled constant in the upstream Hive Thrift definition
return :unknown
when Hive2::Thrift::TOperationState::PENDING_STATE
return :pending
when nil
raise "No operation state found for handles - has the session been closed?"
else
return :state_not_in_protocol
end
end
# Async fetch results from an async execute
def async_fetch(handles, max_rows = 100)
# Can't get data from an unfinished query
unless async_is_complete?(handles)
raise "Can't perform fetch on a query in state: #{async_state(handles)}"
end
# Fetch and return the first batch of rows
fetch_rows(prepare_operation_handle(handles), :first, max_rows)
end
# Performs a query on the server, fetches the results in batches of *batch_size* rows
# and yields the result batches to a given block as arrays of rows.
def async_fetch_in_batch(handles, batch_size = 1000, &block)
raise "No block given for the batch fetch request!" unless block_given?
# Can't get data from an unfinished query
unless async_is_complete?(handles)
raise "Can't perform fetch on a query in state: #{async_state(handles)}"
end
# Now let's iterate over the results
loop do
rows = fetch_rows(prepare_operation_handle(handles), :next, batch_size)
break if rows.empty?
yield rows
end
end
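# Sketch of the asynchronous flow (hedged: the query and the one-second
# polling interval are illustrative):
#
#   handles = conn.async_execute('SELECT COUNT(*) FROM logs')
#   sleep(1) until conn.async_is_complete?(handles)
#   result = conn.async_fetch(handles)
#   conn.async_close_session(handles)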
def async_close_session(handles)
validate_handles!(handles)
@client.CloseSession(Hive2::Thrift::TCloseSessionReq.new( sessionHandle: handles[:session] ))
end
# Pull rows from the query result
def fetch_rows(op_handle, orientation = :first, max_rows = 1000)
fetch_req = prepare_fetch_results(op_handle, orientation, max_rows)
fetch_results = @client.FetchResults(fetch_req)
raise_error_if_failed!(fetch_results)
rows = fetch_results.results.rows
TCLIResultSet.new(rows, TCLISchemaDefinition.new(get_schema_for(op_handle), rows.first))
end
# Performs an explain on the supplied query on the server, returns it as an ExplainResult.
# (Only works on 0.12 if you have this patch - https://issues.apache.org/jira/browse/HIVE-5492)
# Performs a query on the server, fetches up to *max_rows* rows and returns them as an array.
def fetch(query, max_rows = 100)
# Execute the query and check the result
exec_result = execute(query)
raise_error_if_failed!(exec_result)
# Get search operation handle to fetch the results
op_handle = exec_result.operationHandle
# Fetch the rows
fetch_rows(op_handle, :first, max_rows)
end
# Performs a query on the server, fetches the results in batches of *batch_size* rows
# and yields the result batches to a given block as arrays of rows.
def fetch_in_batch(query, batch_size = 1000, &block)
raise "No block given for the batch fetch request!" unless block_given?
# Execute the query and check the result
exec_result = execute(query)
raise_error_if_failed!(exec_result)
# Get search operation handle to fetch the results
op_handle = exec_result.operationHandle
# Prepare fetch results request
fetch_req = prepare_fetch_results(op_handle, :next, batch_size)
# Now let's iterate over the results
loop do
rows = fetch_rows(op_handle, :next, batch_size)
break if rows.empty?
yield rows
end
end
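# Sketch of batched fetching (hedged: the query, batch size, and process
# callback are illustrative):
#
#   conn.fetch_in_batch('SELECT * FROM logs', 500) do |batch|
#     batch.each { |row| process(row) }
#   end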
def create_table(schema)
execute(schema.create_table_statement)
end
def drop_table(name)
name = name.name if name.is_a?(TableSchema)
execute("DROP TABLE `#{name}`")
end
def replace_columns(schema)
execute(schema.replace_columns_statement)
end
def add_columns(schema)
execute(schema.add_columns_statement)
end
def method_missing(meth, *args)
client.send(meth, *args)
end
private
def prepare_open_session(client_protocol)
req = ::Hive2::Thrift::TOpenSessionReq.new( @options[:sasl_params].nil? ? [] : @options[:sasl_params] )
req.client_protocol = client_protocol
req
end
def prepare_close_session
::Hive2::Thrift::TCloseSessionReq.new( sessionHandle: self.session )
end
def prepare_execute_statement(query)
::Hive2::Thrift::TExecuteStatementReq.new( sessionHandle: self.session, statement: query.to_s, confOverlay: {} )
end
def prepare_fetch_results(handle, orientation=:first, rows=100)
orientation_value = "FETCH_#{orientation.to_s.upcase}"
valid_orientations = ::Hive2::Thrift::TFetchOrientation::VALUE_MAP.values
unless valid_orientations.include?(orientation_value)
raise ArgumentError, "Invalid orientation: #{orientation.inspect}"
end
orientation_const = eval("::Hive2::Thrift::TFetchOrientation::#{orientation_value}")
::Hive2::Thrift::TFetchResultsReq.new(
operationHandle: handle,
orientation: orientation_const,
maxRows: rows
)
end
def prepare_operation_handle(handles)
validate_handles!(handles)
Hive2::Thrift::TOperationHandle.new(
operationId: Hive2::Thrift::THandleIdentifier.new(guid: handles[:guid], secret: handles[:secret]),
operationType: Hive2::Thrift::TOperationType::EXECUTE_STATEMENT,
hasResultSet: false
)
end
def prepare_cancel_request(handles)
Hive2::Thrift::TCancelOperationReq.new(
operationHandle: prepare_operation_handle(handles)
)
end
def validate_handles!(handles)
unless handles.has_key?(:guid) and handles.has_key?(:secret) and handles.has_key?(:session)
raise "Invalid handles hash: #{handles.inspect}"
end
end
def get_schema_for(handle)
req = ::Hive2::Thrift::TGetResultSetMetadataReq.new( operationHandle: handle )
metadata = client.GetResultSetMetadata( req )
metadata.schema
end
# Raises an exception if given operation result is a failure
def raise_error_if_failed!(result)
return if result.status.statusCode == 0
error_message = result.status.errorMessage || 'Execution failed!'
raise RBHive::TCLIConnectionError.new(error_message)
end
end
|
Sage/fudge | lib/fudge/cli.rb | Fudge.Cli.init | ruby | def init
generator = Fudge::Generator.new(Dir.pwd)
msg = generator.write_fudgefile
shell.say msg
end | Initializes the blank Fudgefile | train | https://github.com/Sage/fudge/blob/2a22b68f5ea96409b61949a503c6ee0b6d683920/lib/fudge/cli.rb#L8-L12 | class Cli < Thor
desc "init", "Initialize a blank Fudgefile"
# Initializes the blank Fudgefile
desc "build [BUILD_NAME]",
"Run a build with the given name (default: 'default')"
method_option :callbacks, :type => :boolean, :default => false
method_option :time, :type => :boolean, :default => false
# Runs the parsed builds
# @param [String] build_name the given build to run (default 'default')
def build(build_name='default')
description = Fudge::Parser.new.parse('Fudgefile')
Fudge::Runner.new(description).run_build(build_name, options)
end
desc "list [FILTER]",
"List all builds defined in the Fudgefile that match FILTER (default: list all)"
# Lists builds defined in Fudgefile, with optional filtering.
#
# If given no filter, all builds are listed. If given a filter,
# lists builds whose names match the filter. Matching is based on
# sub-string matching and is case insensitive.
#
# The listing includes the build name, followed by the about
# string if one was specified in the Fudgefile.
#
# @param [String] filter a string by which to filter the builds listed
def list(filter="")
description = Fudge::Parser.new.parse('Fudgefile')
builds = description.builds.map { |name, build| ["#{name}", build.about] }
matches = builds.select { |name, _| name =~ /#{filter}/i }
shell.print_table(matches, :indent => 2, :truncate => true)
end
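# Example invocations from a shell (illustrative; Thor maps each public
# method above to a subcommand of the fudge executable):
#
#   $ fudge init
#   $ fudge build default --time
#   $ fudge list spec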
end
|
projectcypress/health-data-standards | lib/hqmf-parser/2.0/data_criteria_helpers/dc_base_extract.rb | HQMF2.DataCriteriaBaseExtractions.all_subset_operators | ruby | def all_subset_operators
@entry.xpath('./*/cda:excerpt', HQMF2::Document::NAMESPACES).collect do |subset_operator|
SubsetOperator.new(subset_operator)
end
end | Extracts all subset operators contained in the entry xml | train | https://github.com/projectcypress/health-data-standards/blob/252d4f0927c513eacde6b9ea41b76faa1423c34b/lib/hqmf-parser/2.0/data_criteria_helpers/dc_base_extract.rb#L56-L60 | class DataCriteriaBaseExtractions
include HQMF2::Utilities
CONJUNCTION_CODE_TO_DERIVATION_OP = {
'OR' => 'UNION',
'AND' => 'XPRODUCT'
}
def initialize(entry)
@entry = entry
end
# Extract the local variable name (held in the value of the localVariableName element)
def extract_local_variable_name
lvn = @entry.at_xpath('./cda:localVariableName')
lvn['value'] if lvn
end
# Generate a list of child criterias
def extract_child_criteria
@entry.xpath("./*/cda:outboundRelationship[@typeCode='COMP']/cda:criteriaReference/cda:id",
HQMF2::Document::NAMESPACES).collect do |ref|
Reference.new(ref).id
end.compact
end
# Extracts the derivation operator to be used by the data criteria; raises if it finds
# more than one (which should not be valid)
def extract_derivation_operator
codes = @entry.xpath("./*/cda:outboundRelationship[@typeCode='COMP']/cda:conjunctionCode/@code",
HQMF2::Document::NAMESPACES)
codes.inject(nil) do |d_op, code|
if d_op && d_op != CONJUNCTION_CODE_TO_DERIVATION_OP[code.value]
fail 'More than one derivation operator in data criteria'
end
CONJUNCTION_CODE_TO_DERIVATION_OP[code.value]
end
end
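# A hedged usage sketch (the document and XPath lookup are illustrative; the
# extractors expect a Nokogiri entry node):
#
#   entry = doc.at_xpath('//cda:entry', HQMF2::Document::NAMESPACES)
#   extractions = HQMF2::DataCriteriaBaseExtractions.new(entry)
#   extractions.extract_child_criteria       #=> ["DiagnosisActive_1", ...]
#   extractions.extract_derivation_operator  #=> "UNION" or "XPRODUCT"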
def extract_temporal_references
@entry.xpath('./*/cda:temporallyRelatedInformation', HQMF2::Document::NAMESPACES).collect do |temporal_reference|
TemporalReference.new(temporal_reference)
end
end
# Filters the subset operators to exclude those of type 'UNION' and 'XPRODUCT'
def extract_subset_operators
all_subset_operators.select do |operator|
operator.type != 'UNION' && operator.type != 'XPRODUCT'
end
end
# Extracts all subset operators contained in the entry xml
def extract_template_ids
@entry.xpath('./*/cda:templateId/cda:item', HQMF2::Document::NAMESPACES).collect do |template_def|
HQMF2::Utilities.attr_val(template_def, '@root')
end
end
# Extract the negation (and the negation_code_list_id if appropriate)
def extract_negation
negation = (attr_val('./*/@actionNegationInd').to_s.downcase == 'true')
negation_code_list_id = nil
if negation
res = @entry.at_xpath('./*/cda:outboundRelationship/*/cda:code[@code="410666004"]/../cda:value/@valueSet',
HQMF2::Document::NAMESPACES)
negation_code_list_id = res.value if res
end
[negation, negation_code_list_id]
end
end
|
murb/workbook | lib/workbook/row.rb | Workbook.Row.+ | ruby | def +(row)
rv = super(row)
rv = Workbook::Row.new(rv) unless rv.class == Workbook::Row
return rv
end | plus
@param [Workbook::Row, Array] row to add
@return [Workbook::Row] a new row, not linked to the table | train | https://github.com/murb/workbook/blob/2e12f43c882b7c235455192a2fc48183fe6ec965/lib/workbook/row.rb#L82-L86 | class Row < Array
include Workbook::Modules::Cache
alias_method :compare_without_header, :<=>
attr_accessor :placeholder # The placeholder attribute is used in compares (corresponds to newly created or removed lines, depending on which side you're on)
attr_accessor :format
# Initialize a new row
#
# @param [Workbook::Row, Array<Workbook::Cell>, Array] cells list of cells to initialize the row with, default is empty
# @param [Workbook::Table] table a row normally belongs to a table, reference it here
# @param [Hash] options Supported options: parse_cells_on_batch_creation (parse cell values during row initialization, default: false), cell_parse_options (default {}, see Workbook::Modules::TypeParser)
def initialize cells=[], table=nil, options={}
options=options ? {:parse_cells_on_batch_creation=>false,:cell_parse_options=>{},:clone_cells=>false}.merge(options) : {}
cells = [] if cells==nil
self.table= table
cells.each do |c|
if c.is_a? Workbook::Cell
c = c.clone if options[:clone_cells]
else
c = Workbook::Cell.new(c, {row:self})
c.parse!(options[:cell_parse_options]) if options[:parse_cells_on_batch_creation]
end
push c
end
end
# An internal function used in diffs
#
# @return [Boolean] returns true when this row is not an actual row, but a placeholder row to 'compare' against
def placeholder?
placeholder ? true : false
end
# Returns the table this row belongs to
#
# @return [Workbook::Table] the table this row belongs to
def table
@table if defined?(@table)
end
# Set reference to the table this row belongs to without adding the row to the table
#
# @param [Workbook::Table] t the table this row belongs to
def set_table(t)
@table = t
end
# Set reference to the table this row belongs to and add the row to this table
#
# @param [Workbook::Table] t the table this row belongs to
def table= t
raise ArgumentError, "table should be a Workbook::Table (you passed a #{t.class})" unless t.is_a?(Workbook::Table) or t == nil
if t
@table = t
table.push(self) #unless table.index(self) and self.placeholder?
end
end
# Add cell
# @param [Workbook::Cell, Numeric,String,Time,Date,TrueClass,FalseClass,NilClass] cell or value to add
def push(cell)
cell = Workbook::Cell.new(cell, {row:self}) unless cell.class == Workbook::Cell
super(cell)
end
# Add cell
# @param [Workbook::Cell, Numeric,String,Time,Date,TrueClass,FalseClass,NilClass] cell or value to add
def <<(cell)
cell = Workbook::Cell.new(cell, {row:self}) unless cell.class == Workbook::Cell
super(cell)
end
# plus
# @param [Workbook::Row, Array] row to add
# @return [Workbook::Row] a new row, not linked to the table
# concat
# @param [Workbook::Row, Array] row to add
# @return [self] self
def concat(row)
row = Workbook::Row.new(row) unless row.class == Workbook::Row
super(row)
end
# Overrides normal Array's []-function with support for symbols that identify a column based on the header-values and / or
#
# @example Lookup using fixnum or header value encoded as symbol
# row[1] #=> <Cell value="a">
# row["A"] #=> <Cell value="a">
# row[:a] #=> <Cell value="a">
#
# @param [Fixnum, Symbol, String] index_or_hash that identifies the column (strings are converted to symbols)
# @return [Workbook::Cell, nil]
def [](index_or_hash)
if index_or_hash.is_a? Symbol
rv = nil
begin
rv = to_hash[index_or_hash]
rescue NoMethodError
end
return rv
elsif index_or_hash.is_a? String and index_or_hash.match(/^[A-Z]*$/)
# it looks like a column indicator
return to_a[Workbook::Column.alpha_index_to_number_index(index_or_hash)]
elsif index_or_hash.is_a? String
symbolized = Workbook::Cell.new(index_or_hash, {row:self}).to_sym
self[symbolized]
else
if index_or_hash
return to_a[index_or_hash]
end
end
end
# Overrides normal Array's []=-function with support for symbols that identify a column based on the header-values
#
# @example Lookup using fixnum or header value encoded as symbol (strings are converted to symbols)
# row[1] #=> <Cell value="a">
# row[:a] #=> <Cell value="a">
#
# @param [Fixnum, Symbol, String] index_or_hash that identifies the column
# @param [String, Fixnum, NilClass, Date, DateTime, Time, Float] value
# @return [Workbook::Cell, nil]
def []= index_or_hash, value
index = index_or_hash
if index_or_hash.is_a? Symbol
index = table_header_keys.index(index_or_hash)
elsif index_or_hash.is_a? String and index_or_hash.match(/^[A-Z]*$/)
# it looks like a column indicator
index = Workbook::Column.alpha_index_to_number_index(index_or_hash)
elsif index_or_hash.is_a? String
symbolized = Workbook::Cell.new(index_or_hash, {row:self}).to_sym
index = table_header_keys.index(symbolized)
end
value_celled = Workbook::Cell.new
if value.is_a? Workbook::Cell
value_celled = value
else
current_cell = self[index]
if current_cell.is_a? Workbook::Cell
value_celled = current_cell
end
value_celled.value=(value)
end
value_celled.row = self
super(index,value_celled)
end
# Returns an array of cells with a given background color, normally specified as a CSS-style hex string
#
# @param [String] color a CSS-style hex-string
# @param [Hash] options Option :hash_keys (default true) returns row as an array of symbols
# @return [Array<Symbol>, Workbook::Row<Workbook::Cell>]
def find_cells_by_background_color color=:any, options={}
options = {:hash_keys=>true}.merge(options)
cells = self.collect {|c| c if c.format.has_background_color?(color) }.compact
r = Row.new cells
options[:hash_keys] ? r.to_symbols : r
end
# Returns true when the row belongs to a table and it is the header row (typically the first row)
#
# @return [Boolean]
def header?
table != nil and self.object_id == table_header.object_id
end
# Is this the first row in the table
#
# @return [Boolean, NilClass] returns nil if it doesn't belong to a table, false when it isn't the first row of a table and true when it is.
def first?
table != nil and self.object_id == table.first.object_id
end
# Returns true when all the cells in the row have values whose to_s value equals an empty string
#
# @return [Boolean]
def no_values?
all? {|c| c.value.to_s == ''}
end
# Converts a row to an array of symbol representations of the row content, see also: Workbook::Cell#to_sym
# @return [Array<Symbol>] returns row as an array of symbols
def to_symbols
fetch_cache(:to_symbols){
collect{|c| c.to_sym}
}
end
# Converts the row to an array of Workbook::Cell's
# @return [Array<Workbook::Cell>] returns row as an array of symbols
def to_a
self.collect{|c| c}
end
def table_header
table.header
end
def table_header_keys
table_header.to_symbols
end
# Returns a hash representation of this row
#
# @return [Hash]
def to_hash
keys = table_header_keys
values = self
hash = {}
keys.each_with_index {|k,i| hash[k]=values[i]}
return hash
end
# Quick accessor to the book's template, if it exists
#
# @return [Workbook::Template]
def template
table.template if table
end
# Returns a hash representation of this row
#
# it differs from #to_hash as it doesn't contain the Workbook's Workbook::Cell-objects,
# but the actual values contained in these cells
#
# @return [Hash]
def to_hash_with_values
keys = table_header_keys
values = self
@hash_with_values = {}
keys.each_with_index {|k,i| v=values[i]; v=v.value if v; @hash_with_values[k]=v}
return @hash_with_values
end
# Compares one row with another
#
# @param [Workbook::Row] other row to compare against
# @return [Workbook::Row] a row with the diff result.
def <=> other
a = self.header? ? 0 : 1
b = other.header? ? 0 : 1
return (a <=> b) if (a==0 or b==0)
compare_without_header other
end
# The first cell of the row is considered to be the key
#
# @return [Workbook::Cell] the key cell
def key
first
end
# Compact detaches the row from the table
def compact
r = self.clone
r = r.collect{|c| c unless c.nil?}.compact
end
# clone the row together with the cells
#
# @return [Workbook::Row] a cloned copy of self with cells
def clone
Workbook::Row.new(self, nil, {:clone_cells=>true})
end
# remove all the trailing nil-cells (returning a trimmed clone)
#
# @param [Integer] desired_length of the new row
# @return [Workbook::Row] a trimmed clone of the array
def trim(desired_length=nil)
self.clone.trim!(desired_length)
end
# remove all the trailing nil-cells (returning a trimmed self)
#
# @param [Integer] desired_length of the new row
# @return [Workbook::Row] self
def trim!(desired_length=nil)
self_count = self.count-1
self.count.times do |index|
index = self_count - index
if desired_length and index < desired_length
break
elsif desired_length and index >= desired_length
self.delete_at(index)
elsif self[index].nil?
self.delete_at(index)
else
break
end
end
(desired_length - self.count).times{|a| self << (Workbook::Cell.new(nil))} if desired_length and (desired_length - self.count) > 0
self
end
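# A hedged usage sketch (header names and values are illustrative):
#
#   table = Workbook::Table.new
#   Workbook::Row.new(['name', 'age'], table)     # first row acts as the header
#   row = Workbook::Row.new(['alice', 42], table)
#   row[:name].value  #=> "alice"
#   row.trim!(3)      # pads the row with nil cells up to three cells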
end
|
jferris/effigy | lib/effigy/view.rb | Effigy.View.html | ruby | def html(selector, inner_html)
select(selector).each do |node|
node.inner_html = inner_html
end
end | Replaces the contents of the selected elements with live markup.
@param [String] selector a CSS or XPath string describing the elements to
transform
@param [String] inner_html the new contents of the selected elements. Markup is not escaped.
@example
html('p', '<b>Welcome!</b>')
find('p').html('<b>Welcome!</b>') | train | https://github.com/jferris/effigy/blob/366ad3571b0fc81f681472eb0ae911f47c60a405/lib/effigy/view.rb#L155-L159 | class View
# Replaces the text content of the selected elements.
#
# Markup in the given content is escaped. Use {#html} if you want to
# replace the contents with live markup.
#
# @param [String] selector a CSS or XPath string describing the elements to
# transform
# @param [String] content the text that should be the new contents
# @example
# text('h1', 'a title')
# find('h1').text('a title')
# text('p', '<b>title</b>') # <p><b>title</title></p>
def text(selector, content)
select(selector).each do |node|
node.content = content
end
end
# Adds or updates the given attribute or attributes of the selected elements.
#
# @param [String] selector a CSS or XPath string describing the elements to
# transform
# @param [String,Hash] attributes_or_attribute_name if a String, replaces
# that attribute with the given value. If a Hash, uses the keys as
# attribute names and values as attribute values
# @param [String] value the value for the replaced attribute. Used only if
# attributes_or_attribute_name is a String
# @example
# attr('p', :id => 'an_id', :style => 'display: none')
# attr('p', :id, 'an_id')
# find('p').attr(:id, 'an_id')
def attr(selector, attributes_or_attribute_name, value = nil)
attributes = attributes_or_attribute_name.to_effigy_attributes(value)
select(selector).each do |element|
element.merge!(attributes)
end
end
# Replaces the selected elements with a clone for each item in the
# collection. If multiple elements are selected, only the first element
# will be used for cloning. All selected elements will be removed.
#
# @param [String] selector a CSS or XPath string describing the elements to
# transform
# @param [Enumerable] collection the items that are the base for each
# cloned element
# @example
# titles = %w(one two three)
# find('.post').replace_each(titles) do |title|
# text('h1', title)
# end
def replace_each(selector, collection, &block)
selected_elements = select(selector)
ExampleElementTransformer.new(self, selected_elements).replace_each(collection, &block)
end
# Perform transformations on a string containing an html fragment.
#
# @yield inside the given block, transformation methods such as #text and
# #html can be used on the template. Using a subclass, you can instead
# override the #transform method, which is the preferred approach.
#
# @return [String] the transformed fragment
def render_html_fragment(template, &block)
yield_transform_and_output(Nokogiri::HTML.fragment(template), &block)
end
# Perform transformations on a string containing an html document.
#
# @yield inside the given block, transformation methods such as #text and
# #html can be used on the template. Using a subclass, you can instead
# override the #transform method, which is the preferred approach.
#
# @return [String] the transformed document
def render_html_document(template, &block)
yield_transform_and_output(Nokogiri::HTML.parse(template), &block)
end
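# A hedged usage sketch (the fragment markup and selector are illustrative):
#
#   view = Effigy::View.new
#   view.render_html_fragment('<p class="greeting"></p>') do
#     view.text('.greeting', 'Hello!')
#   end
#   #=> '<p class="greeting">Hello!</p>'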
# Removes the selected elements from the template.
#
# @param [String] selector a CSS or XPath string describing the element to
# transform
# @example
# remove('.post')
# find('.post').remove
def remove(selector)
select(selector).each { |element| element.unlink }
end
# Adds the given class names to the selected elements.
#
# @param [String] selector a CSS or XPath string describing the elements to
# transform
# @param [String] class_names a CSS class name that should be added
# @example
# add_class('a#home', 'selected')
# find('a#home').add_class('selected')
def add_class(selector, *class_names)
select(selector).each do |element|
class_list = ClassList.new(element)
class_list.add class_names
end
end
# Removes the given class names from the selected elements.
#
# Ignores class names that are not present.
#
# @param [String] selector a CSS or XPath string describing the elements to
# transform
# @param [String] class_names a CSS class name that should be removed
# @example
# remove_class('a#home', 'selected')
# find('a#home').remove_class('selected')
def remove_class(selector, *class_names)
select(selector).each do |element|
class_list = ClassList.new(element)
class_list.remove(class_names)
end
end
# Replaces the contents of the selected elements with live markup.
#
# @param [String] selector a CSS or XPath string describing the elements to
# transform
# @param [String] inner_html the new contents of the selected elements. Markup is not escaped.
# @example
# html('p', '<b>Welcome!</b>')
# find('p').html('<b>Welcome!</b>')
# Replaces the selected element with live markup.
#
# The "outer HTML" for the selected tag itself is also replaced.
#
# @param [String] selector a CSS or XPath string describing the element to
# transform
# @param [String] html the new markup to replace the selected element. Markup is
# not escaped.
def replace_with(selector, html)
select(selector).after(html).unlink
end
# Adds the given markup to the end of the selected elements.
#
# @param [String] selector a CSS or XPath string describing the elements to
# which this HTML should be appended
# @param [String] html_to_append the new markup to append to the selected
# element. Markup is not escaped.
def append(selector, html_to_append)
select(selector).each { |node| node.append_fragment html_to_append }
end
# Selects an element or elements for chained transformation.
#
# If given a block, the selection will be in effect during the block.
#
# If not given a block, a {Selection} will be returned on which
# transformation methods can be called. Any methods called on the
# Selection will be delegated back to the view with the selector inserted
# into the parameter list.
#
# @param [String] selector a CSS or XPath string describing the element to
# transform
# @return [Selection] a proxy object on which transformation methods can be
# called
# @example
# find('.post') do
# text('h1', post.title)
# text('p', post.body)
# end
# find('h1').text(post.title).add_class('active')
def find(selector)
if block_given?
old_context = @current_context
@current_context = select(selector)
yield
@current_context = old_context
else
Selection.new(self, selector)
end
end
alias_method :f, :find
# Called by render methods to perform transformations on the source
# template. Override this method in subclasses to perform the
# transformations specific to your view.
#
# @example
# class PostView < Effigy::View
# def initialize(post)
# @post = post
# end
#
# def transform
# find('.post') do
# find('h2').text(post.title)
# find('p').text(post.body)
# end
# end
# end
def transform
end
private
# The current set of nodes on which transformations are performed.
#
# This is usually the entire document, but will be a subset of child nodes
# during {#find} blocks.
attr_reader :current_context
# Returns a set of nodes matching the given selector, or the nodes
# themselves if given a set of nodes.
#
# @param nodes [String,Nokogiri::HTML::NodeSet] if a String, the selector to
# use when determining the current context. When a NodeSet, the set of
# nodes that should be returned.
# @return [Nokogiri::HTML::NodeSet] the nodes selected by the given selector
# or node set.
def select(nodes)
if nodes.respond_to?(:search)
nodes
else
current_context.search(nodes)
end
end
# Clones an element, sets it as the current context, and yields to the
# given block with the given item.
#
# @param [Nokogiri::HTML::Element] the element to clone
# @param [Object] item the item that should be yielded to the block
# @yield [Object] the item passed as item
# @return [Nokogiri::HTML::Element] the clone of the original element
def clone_element_with_item(original_element, item, &block)
item_element = original_element.dup
find(item_element) { yield(item) }
item_element
end
# Converts the transformed document to a string.
#
# Called by render methods after transforming the document using a passed
# block and {#transform}.
#
# Override this in subclasses if you wish to return something besides an
# XHTML string representation of the transformed document.
#
# @return [String] the transformed document as a string
def output
current_context.to_html
end
# Uses the given document or fragment as a basis for transformation.
#
# @yield self, with the document or fragment set as the context.
#
# @return [String] the transformed document or fragment as a string
def yield_transform_and_output(document_or_fragment)
@current_context = document_or_fragment
yield if block_given?
transform
output
end
end
|
thelabtech/questionnaire | app/models/qe/element.rb | Qe.Element.duplicate | ruby | def duplicate(page, parent = nil)
new_element = self.class.new(self.attributes)
case parent
when ChoiceField
new_element.conditional_id = parent.id
when QuestionGrid, QuestionGridWithTotal
new_element.question_grid_id = parent.id
end
new_element.save(:validate => false)
PageElement.create(:element => new_element, :page => page) unless parent
# duplicate children
if respond_to?(:elements) && elements.present?
elements.each {|e| e.duplicate(page, new_element)}
end
new_element
end | copy an item and all its children | train | https://github.com/thelabtech/questionnaire/blob/02eb47cbcda8cca28a5db78e18623d0957aa2c9b/app/models/qe/element.rb#L116-L133 | class Element < ActiveRecord::Base
# self.table_name = "#{self.table_name}"
belongs_to :question_grids, :foreign_key => "question_grid_id"
belongs_to :choice_fields, :foreign_key => "conditional_id"
self.inheritance_column = :kind
has_many :page_elements, :dependent => :destroy
has_many :pages, :through => :page_elements
# TODO rework with namespacing.
scope :active, select("distinct(#{Qe.table_name_prefix}elements.id), #{Qe.table_name_prefix}elements.*").where(QuestionSheet.table_name + '.archived' => false).joins({:pages => :question_sheet})
belongs_to :question_sheet
validates_presence_of :kind, :style
# validates_presence_of :label, :style, :on => :update
validates_length_of :kind, :style, :maximum => 40, :allow_nil => true
# validates_length_of :label, :maximum => 255, :allow_nil => true
# TODO: This needs to get abstracted out to a CustomQuestion class in BOAT
validates_inclusion_of :kind, :in => %w{
Qe::Section
Qe::Paragraph
Qe::TextField
Qe::ChoiceField
Qe::DateField
Qe::FileField
Qe::SchoolPicker
Qe::ProjectPreference
Qe::StateChooser
Qe::QuestionGrid
Qe::QuestionGridWithTotal
Qe::AttachmentField
Qe::ReferenceQuestion
Qe::PaymentQuestion
} # leaf classes
before_validation :set_defaults, :on => :create
attr_accessible :attribute_name,
:cols,
:conditional_id,
:content,
:created_at,
:css_class,
:css_id,
:hide_label,
:hide_option_labels,
:is_confidential,
:kind,
:label,
:max_length,
:no_cache,
:object_name,
:position,
:question_grid_id,
:related_question_sheet_id,
:required,
:slug,
:source,
:style,
:text_xpath,
:tooltip,
:total_cols,
:updated_at,
:value_xpath
# HUMANIZED_ATTRIBUTES = {
# :slug => "Variable"
# }
#
# def self.human_attrib_name(attr)
# HUMANIZED_ATTRIBUTES[attr.to_sym] || super
# end
def has_response?(answer_sheet = nil)
false
end
def required?(answer_sheet = nil)
super()
end
def position(page = nil)
if page
page_elements.where(:page_id => page.id).first.try(:position)
else
self[:position]
end
end
def set_position(position, page = nil)
if page
pe = page_elements.where(:page_id => page.id).first
pe.update_attribute(:position, position) if pe
else
self[:position] = position
end
position
end
def question?
self.kind_of?(Question)
end
# by default the partial for an element matches the class name (override as necessary)
def ptemplate
self.class.to_s.underscore
end
# copy an item and all its children
# include nested elements
def all_elements
if respond_to?(:elements)
(elements + elements.collect(&:all_elements)).flatten
else
[]
end
end
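# A hedged usage sketch of duplicate (the page, element, and grid records are
# illustrative):
#
#   copy = element.duplicate(page)  # top-level copy, linked to page via a PageElement
#   copy = grid.duplicate(page)     # nested elements are copied recursively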
def reuseable?
(self.is_a?(Question) || self.is_a?(QuestionGrid) || self.is_a?(QuestionGridWithTotal))
end
def Element.max_label_length
@@max_label_length ||= Element.columns.find{ |c| c.name == "label" }.limit
end
protected
def set_defaults
if self.content.blank?
case self.class.to_s.demodulize
when "ChoiceField" then self.content ||= "Choice One\nChoice Two\nChoice Three"
when "Paragraph" then self.content ||="Lorem ipsum..."
end
end
if self.style.blank?
case self.class.to_s
when 'TextField' then self.style ||= 'qe/essay'
when 'DateField' then self.style ||= 'qe/date'
when 'FileField' then self.style ||= 'qe/file'
when 'Paragraph' then self.style ||= 'qe/paragraph'
when 'Section' then self.style ||= 'qe/section'
when 'ChoiceField' then self.style = 'qe/checkbox'
when 'QuestionGrid' then self.style ||= 'qe/grid'
when 'QuestionGridWithTotal' then self.style ||= 'qe/grid_with_total'
when 'SchoolPicker' then self.style ||= 'qe/school_picker'
when 'ProjectPreference' then self.style ||= 'qe/project_preference'
when 'StateChooser' then self.style ||= 'qe/state_chooser'
when 'ReferenceQuestion' then self.style ||= 'qe/peer'
else
self.style ||= self.class.to_s.underscore
end
end
end
end
|
hashicorp/vagrant | lib/vagrant/machine_index.rb | Vagrant.MachineIndex.delete | ruby | def delete(entry)
return true if !entry.id
@lock.synchronize do
with_index_lock do
return true if !@machines[entry.id]
# If we don't have the lock, then we need to acquire it.
if !@machine_locks[entry.id]
raise "Unlocked delete on machine: #{entry.id}"
end
# Reload so we have the latest data, then delete and save
unlocked_reload
@machines.delete(entry.id)
unlocked_save
# Release access on this machine
unlocked_release(entry.id)
end
end
true
end | Initializes a MachineIndex at the given file location.
@param [Pathname] data_dir Path to the directory where data for the
index can be stored. This folder should exist and must be writable.
Deletes a machine by UUID.
The machine being deleted with this UUID must either be locked
by this index or must be unlocked.
@param [Entry] entry The entry to delete.
@return [Boolean] true if delete is successful | train | https://github.com/hashicorp/vagrant/blob/c22a145c59790c098f95d50141d9afb48e1ef55f/lib/vagrant/machine_index.rb#L64-L87 | class MachineIndex
include Enumerable
# Initializes a MachineIndex at the given file location.
#
# @param [Pathname] data_dir Path to the directory where data for the
# index can be stored. This folder should exist and must be writable.
def initialize(data_dir)
@data_dir = data_dir
@index_file = data_dir.join("index")
@lock = Monitor.new
@machines = {}
@machine_locks = {}
with_index_lock do
unlocked_reload
end
end
# Deletes a machine by UUID.
#
# The machine being deleted with this UUID must either be locked
# by this index or must be unlocked.
#
# @param [Entry] entry The entry to delete.
# @return [Boolean] true if delete is successful
# Iterate over every machine in the index. The yielded {Entry} objects
# will NOT be locked, so you'll have to call {#get} manually to acquire
# the lock on them.
def each(reload=false)
if reload
@lock.synchronize do
with_index_lock do
unlocked_reload
end
end
end
@machines.each do |uuid, data|
yield Entry.new(uuid, data.merge("id" => uuid))
end
end
# Accesses a machine by UUID and returns a {MachineIndex::Entry}
#
# The entry returned is locked and can't be read again or updated by
# this process or any other. To unlock the machine, call {#release}
# with the entry.
#
# You can only {#set} an entry (update) when the lock is held.
#
# @param [String] uuid UUID for the machine to access.
# @return [MachineIndex::Entry]
def get(uuid)
entry = nil
@lock.synchronize do
with_index_lock do
# Reload the data
unlocked_reload
data = find_by_prefix(uuid)
return nil if !data
uuid = data["id"]
entry = Entry.new(uuid, data)
# Lock this machine
lock_file = lock_machine(uuid)
if !lock_file
raise Errors::MachineLocked,
name: entry.name,
provider: entry.provider
end
@machine_locks[uuid] = lock_file
end
end
entry
end
# Tests if the index has the given UUID.
#
# @param [String] uuid
# @return [Boolean]
def include?(uuid)
@lock.synchronize do
with_index_lock do
unlocked_reload
return !!find_by_prefix(uuid)
end
end
end
# Releases an entry, unlocking it.
#
# This is an idempotent operation. It is safe to call this even if you're
# unsure if an entry is locked or not.
#
# After calling this, the previous entry should no longer be used.
#
# @param [Entry] entry
def release(entry)
@lock.synchronize do
unlocked_release(entry.id)
end
end
# Creates/updates an entry object and returns the resulting entry.
#
# If the entry was new (no UUID), then the UUID will be set on the
# resulting entry and can be used. Additionally, a lock will
# be created for the resulting entry, so you must {#release} it
# if you want others to be able to access it.
#
# If the entry isn't new (has a UUID), then this process must hold
# that entry's lock or else this set will fail.
#
# @param [Entry] entry
# @return [Entry]
def set(entry)
# Get the JSON struct for this entry
struct = entry.to_json_struct
# Set an ID if there isn't one already set
id = entry.id
@lock.synchronize do
with_index_lock do
# Reload so we have the latest machine data. This allows other
# processes to update their own machines without conflicting
# with our own.
unlocked_reload
# If we don't have a machine ID, try to look one up
if !id
self.each do |other|
if entry.name == other.name &&
entry.provider == other.provider &&
entry.vagrantfile_path.to_s == other.vagrantfile_path.to_s
id = other.id
break
end
end
# If we still don't have an ID, generate a random one
id = SecureRandom.uuid.gsub("-", "") if !id
# Get a lock on this machine
lock_file = lock_machine(id)
if !lock_file
raise "Failed to lock new machine: #{entry.name}"
end
@machine_locks[id] = lock_file
end
if !@machine_locks[id]
raise "Unlocked write on machine: #{id}"
end
# Set our machine and save
@machines[id] = struct
unlocked_save
end
end
Entry.new(id, struct)
end
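# A hedged usage sketch (the data path and entry attributes are illustrative):
#
#   index = Vagrant::MachineIndex.new(Pathname.new('/home/user/.vagrant.d/data/machine-index'))
#   entry = Vagrant::MachineIndex::Entry.new
#   entry.name = 'default'
#   entry.provider = 'virtualbox'
#   entry = index.set(entry)  # acquires the machine lock
#   index.release(entry)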
protected
# Finds a machine where the UUID is prefixed by the given string.
#
# @return [Hash]
def find_by_prefix(prefix)
@machines.each do |uuid, data|
return data.merge("id" => uuid) if uuid.start_with?(prefix)
end
nil
end
# Locks a machine exclusively to us, returning the file handle
# that holds the lock.
#
# If the lock cannot be acquired, then nil is returned.
#
# This should be called within an index lock.
#
# @return [File]
def lock_machine(uuid)
lock_path = @data_dir.join("#{uuid}.lock")
lock_file = lock_path.open("w+")
if lock_file.flock(File::LOCK_EX | File::LOCK_NB) === false
lock_file.close
lock_file = nil
end
lock_file
end
# Releases a local lock on a machine. This does not acquire any locks
# so make sure to lock around it.
#
# @param [String] id
def unlocked_release(id)
lock_file = @machine_locks[id]
if lock_file
lock_file.close
begin
File.delete(lock_file.path)
rescue Errno::EACCES
# Another process probably has it open; no problem.
end
@machine_locks.delete(id)
end
end
# This will reload the data without locking the index. It is assumed
# the caller with lock the index outside of this call.
#
# @param [File] f
def unlocked_reload
return if !@index_file.file?
data = nil
begin
data = JSON.load(@index_file.read)
rescue JSON::ParserError
raise Errors::CorruptMachineIndex, path: @index_file.to_s
end
if data
if !data["version"] || data["version"].to_i != 1
raise Errors::CorruptMachineIndex, path: @index_file.to_s
end
@machines = data["machines"] || {}
end
end
# Saves the index.
def unlocked_save
@index_file.open("w") do |f|
f.write(JSON.dump({
"version" => 1,
"machines" => @machines,
}))
end
end
# This will hold a lock to the index so it can be read or updated.
def with_index_lock
lock_path = "#{@index_file}.lock"
File.open(lock_path, "w+") do |f|
f.flock(File::LOCK_EX)
yield
end
end
# An entry in the MachineIndex.
class Entry
# The unique ID for this entry. This is _not_ the ID for the
# machine itself (which is provider-specific and in the data directory).
#
# @return [String]
attr_reader :id
# The path for the "local data" directory for the environment.
#
# @return [Pathname]
attr_accessor :local_data_path
# The name of the machine.
#
# @return [String]
attr_accessor :name
# The name of the provider.
#
# @return [String]
attr_accessor :provider
# The last known state of this machine.
#
# @return [String]
attr_accessor :state
# The valid Vagrantfile filenames for this environment.
#
# @return [Array<String>]
attr_accessor :vagrantfile_name
# The path to the Vagrantfile that manages this machine.
#
# @return [Pathname]
attr_accessor :vagrantfile_path
# The last time this entry was updated.
#
# @return [DateTime]
attr_reader :updated_at
# Extra data to store with the index entry. This can be anything
# and is treated like a general global state bag.
#
# @return [Hash]
attr_accessor :extra_data
# Initializes an entry.
#
# The parameter given should be nil if this is being created
# publicly.
def initialize(id=nil, raw=nil)
@extra_data = {}
# Do nothing if we aren't given a raw value. Otherwise, parse it.
return if !raw
@id = id
@local_data_path = raw["local_data_path"]
@name = raw["name"]
@provider = raw["provider"]
@state = raw["state"]
@vagrantfile_name = raw["vagrantfile_name"]
@vagrantfile_path = raw["vagrantfile_path"]
# TODO(mitchellh): parse into a proper datetime
@updated_at = raw["updated_at"]
@extra_data = raw["extra_data"] || {}
# Be careful with the paths
@local_data_path = nil if @local_data_path == ""
@vagrantfile_path = nil if @vagrantfile_path == ""
# Convert to proper types
@local_data_path = Pathname.new(@local_data_path) if @local_data_path
@vagrantfile_path = Pathname.new(@vagrantfile_path) if @vagrantfile_path
end
# Returns boolean true if this entry appears to be valid.
# The criteria for being valid:
#
# * Vagrantfile directory exists
# * Vagrant environment contains a machine with this
# name and provider.
#
# This method is _slow_. It should be used with care.
#
# @param [Pathname] home_path The home path for the Vagrant
# environment.
# @return [Boolean]
def valid?(home_path)
return false if !vagrantfile_path
return false if !vagrantfile_path.directory?
# Create an environment so we can determine the active
# machines...
found = false
env = vagrant_env(home_path)
env.active_machines.each do |name, provider|
if name.to_s == self.name.to_s &&
provider.to_s == self.provider.to_s
found = true
break
end
end
# If an active machine of the same name/provider was not
# found, it is already false.
return false if !found
# Get the machine
machine = nil
begin
machine = env.machine(self.name.to_sym, self.provider.to_sym)
rescue Errors::MachineNotFound
return false
end
# Refresh the machine state
return false if machine.state.id == MachineState::NOT_CREATED_ID
true
end
# Creates a {Vagrant::Environment} for this entry.
#
# @return [Vagrant::Environment]
def vagrant_env(home_path, **opts)
Vagrant::Util::SilenceWarnings.silence! do
Environment.new({
cwd: @vagrantfile_path,
home_path: home_path,
local_data_path: @local_data_path,
vagrantfile_name: @vagrantfile_name,
}.merge(opts))
end
end
# Converts to the structure used by the JSON
def to_json_struct
{
"local_data_path" => @local_data_path.to_s,
"name" => @name,
"provider" => @provider,
"state" => @state,
"vagrantfile_name" => @vagrantfile_name,
"vagrantfile_path" => @vagrantfile_path.to_s,
"updated_at" => @updated_at,
"extra_data" => @extra_data,
}
end
end
end
|
NCSU-Libraries/lentil | lib/lentil/instagram_harvester.rb | Lentil.InstagramHarvester.leave_image_comment | ruby | def leave_image_comment(image, comment)
configure_comment_connection
Instagram.client.create_media_comment(image.external_identifier, comment)
end | Leave a comment containing the donor agreement on an Instagram image
@param image [Image] An Image model object from the Instagram service
@raise [Exception] If a comment submission fails
@authenticated true
@return [Hashie::Mash] Instagram response | train | https://github.com/NCSU-Libraries/lentil/blob/c31775447a52db1781c05f6724ae293698527fe6/lib/lentil/instagram_harvester.rb#L275-L278 | class InstagramHarvester
#
# Configure the Instagram class in preparation for requests.
#
# @options opts [String] :client_id (Lentil::Engine::APP_CONFIG["instagram_client_id"]) The Instagram client ID
# @options opts [String] :client_secret (Lentil::Engine::APP_CONFIG["instagram_client_secret"]) The Instagram client secret
# @options opts [String] :access_token (nil) The optional Instagram client ID
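#
# Illustrative call with explicit credentials (the values are placeholders):
#
#   harvester = Lentil::InstagramHarvester.new
#   harvester.configure_connection('client_id' => 'ID', 'client_secret' => 'SECRET')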
def configure_connection(opts = {})
opts['client_id'] ||= Lentil::Engine::APP_CONFIG["instagram_client_id"]
opts['client_secret'] ||= Lentil::Engine::APP_CONFIG["instagram_client_secret"]
opts['access_token'] ||= Lentil::Engine::APP_CONFIG["instagram_access_token"] || nil
Instagram.configure do |config|
config.client_id = opts['client_id']
config.client_secret = opts['client_secret']
if (opts['access_token'])
config.access_token = opts['access_token']
end
end
end
#
# Configure the Instagram class in preparation for leaving comments
#
# @param access_token = nil [String] Instagram access token for the writing account
def configure_comment_connection(access_token = nil)
access_token ||= Lentil::Engine::APP_CONFIG["instagram_access_token"] || nil
raise "instagram_access_token must be defined as a parameter or in the application config" unless access_token
configure_connection({'access_token' => access_token})
end
# Queries the Instagram API for recent images with a given tag.
#
# @param [String] tag The tag to query by
#
# @return [Hashie::Mash] The data returned by Instagram API
def fetch_recent_images_by_tag(tag = nil)
configure_connection
tag ||= Lentil::Engine::APP_CONFIG["default_image_search_tag"]
Instagram.tag_recent_media(tag, :count=>10)
end
# Queries the Instagram API for the image metadata associated with a given ID.
#
# @param [String] image_id Instagram image ID
#
# @return [Hashie::Mash] data returned by Instagram API
def fetch_image_by_id(image_id)
configure_connection
Instagram.media_item(image_id)
end
# Retrieves an image's OEmbed metadata from the public URL using the Instagram OEmbed service
#
# @param url [String] The public Instagram image URL
#
# @return [OEmbed::Response] the Instagram image OEmbed data
def retrieve_oembed_data_from_url(url)
OEmbed::Providers::Instagram.get(url)
end
# Retrieves image metadata via the public URL and imports it
#
# @param url [String] The public Instagram image URL
#
# @return [Array] new image objects
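#
# Illustrative call (the URL is a placeholder, not a real post):
#
#   images = Lentil::InstagramHarvester.new.save_image_from_url('https://instagram.com/p/SHORTCODE/')
#   images.map(&:external_identifier)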
def save_image_from_url(url)
save_instagram_load(fetch_image_by_id(retrieve_oembed_data_from_url(url).fields["media_id"]))
end
# Produce processed image metadata from Instagram metadata.
# This metadata is accepted by the save_image method.
#
# @param [Hashie::Mash] instagram_metadata The single image metadata returned by Instagram API
#
# @return [Hash] processed image metadata
def extract_image_data(instagram_metadata)
{
url: instagram_metadata.link,
external_id: instagram_metadata.id,
large_url: instagram_metadata.images.standard_resolution.url,
name: instagram_metadata.caption && instagram_metadata.caption.text,
tags: instagram_metadata.tags,
user: instagram_metadata.user,
original_datetime: Time.at(instagram_metadata.created_time.to_i).to_datetime,
original_metadata: instagram_metadata,
media_type: instagram_metadata.type,
video_url: instagram_metadata.videos && instagram_metadata.videos.standard_resolution.url
}
end
# Takes the return value from the Instagram API gem and adds the image,
# users, and tags to the database.
#
# @raise [DuplicateImageError] This method does not accept duplicate external image IDs
#
# @param [Hash] image_data processed Instagram image metadata
#
# @return [Image] new Image object
def save_image(image_data)
instagram_service = Lentil::Service.where(:name => "Instagram").first
user_record = instagram_service.users.where(:user_name => image_data[:user][:username]).
first_or_create!({:full_name => image_data[:user][:full_name], :bio => image_data[:user][:bio]})
raise DuplicateImageError, "Duplicate image identifier" unless user_record.
images.where(:external_identifier => image_data[:external_id]).first.nil?
image_record = user_record.images.build({
:external_identifier => image_data[:external_id],
:description => image_data[:name],
:url => image_data[:url],
:long_url => image_data[:large_url],
:video_url => image_data[:video_url],
:original_datetime => image_data[:original_datetime],
:media_type => image_data[:media_type]
})
image_record.original_metadata = image_data[:original_metadata].to_hash
# Default to "All Rights Reserved" until we find out more about licenses
# FIXME: Set the default license in the app config
unless image_record.licenses.size > 0
image_record.licenses << Lentil::License.where(:short_name => "ARR").first
end
image_data[:tags].each {|tag| image_record.tags << Lentil::Tag.where(:name => tag).first_or_create}
user_record.save!
image_record.save!
image_record
end
# Takes the return value from the Instagram API gem and adds all new images,
# users, and tags to the database.
#
# @param [Hashie::Mash] instagram_load The content returned by the Instagram gem
# @param [Boolean] raise_dupes Whether to raise exceptions for duplicate images
#
# @raise [DuplicateImageError] If there are duplicate images and raise_dupes is true
#
# @return [Array] New image objects
def save_instagram_load(instagram_load, raise_dupes=false)
# Handle collections of images and individual images
images = instagram_load
if !images.kind_of?(Array)
images = [images]
end
images.collect {|image|
begin
save_image(extract_image_data(image))
rescue DuplicateImageError => e
raise e if raise_dupes
next
rescue => e
Rails.logger.error e.message
puts e.message
pp image
next
end
}.compact
end
#
# Call save_instagram_load, but raise exceptions for duplicates.
#
# @param [Hashie::Mash] instagram_load The content returned by the Instagram gem
#
# @raise [DuplicateImageError] If there are duplicate images
#
# @return [Array] New image objects
def save_instagram_load!(instagram_load)
save_instagram_load(instagram_load, true)
end
#
# Retrieve the binary image data for a given Image object
#
# @param [Image] image An Image model object from the Instagram service
#
# @raise [Exception] If there are request problems
#
# @return [String] Binary image data
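#
# Illustrative use, persisting the harvested bytes (the target path is a
# placeholder):
#
#   data = harvester.harvest_image_data(image)
#   File.binwrite("tmp/#{image.external_identifier}.jpg", data)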
def harvest_image_data(image)
response = Typhoeus.get(image.large_url(false), followlocation: true)
if response.success?
raise "Invalid content type: " + response.headers['Content-Type'] unless (response.headers['Content-Type'] == 'image/jpeg')
elsif response.timed_out?
raise "Request timed out"
elsif response.code == 0
raise "Could not get an HTTP response"
else
raise "HTTP request failed: " + response.code.to_s
end
response.body
end
#
# Retrieve the binary video data for a given Image object
#
# @param [Image] image An Image model object from the Instagram service
#
# @raise [Exception] If there are request problems
#
# @return [String] Binary video data
def harvest_video_data(image)
response = Typhoeus.get(image.video_url, followlocation: true)
if response.success?
raise "Invalid content type: " + response.headers['Content-Type'] unless (response.headers['Content-Type'] == 'video/mp4')
elsif response.timed_out?
raise "Request timed out"
elsif response.code == 0
raise "Could not get an HTTP response"
else
raise "HTTP request failed: " + response.code.to_s
end
response.body
end
#
# Test if an image is still available
#
# @param [Image] image An Image model object from the Instagram service
#
# @raise [Exception] If there are request problems
#
# @return [Boolean] Whether the image request was successful
def test_remote_image(image)
response = Typhoeus.get(image.thumbnail_url(false), followlocation: true)
if response.success?
true
elsif response.timed_out? || (response.code == 0)
nil
else
false
end
end
#
# Leave a comment containing the donor agreement on an Instagram image
#
# @param image [Image] An Image model object from the Instagram service
#
# @raise [Exception] If a comment submission fails
# @authenticated true
#
# @return [Hashie::Mash] Instagram response
end
|
marcinwyszynski/statefully | lib/statefully/state.rb | Statefully.State.respond_to_missing? | ruby | def respond_to_missing?(name, _include_private = false)
str_name = name.to_s
key?(name.to_sym) || %w[? !].any?(&str_name.method(:end_with?)) || super
end | Companion to `method_missing`
This method reeks of :reek:BooleanParameter.
@param name [Symbol|String]
@param _include_private [Boolean]
@return [Boolean]
@api private | train | https://github.com/marcinwyszynski/statefully/blob/affca50625a26229e1af7ee30f2fe12bf9cddda9/lib/statefully/state.rb#L295-L298 | class State
include Enumerable
extend Forwardable
# Return the previous {State}
#
# @return [State]
# @api public
# @example
# Statefully::State.create.previous
# => #<Statefully::State::None>
#
# Statefully::State.create.succeed.previous
# => #<Statefully::State::Success>
attr_reader :previous
# @!method each
# @return [Enumerator]
# @see https://docs.ruby-lang.org/en/2.0.0/Hash.html#method-i-each Hash#each
# @api public
# @example
# Statefully::State.create(key: 'val').each { |key, val| puts("#{key} => #{val}") }
# key => val
# @!method fetch
# @return [Object]
# @see https://docs.ruby-lang.org/en/2.0.0/Hash.html#method-i-fetch Hash#fetch
# @api public
# @example
# Statefully::State.create(key: 'val').fetch(:key)
# => 'val'
# @!method key?
# @return [Boolean]
# @see https://docs.ruby-lang.org/en/2.0.0/Hash.html#method-i-key-3F Hash#key?
# @api public
# @example
# state = Statefully::State.create(key: 'val')
# state.key?(:key)
# => true
# state.key?(:other)
# => false
# @!method keys
# @return [Array<Symbol>]
# @see https://docs.ruby-lang.org/en/2.0.0/Hash.html#method-i-keys Hash#keys
# @api public
# @example
# Statefully::State.create(key: 'val').keys
# => [:key]
def_delegators :@_members, :each, :fetch, :key?, :keys
# Create an instance of {State} object
#
# This is meant as the only valid way of creating {State} objects.
#
# @param values [Hash<Symbol, Object>] keyword arguments
#
# @return [State::Success] new successful State
# @api public
# @example
# Statefully::State.create(key: 'val')
# => #<Statefully::State::Success key="val">
def self.create(**values)
base = { correlation_id: SecureRandom.uuid }
Success.send(:new, base.merge(values), previous: None.instance).freeze
end
# Return all States that came before
#
# @return [Array<State>]
# @api public
# @example
# state = Statefully::State.create
# => [#<Statefully::State::None>]
def ancestry
[previous] + previous.ancestry
end
# Return a {Diff} between current and previous {State}
#
# @return [Diff]
# @api public
# @example
# Statefully::State.create.succeed(key: 'val').diff
# => #<Statefully::Diff::Changed added={key: "val"}>
def diff
Diff.create(current: self, previous: previous)
end
# Return all historical changes to this {State}
#
# @return [Array<Diff>]
# @api public
# @example
# Statefully::State.create.succeed(key: 'val').history
# => [#<Statefully::Diff::Changed added={key: "val"}>, #<Statefully::Diff::Created>]
def history
([diff] + previous.history).freeze
end
# Check if the current {State} is successful
#
# @return [Boolean]
# @api public
# @example
# state = Statefully::State.create
# state.successful?
# => true
#
# state.fail(RuntimeError.new('Boom!')).successful?
# => false
def successful?
true
end
# Check if the current {State} is failed
#
# @return [Boolean]
# @api public
# @example
# state = Statefully::State.create
# state.failed?
# => false
#
# state.fail(RuntimeError.new('Boom!')).failed?
# => true
def failed?
!successful?
end
# Check if the current {State} is finished
#
# @return [Boolean]
# @api public
# @example
# state = Statefully::State.create
# state.finished?
# => false
#
# state.finish.finished?
# => true
def finished?
false
end
# Check if the current {State} is none (a null-object of {State})
#
# @return [Boolean]
# @api public
# @example
# state = Statefully::State.create
# state.none?
# => false
#
# state.previous.none?
# => true
def none?
false
end
# Resolve the current {State}
#
# Resolving will return the current {State} if successful, but raise an
# error wrapped in a {State::Failure}. This is a convenience method inspired
# by monadic composition from functional languages.
#
# @return [State] if the receiver is {#successful?}
# @raise [StandardError] if the receiver is {#failed?}
# @api public
# @example
# Statefully::State.create(key: 'val').resolve
# => #<Statefully::State::Success key="val">
#
# Statefully::State.create.fail(RuntimeError.new('Boom!')).resolve
# RuntimeError: Boom!
# [STACK TRACE]
def resolve
self
end
# Show the current {State} in a human-readable form
#
# @return [String]
# @api public
# @example
# Statefully::State.create(key: 'val')
# => #<Statefully::State::Success key="val">
def inspect
_inspect_details({})
end
private
# State fields
#
# @return [Hash]
# @api private
attr_reader :_members
# Constructor for the {State} object
#
# @param values [Hash<Symbol, Object>] values to store
# @param previous [State] previous {State}
#
# @return [State]
# @api private
def initialize(values, previous:)
@_members = values.freeze
@previous = previous
end
private_class_method :new
# Inspect {State} fields, with extras
#
# @param extras [Hash] Non-member values to include
#
# @return [String]
# @api private
def _inspect_details(extras)
details = [self.class.name]
fields = _members.merge(extras)
details << Inspect.from_fields(fields) unless fields.empty?
"#<#{details.join(' ')}>"
end
# Dynamically pass unknown messages to the underlying state storage
#
# State fields become accessible through readers, like in an
# {http://ruby-doc.org/stdlib-2.0.0/libdoc/ostruct/rdoc/OpenStruct.html OpenStruct}.
# A single state field can be questioned for existence by having its name
# followed by a question mark - eg. bacon?.
# A single state field can be force-accessed by having its name followed by
# an exclamation mark - eg. bacon!.
#
# This method reeks of :reek:TooManyStatements.
#
# @param name [Symbol|String]
# @param args [Array<Object>]
# @param block [Proc]
#
# @return [Object]
# @raise [NoMethodError]
# @raise [Errors::StateMissing]
# @api private
# @example
# state = Statefully::State.create(bacon: 'tasty')
#
# state.bacon
# => "tasty"
#
# state.bacon?
# => true
#
# state.bacon!
# => "tasty"
#
# state.cabbage
# NoMethodError: undefined method `cabbage' for #<Statefully::State::Success bacon="tasty">
# [STACK TRACE]
#
# state.cabbage?
# => false
#
# state.cabbage!
# Statefully::Errors::StateMissing: field 'cabbage' missing from state
# [STACK TRACE]
def method_missing(name, *args, &block)
sym_name = name.to_sym
return fetch(sym_name) if key?(sym_name)
str_name = name.to_s
modifier = str_name[-1]
return super unless %w[? !].include?(modifier)
base = str_name[0...-1].to_sym
known = key?(base)
return known if modifier == '?'
return fetch(base) if known
raise Errors::StateMissing, base
end
# Companion to `method_missing`
#
# This method reeks of :reek:BooleanParameter.
#
# @param name [Symbol|String]
# @param _include_private [Boolean]
#
# @return [Boolean]
# @api private
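# @example Illustrative behaviour (mirrors the method_missing examples above)
#   state = Statefully::State.create(bacon: 'tasty')
#   state.respond_to?(:bacon) # => true
#   state.respond_to?(:bacon?) # => true
#   state.respond_to?(:cabbage) # => false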
# {None} is a null-value of {State}
class None < State
include Singleton
# Base case - {None} state does not have any ancestry
#
# @return [Array<State>]
# @api public
# @example
# Statefully::State::None.instance.ancestry
# => []
def ancestry
[]
end
# Return all historical changes to this {State}
#
# @return [Array<Diff>]
# @api public
# @example
# Statefully::State.create.succeed(key: 'val').history
# => [#<Statefully::Diff::Changed added={key: "val"}>, #<Statefully::Diff::Created>]
def history
[]
end
# Check if the current {State} is none (a null-object of {State})
#
# @return [Boolean]
# @api public
# @example
# state = Statefully::State.create
# state.none?
# => false
#
# state.previous.none?
# => true
def none?
true
end
private
# Constructor for the {None} object
# @api private
def initialize
@_members = {}.freeze
@previous = self
end
end
# {Success} is a not-yet failed {State}.
class Success < State
# Return the next, successful {State} with new values merged in (if any)
#
# @param values [Hash<Symbol, Object>] New values of the {State}
#
# @return [State::Success] new successful {State}
# @api public
# @example
# Statefully::State.create.succeed(key: 'val')
# => #<Statefully::State::Success key="val">
def succeed(**values)
self.class.send(:new, _members.merge(values).freeze, previous: self)
end
# Return the next, failed {State} with a stored error
#
# @param error [StandardError] error to store
#
# @return [State::Failure] new failed {State}
# @api public
# @example
# Statefully::State.create(key: 'val').fail(RuntimeError.new('Boom!'))
# => #<Statefully::State::Failure key="val", error="#<RuntimeError: Boom!>">
def fail(error)
Failure.send(:new, _members, error, previous: self).freeze
end
# Return the next, finished {State}
#
# @return [State::State] new finished {State}
# @api public
# @example
# Statefully::State.create(key: 'val').finish
# => #<Statefully::State::Finished key="val">
def finish
Finished.send(:new, _members, previous: self).freeze
end
end
# {Failure} is a failed {State}.
class Failure < State
# Error stored in the current {State}
#
# @return [StandardError]
# @api public
# @example
# state = Statefully::State.create(key: 'val').fail(RuntimeError.new('Boom!'))
# state.error
# => #<RuntimeError: Boom!>
attr_reader :error
# Constructor for the {Failure} object
#
# @param values [Hash<Symbol, Object>] fields to be stored
# @param error [StandardError] error to be wrapped
# @param previous [State] previous state
# @api private
def initialize(values, error, previous:)
super(values, previous: previous)
@error = error
end
# Return a {Diff} between current and previous {State}
#
# @return [Diff::Failed]
# @api public
# @example
# state = Statefully::State.create(key: 'val').fail(RuntimeError.new('Boom!'))
# state.diff
# => #<Statefully::Diff::Failed error=#<RuntimeError: Boom!>>
def diff
Diff::Failed.new(error).freeze
end
# Check if the current {State} is successful
#
# @return [Boolean]
# @api public
# @example
# state = Statefully::State.create
# state.successful?
# => true
#
# state.fail(RuntimeError.new('Boom!')).successful?
# => false
def successful?
false
end
# Resolve the current {State}
#
# Resolving will return the current {State} if successful, but raise an
# error wrapped in a {State::Failure}. This is a convenience method inspired
# by monadic composition from functional languages.
#
# @return [State] if the receiver is {#successful?}
# @raise [StandardError] if the receiver is {#failed?}
# @api public
# @example
# Statefully::State.create(key: 'val').resolve
# => #<Statefully::State::Success key="val">
#
# Statefully::State.create.fail(RuntimeError.new('Boom!')).resolve
# RuntimeError: Boom!
# [STACK TRACE]
def resolve
raise error
end
# Show the current {State} in a human-readable form
#
# @return [String]
# @api public
# @example
# Statefully::State.create.fail(RuntimeError.new('Boom!'))
# => #<Statefully::State::Failure error="#<RuntimeError: Boom!>">
def inspect
_inspect_details(error: error.inspect)
end
end
# {Finished} state is a state which is successful, but should not be
# processed any further. This could be useful for things like early returns.
class Finished < State
# Return a {Diff} between current and previous {State}
#
# This method reeks of :reek:UtilityFunction - just implementing an API.
#
# @return [Diff::Finished]
# @api public
# @example
# Statefully::State.create(key: 'val').finish.diff
# => #<Statefully::Diff::Finished>
def diff
Diff::Finished.instance
end
# Check if the current {State} is finished
#
# @return [Boolean]
# @api public
# @example
# state = Statefully::State.create
# state.finished?
# => false
#
# state.finish.finished?
# => true
def finished?
true
end
end
end
|
mongodb/mongoid | lib/mongoid/attributes.rb | Mongoid.Attributes.typed_value_for | ruby | def typed_value_for(key, value)
fields.key?(key) ? fields[key].mongoize(value) : value.mongoize
end | Return the typecasted value for a field.
@example Get the value typecasted.
person.typed_value_for(:title, :sir)
@param [ String, Symbol ] key The field name.
@param [ Object ] value The uncast value.
@return [ Object ] The cast value.
@since 1.0.0 | train | https://github.com/mongodb/mongoid/blob/56976e32610f4c2450882b0bfe14da099f0703f4/lib/mongoid/attributes.rb#L287-L289 | module Attributes
extend ActiveSupport::Concern
include Nested
include Processing
include Readonly
attr_reader :attributes
alias :raw_attributes :attributes
# Determine if an attribute is present.
#
# @example Is the attribute present?
# person.attribute_present?("title")
#
# @param [ String, Symbol ] name The name of the attribute.
#
# @return [ true, false ] True if present, false if not.
#
# @since 1.0.0
def attribute_present?(name)
attribute = read_raw_attribute(name)
!attribute.blank? || attribute == false
rescue ActiveModel::MissingAttributeError
false
end
# Get the attributes that have not been cast.
#
# @example Get the attributes before type cast.
# document.attributes_before_type_cast
#
# @return [ Hash ] The uncast attributes.
#
# @since 3.1.0
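#
# @example Illustrative result (the model and its typed field are hypothetical).
#   band = Band.new(likes: "1000")
#   band.attributes_before_type_cast
#   # => { "likes" => "1000" }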
def attributes_before_type_cast
@attributes_before_type_cast ||= {}
end
# Does the document have the provided attribute?
#
# @example Does the document have the attribute?
# model.has_attribute?(:name)
#
# @param [ String, Symbol ] name The name of the attribute.
#
# @return [ true, false ] If the key is present in the attributes.
#
# @since 3.0.0
def has_attribute?(name)
attributes.key?(name.to_s)
end
# Does the document have the provided attribute before it was assigned
# and type cast?
#
# @example Does the document have the attribute before it was assigned?
# model.has_attribute_before_type_cast?(:name)
#
# @param [ String, Symbol ] name The name of the attribute.
#
# @return [ true, false ] If the key is present in the
# attributes_before_type_cast.
#
# @since 3.1.0
def has_attribute_before_type_cast?(name)
attributes_before_type_cast.key?(name.to_s)
end
# Read a value from the document attributes. If the value does not exist
# it will return nil.
#
# @example Read an attribute.
# person.read_attribute(:title)
#
# @example Read an attribute (alternate syntax.)
# person[:title]
#
# @param [ String, Symbol ] name The name of the attribute to get.
#
# @return [ Object ] The value of the attribute.
#
# @since 1.0.0
def read_attribute(name)
field = fields[name.to_s]
raw = read_raw_attribute(name)
field ? field.demongoize(raw) : raw
end
alias :[] :read_attribute
# Read a value from the attributes before type cast. If the value has not
# yet been assigned then this will return the attribute's existing value
# using read_raw_attribute.
#
# @example Read an attribute before type cast.
# person.read_attribute_before_type_cast(:price)
#
# @param [ String, Symbol ] name The name of the attribute to get.
#
# @return [ Object ] The value of the attribute before type cast, if
# available. Otherwise, the value of the attribute.
#
# @since 3.1.0
def read_attribute_before_type_cast(name)
attr = name.to_s
if attributes_before_type_cast.key?(attr)
attributes_before_type_cast[attr]
else
read_raw_attribute(attr)
end
end
# Remove a value from the +Document+ attributes. If the value does not exist
# it will fail gracefully.
#
# @example Remove the attribute.
# person.remove_attribute(:title)
#
# @param [ String, Symbol ] name The name of the attribute to remove.
#
# @raise [ Errors::ReadonlyAttribute ] If the field cannot be removed due
# to being flagged as readonly.
#
# @since 1.0.0
def remove_attribute(name)
as_writable_attribute!(name) do |access|
_assigning do
attribute_will_change!(access)
delayed_atomic_unsets[atomic_attribute_name(access)] = [] unless new_record?
attributes.delete(access)
end
end
end
# Write a single attribute to the document attribute hash. This will
# also fire the before and after update callbacks, and perform any
# necessary typecasting.
#
# @example Write the attribute.
# person.write_attribute(:title, "Mr.")
#
# @example Write the attribute (alternate syntax.)
# person[:title] = "Mr."
#
# @param [ String, Symbol ] name The name of the attribute to update.
# @param [ Object ] value The value to set for the attribute.
#
# @since 1.0.0
def write_attribute(name, value)
access = database_field_name(name)
if attribute_writable?(access)
_assigning do
validate_attribute_value(access, value)
localized = fields[access].try(:localized?)
attributes_before_type_cast[name.to_s] = value
typed_value = typed_value_for(access, value)
unless attributes[access] == typed_value || attribute_changed?(access)
attribute_will_change!(access)
end
if localized
attributes[access] ||= {}
attributes[access].merge!(typed_value)
else
attributes[access] = typed_value
end
typed_value
end
end
end
alias :[]= :write_attribute
# Allows you to set all the attributes by passing in a hash of attributes
# with keys matching the attribute names (which again match the field names).
#
# @example Assign the attributes.
# person.assign_attributes(:title => "Mr.")
#
# @param [ Hash ] attrs The new attributes to set.
#
# @since 2.2.1
def assign_attributes(attrs = nil)
_assigning do
process_attributes(attrs)
end
end
# Writes the supplied attributes hash to the document. This will only
# overwrite existing attributes if they are present in the new +Hash+, all
# others will be preserved.
#
# @example Write the attributes.
# person.write_attributes(:title => "Mr.")
#
# @example Write the attributes (alternate syntax.)
# person.attributes = { :title => "Mr." }
#
# @param [ Hash ] attrs The new attributes to set.
#
# @since 1.0.0
def write_attributes(attrs = nil)
assign_attributes(attrs)
end
alias :attributes= :write_attributes
# Determine if the attribute is missing from the document, due to loading
# it from the database with missing fields.
#
# @example Is the attribute missing?
# document.attribute_missing?("test")
#
# @param [ String ] name The name of the attribute.
#
# @return [ true, false ] If the attribute is missing.
#
# @since 4.0.0
def attribute_missing?(name)
selection = __selected_fields
return false unless selection
field = fields[name]
(selection.values.first == 0 && selection_excluded?(name, selection, field)) ||
(selection.values.first == 1 && !selection_included?(name, selection, field))
end
# Return type-casted attributes.
#
# @example Type-casted attributes.
# document.typed_attributes
#
# @return [ Object ] The hash with keys and values of the type-casted attributes.
#
# @since 6.1.0
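#
# @example Illustrative result (the model and its fields are hypothetical).
#   band.typed_attributes
#   # => { "name" => "Depeche Mode", "likes" => 1000 }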
def typed_attributes
attribute_names.map { |name| [name, send(name)] }.to_h
end
private
def selection_excluded?(name, selection, field)
selection[name] == 0
end
def selection_included?(name, selection, field)
selection.key?(name) || selection.keys.collect { |k| k.partition('.').first }.include?(name)
end
# Does the string contain dot syntax for accessing hashes?
#
# @api private
#
# @example Is the string in dot syntax.
# model.hash_dot_syntax?
#
# @return [ true, false ] If the string contains a "."
#
# @since 3.0.15
def hash_dot_syntax?(string)
string.include?(".".freeze)
end
# Return the typecasted value for a field.
#
# @example Get the value typecasted.
# person.typed_value_for(:title, :sir)
#
# @param [ String, Symbol ] key The field name.
# @param [ Object ] value The uncast value.
#
# @return [ Object ] The cast value.
#
# @since 1.0.0
private
def read_raw_attribute(name)
normalized = database_field_name(name.to_s)
if attribute_missing?(normalized)
raise ActiveModel::MissingAttributeError, "Missing attribute: '#{name}'."
end
if hash_dot_syntax?(normalized)
attributes.__nested__(normalized)
else
attributes[normalized]
end
end
module ClassMethods
# Alias the provided name to the original field. This will provide an
# aliased getter, setter, existence check, and all dirty attribute
# methods.
#
# @example Alias the attribute.
# class Product
# include Mongoid::Document
# field :price, :type => Float
# alias_attribute :cost, :price
# end
#
# @param [ Symbol ] name The new name.
# @param [ Symbol ] original The original name.
#
# @since 2.3.0
def alias_attribute(name, original)
aliased_fields[name.to_s] = original.to_s
class_eval <<-RUBY
alias #{name} #{original}
alias #{name}= #{original}=
alias #{name}? #{original}?
alias #{name}_change #{original}_change
alias #{name}_changed? #{original}_changed?
alias reset_#{name}! reset_#{original}!
alias reset_#{name}_to_default! reset_#{original}_to_default!
alias #{name}_was #{original}_was
alias #{name}_will_change! #{original}_will_change!
alias #{name}_before_type_cast #{original}_before_type_cast
RUBY
end
end
private
# Validates an attribute value. This checks whether the value is valid
# for a given field.
# For now, only Hash and Array fields are validated.
#
# @param [ String, Symbol ] access The name of the attribute to validate.
# @param [ Object ] value The to be validated.
#
# @since 3.0.10
def validate_attribute_value(access, value)
return unless fields[access] && value
validatable_types = [ Hash, Array ]
if validatable_types.include? fields[access].type
unless value.is_a? fields[access].type
raise Mongoid::Errors::InvalidValue.new(fields[access].type, value.class)
end
end
end
def lookup_attribute_presence(name, value)
if localized_fields.has_key?(name) && value
value = localized_fields[name].send(:lookup, value)
end
value.present?
end
end
|
mongodb/mongo-ruby-driver | lib/mongo/session.rb | Mongo.Session.start_transaction | ruby | def start_transaction(options = nil)
if options
Lint.validate_read_concern_option(options[:read_concern])
end
check_if_ended!
if within_states?(STARTING_TRANSACTION_STATE, TRANSACTION_IN_PROGRESS_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation::TRANSACTION_ALREADY_IN_PROGRESS)
end
next_txn_num
@txn_options = options || @options[:default_transaction_options] || {}
if txn_write_concern && WriteConcern.send(:unacknowledged?, txn_write_concern)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation::UNACKNOWLEDGED_WRITE_CONCERN)
end
@state = STARTING_TRANSACTION_STATE
@already_committed = false
end | Places subsequent operations in this session into a new transaction.
Note that the transaction will not be started on the server until an
operation is performed after start_transaction is called.
@example Start a new transaction
session.start_transaction(options)
@param [ Hash ] options The options for the transaction being started.
@option options [ Hash ] read_concern The read concern options hash,
with the following optional keys:
- *:level* -- the read preference level as a symbol; valid values
are *:local*, *:majority*, and *:snapshot*
@option options [ Hash ] :write_concern The write concern options. Can be :w =>
Integer|String, :fsync => Boolean, :j => Boolean.
@option options [ Hash ] :read The read preference options. The hash may have the following
items:
- *:mode* -- read preference specified as a symbol; the only valid value is
*:primary*.
@raise [ Error::InvalidTransactionOperation ] If a transaction is already in
progress or if the write concern is unacknowledged.
@since 2.6.0 | train | https://github.com/mongodb/mongo-ruby-driver/blob/dca26d0870cb3386fad9ccc1d17228097c1fe1c8/lib/mongo/session.rb#L580-L602 | class Session
extend Forwardable
include Retryable
include Loggable
# Get the options for this session.
#
# @since 2.5.0
attr_reader :options
# Get the client through which this session was created.
#
# @since 2.5.1
attr_reader :client
# The cluster time for this session.
#
# @since 2.5.0
attr_reader :cluster_time
# The latest seen operation time for this session.
#
# @since 2.5.0
attr_reader :operation_time
# The options for the transaction currently being executed on the session.
#
# @since 2.6.0
attr_reader :txn_options
# Error message indicating that the session was retrieved from a client with a different cluster than that of the
# client through which it is currently being used.
#
# @since 2.5.0
MISMATCHED_CLUSTER_ERROR_MSG = 'The configuration of the client used to create this session does not match that ' +
'of the client owning this operation. Please only use this session for operations through its parent ' +
'client.'.freeze
# Error message describing that the session cannot be used because it has already been ended.
#
# @since 2.5.0
SESSION_ENDED_ERROR_MSG = 'This session has ended and cannot be used. Please create a new one.'.freeze
# Error message describing that sessions are not supported by the server version.
#
# @since 2.5.0
SESSIONS_NOT_SUPPORTED = 'Sessions are not supported by the connected servers.'.freeze
# The state of a session in which the last operation was not related to
# any transaction or no operations have yet occurred.
#
# @since 2.6.0
NO_TRANSACTION_STATE = :no_transaction
# The state of a session in which a user has initiated a transaction but
# no operations within the transactions have occurred yet.
#
# @since 2.6.0
STARTING_TRANSACTION_STATE = :starting_transaction
# The state of a session in which a transaction has been started and at
# least one operation has occurred, but the transaction has not yet been
# committed or aborted.
#
# @since 2.6.0
TRANSACTION_IN_PROGRESS_STATE = :transaction_in_progress
# The state of a session in which the last operation executed was a transaction commit.
#
# @since 2.6.0
TRANSACTION_COMMITTED_STATE = :transaction_committed
# The state of a session in which the last operation executed was a transaction abort.
#
# @since 2.6.0
TRANSACTION_ABORTED_STATE = :transaction_aborted
UNLABELED_WRITE_CONCERN_CODES = [
79, # UnknownReplWriteConcern
100, # CannotSatisfyWriteConcern,
].freeze
# Initialize a Session.
#
# @note Applications should use Client#start_session to begin a session.
#
# @example
# Session.new(server_session, client, options)
#
# @param [ ServerSession ] server_session The server session this session is associated with.
# @param [ Client ] client The client through which this session is created.
# @param [ Hash ] options The options for this session.
#
# @option options [ true|false ] :causal_consistency Whether to enable
# causal consistency for this session.
# @option options [ Hash ] :default_transaction_options Options to pass
# to start_transaction by default, can contain any of the options that
# start_transaction accepts.
# @option options [ true|false ] :implicit For internal driver use only -
# specifies whether the session is implicit.
# @option options [ Hash ] :read_preference The read preference options hash,
# with the following optional keys:
# - *:mode* -- the read preference as a string or symbol; valid values are
# *:primary*, *:primary_preferred*, *:secondary*, *:secondary_preferred*
# and *:nearest*.
#
# @since 2.5.0
# @api private
def initialize(server_session, client, options = {})
@server_session = server_session
options = options.dup
# Because the read preference will need to be inserted into a command as a string, we convert
# it from a symbol immediately upon receiving it.
if options[:read_preference] && options[:read_preference][:mode]
options[:read_preference][:mode] = options[:read_preference][:mode].to_s
end
@client = client.use(:admin)
@options = options.freeze
@cluster_time = nil
@state = NO_TRANSACTION_STATE
end
# Get a formatted string for use in inspection.
#
# @example Inspect the session object.
# session.inspect
#
# @return [ String ] The session inspection.
#
# @since 2.5.0
def inspect
"#<Mongo::Session:0x#{object_id} session_id=#{session_id} options=#{@options}>"
end
# End this session.
#
# @example
# session.end_session
#
# @return [ nil ] Always nil.
#
# @since 2.5.0
def end_session
if !ended? && @client
if within_states?(TRANSACTION_IN_PROGRESS_STATE)
begin
abort_transaction
rescue Mongo::Error
end
end
@client.cluster.session_pool.checkin(@server_session)
end
ensure
@server_session = nil
end
# Whether this session has ended.
#
# @example
# session.ended?
#
# @return [ true, false ] Whether the session has ended.
#
# @since 2.5.0
def ended?
@server_session.nil?
end
# Add the autocommit field to a command document if applicable.
#
# @example
# session.add_autocommit!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def add_autocommit!(command)
command.tap do |c|
c[:autocommit] = false if in_transaction?
end
end
# Add this session's id to a command document.
#
# @example
# session.add_id!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.5.0
# @api private
def add_id!(command)
command.merge!(lsid: session_id)
end
# Add the startTransaction field to a command document if applicable.
#
# @example
# session.add_start_transaction!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def add_start_transaction!(command)
command.tap do |c|
if starting_transaction?
c[:startTransaction] = true
end
end
end
# Add the transaction number to a command document if applicable.
#
# @example
# session.add_txn_num!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def add_txn_num!(command)
command.tap do |c|
c[:txnNumber] = BSON::Int64.new(@server_session.txn_num) if in_transaction?
end
end
# Add the transactions options if applicable.
#
# @example
# session.add_txn_opts!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def add_txn_opts!(command, read)
command.tap do |c|
# The read preference should be added for all read operations.
if read && txn_read_pref = txn_read_preference
Mongo::Lint.validate_underscore_read_preference(txn_read_pref)
txn_read_pref = txn_read_pref.dup
txn_read_pref[:mode] = txn_read_pref[:mode].to_s.gsub(/(_\w)/) { |match| match[1].upcase }
Mongo::Lint.validate_camel_case_read_preference(txn_read_pref)
c['$readPreference'] = txn_read_pref
end
# The read concern should be added to any command that starts a transaction.
if starting_transaction?
# https://jira.mongodb.org/browse/SPEC-1161: transaction's
# read concern overrides collection/database/client read concerns,
# even if transaction's read concern is not set.
# Read concern here is the one sent to the server and may
# include afterClusterTime.
if rc = c[:readConcern]
rc = rc.dup
rc.delete(:level)
end
if txn_read_concern
if rc
rc.update(txn_read_concern)
else
rc = txn_read_concern.dup
end
end
if rc.nil? || rc.empty?
c.delete(:readConcern)
else
c[:readConcern ] = rc
end
end
# We need to send the read concern level as a string rather than a symbol.
if c[:readConcern] && c[:readConcern][:level]
c[:readConcern][:level] = c[:readConcern][:level].to_s
end
# The write concern should be added to any abortTransaction or commitTransaction command.
if (c[:abortTransaction] || c[:commitTransaction])
if @already_committed
wc = BSON::Document.new(c[:writeConcern] || txn_write_concern || {})
wc.merge!(w: :majority)
wc[:wtimeout] ||= 10000
c[:writeConcern] = wc
elsif txn_write_concern
c[:writeConcern] ||= txn_write_concern
end
end
# A non-numeric write concern w value needs to be sent as a string rather than a symbol.
if c[:writeConcern] && c[:writeConcern][:w] && c[:writeConcern][:w].is_a?(Symbol)
c[:writeConcern][:w] = c[:writeConcern][:w].to_s
end
end
end
# Remove the read concern and/or write concern from the command if not applicable.
#
# @example
# session.suppress_read_write_concern!(cmd)
#
# @return [ Hash, BSON::Document ] The command document.
#
# @since 2.6.0
# @api private
def suppress_read_write_concern!(command)
command.tap do |c|
next unless in_transaction?
c.delete(:readConcern) unless starting_transaction?
c.delete(:writeConcern) unless c[:commitTransaction] || c[:abortTransaction]
end
end
# Ensure that the read preference of a command is primary.
#
# @example
# session.validate_read_preference!(command)
#
# @raise [ Mongo::Error::InvalidTransactionOperation ] If the read preference of the command is
# not primary.
#
# @since 2.6.0
# @api private
def validate_read_preference!(command)
return unless in_transaction? && non_primary_read_preference_mode?(command)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation::INVALID_READ_PREFERENCE)
end
# Update the state of the session due to a (non-commit and non-abort) operation being run.
#
# @since 2.6.0
# @api private
def update_state!
case @state
when STARTING_TRANSACTION_STATE
@state = TRANSACTION_IN_PROGRESS_STATE
when TRANSACTION_COMMITTED_STATE, TRANSACTION_ABORTED_STATE
@state = NO_TRANSACTION_STATE
end
end
# Validate the session.
#
# @example
# session.validate!(cluster)
#
# @param [ Cluster ] cluster The cluster the session is attempted to be used with.
#
# @return [ nil ] nil if the session is valid.
#
# @raise [ Mongo::Error::InvalidSession ] Raise error if the session is not valid.
#
# @since 2.5.0
# @api private
def validate!(cluster)
check_matching_cluster!(cluster)
check_if_ended!
self
end
# Process a response from the server that used this session.
#
# @example Process a response from the server.
# session.process(result)
#
# @param [ Operation::Result ] result The result from the operation.
#
# @return [ Operation::Result ] The result.
#
# @since 2.5.0
# @api private
def process(result)
unless implicit?
set_operation_time(result)
set_cluster_time(result)
end
@server_session.set_last_use!
result
end
# Advance the cached cluster time document for this session.
#
# @example Advance the cluster time.
# session.advance_cluster_time(doc)
#
# @param [ BSON::Document, Hash ] new_cluster_time The new cluster time.
#
# @return [ BSON::Document, Hash ] The new cluster time.
#
# @since 2.5.0
def advance_cluster_time(new_cluster_time)
if @cluster_time
@cluster_time = [ @cluster_time, new_cluster_time ].max_by { |doc| doc[Cluster::CLUSTER_TIME] }
else
@cluster_time = new_cluster_time
end
end
# Advance the cached operation time for this session.
#
# @example Advance the operation time.
# session.advance_operation_time(timestamp)
#
# @param [ BSON::Timestamp ] new_operation_time The new operation time.
#
# @return [ BSON::Timestamp ] The max operation time, considering the current and new times.
#
# @since 2.5.0
def advance_operation_time(new_operation_time)
if @operation_time
@operation_time = [ @operation_time, new_operation_time ].max
else
@operation_time = new_operation_time
end
end
# Whether reads executed with this session can be retried according to
# the modern retryable reads specification.
#
# If this method returns true, the modern retryable reads have been
# requested by the application. If the server selected for a read operation
# supports modern retryable reads, they will be used for that particular
# operation. If the server selected for a read operation does not support
# modern retryable reads, the read will not be retried.
#
# If this method returns false, legacy retryable reads have been requested
# by the application. Legacy retryable read logic will be used regardless
# of server version of the server(s) that the client is connected to.
# The number of read retries is given by :max_read_retries client option,
# which is 1 by default and can be set to 0 to disable legacy read retries.
#
# @api private
def retry_reads?
client.options[:retry_reads] != false
end
# Whether writes executed with this session will be retried.
#
# @example Will writes be retried.
# session.retry_writes?
#
# @return [ true, false ] If writes will be retried.
#
# @note Retryable writes are only available on server versions at least 3.6
# and with sharded clusters or replica sets.
#
# @since 2.5.0
def retry_writes?
!!client.options[:retry_writes] && (cluster.replica_set? || cluster.sharded?)
end
# Get the server session id of this session, if the session was not ended.
# If the session was ended, raises Error::SessionEnded.
#
# @example Get the session id.
# session.session_id
#
# @return [ BSON::Document ] The server session id.
#
# @since 2.5.0
def session_id
if ended?
raise Error::SessionEnded
end
@server_session.session_id
end
# Increment and return the next transaction number.
#
# @example Get the next transaction number.
# session.next_txn_num
#
# @return [ Integer ] The next transaction number.
#
# @since 2.5.0
# @api private
def next_txn_num
if ended?
raise Error::SessionEnded
end
@server_session.next_txn_num
end
# Get the current transaction number.
#
# @example Get the current transaction number.
# session.txn_num
#
# @return [ Integer ] The current transaction number.
#
# @since 2.6.0
def txn_num
if ended?
raise Error::SessionEnded
end
@server_session.txn_num
end
# Is this session an implicit one (not user-created).
#
# @example Is the session implicit?
# session.implicit?
#
# @return [ true, false ] Whether this session is implicit.
#
# @since 2.5.1
def implicit?
@implicit ||= !!(@options.key?(:implicit) && @options[:implicit] == true)
end
# Is this session an explicit one (i.e. user-created).
#
# @example Is the session explicit?
# session.explicit?
#
# @return [ true, false ] Whether this session is explicit.
#
# @since 2.5.2
def explicit?
@explicit ||= !implicit?
end
# Places subsequent operations in this session into a new transaction.
#
# Note that the transaction will not be started on the server until an
# operation is performed after start_transaction is called.
#
# @example Start a new transaction
# session.start_transaction(options)
#
# @param [ Hash ] options The options for the transaction being started.
#
# @option options [ Hash ] read_concern The read concern options hash,
# with the following optional keys:
# - *:level* -- the read preference level as a symbol; valid values
# are *:local*, *:majority*, and *:snapshot*
# @option options [ Hash ] :write_concern The write concern options. Can be :w =>
# Integer|String, :fsync => Boolean, :j => Boolean.
# @option options [ Hash ] :read The read preference options. The hash may have the following
# items:
# - *:mode* -- read preference specified as a symbol; the only valid value is
# *:primary*.
#
# @raise [ Error::InvalidTransactionOperation ] If a transaction is already in
# progress or if the write concern is unacknowledged.
#
# @since 2.6.0
# Commit the currently active transaction on the session.
#
# @example Commits the transaction.
# session.commit_transaction
#
# @option options :write_concern [ nil | WriteConcern::Base ] The write
# concern to use for this operation.
#
# @raise [ Error::InvalidTransactionOperation ] If there is no active transaction.
#
# @since 2.6.0
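#
# @example Illustrative commit flow (the collection name is hypothetical).
#   session.start_transaction(write_concern: { w: :majority })
#   client[:accounts].insert_one({ balance: 100 }, session: session)
#   session.commit_transaction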
def commit_transaction(options=nil)
check_if_ended!
check_if_no_transaction!
if within_states?(TRANSACTION_ABORTED_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation.cannot_call_after_msg(
:abortTransaction, :commitTransaction))
end
options ||= {}
begin
# If commitTransaction is called twice, we need to run the same commit
# operation again, so we revert the session to the previous state.
if within_states?(TRANSACTION_COMMITTED_STATE)
@state = @last_commit_skipped ? STARTING_TRANSACTION_STATE : TRANSACTION_IN_PROGRESS_STATE
@already_committed = true
end
if starting_transaction?
@last_commit_skipped = true
else
@last_commit_skipped = false
write_concern = options[:write_concern] || txn_options[:write_concern]
if write_concern && !write_concern.is_a?(WriteConcern::Base)
write_concern = WriteConcern.get(write_concern)
end
write_with_retry(self, write_concern, true) do |server, txn_num, is_retry|
if is_retry
if write_concern
wco = write_concern.options.merge(w: :majority)
wco[:wtimeout] ||= 10000
write_concern = WriteConcern.get(wco)
else
write_concern = WriteConcern.get(w: :majority, wtimeout: 10000)
end
end
Operation::Command.new(
selector: { commitTransaction: 1 },
db_name: 'admin',
session: self,
txn_num: txn_num,
write_concern: write_concern,
).execute(server)
end
end
rescue Mongo::Error::NoServerAvailable, Mongo::Error::SocketError => e
e.send(:add_label, Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
raise e
rescue Mongo::Error::OperationFailure => e
err_doc = e.instance_variable_get(:@result).send(:first_document)
if e.write_retryable? || (err_doc['writeConcernError'] &&
!UNLABELED_WRITE_CONCERN_CODES.include?(err_doc['writeConcernError']['code']))
e.send(:add_label, Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
end
raise e
ensure
@state = TRANSACTION_COMMITTED_STATE
end
end
# Abort the currently active transaction without making any changes to the database.
#
# @example Abort the transaction.
# session.abort_transaction
#
# @raise [ Error::InvalidTransactionOperation ] If there is no active transaction.
#
# @since 2.6.0
def abort_transaction
check_if_ended!
check_if_no_transaction!
if within_states?(TRANSACTION_COMMITTED_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation.cannot_call_after_msg(
:commitTransaction, :abortTransaction))
end
if within_states?(TRANSACTION_ABORTED_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation.cannot_call_twice_msg(:abortTransaction))
end
begin
unless starting_transaction?
write_with_retry(self, txn_options[:write_concern], true) do |server, txn_num|
Operation::Command.new(
selector: { abortTransaction: 1 },
db_name: 'admin',
session: self,
txn_num: txn_num
).execute(server)
end
end
@state = TRANSACTION_ABORTED_STATE
rescue Mongo::Error::InvalidTransactionOperation
raise
rescue Mongo::Error
@state = TRANSACTION_ABORTED_STATE
rescue Exception
@state = TRANSACTION_ABORTED_STATE
raise
end
end
# Whether or not the session is currently in a transaction.
#
# @example Is the session in a transaction?
# session.in_transaction?
#
# @return [ true | false ] Whether or not the session in a transaction.
#
# @since 2.6.0
def in_transaction?
within_states?(STARTING_TRANSACTION_STATE, TRANSACTION_IN_PROGRESS_STATE)
end
# Executes the provided block in a transaction, retrying as necessary.
#
# Returns the return value of the block.
#
# Exact number of retries and when they are performed are implementation
# details of the driver; the provided block should be idempotent, and
# should be prepared to be called more than once. The driver may retry
# the commit command within an active transaction or it may repeat the
# transaction and invoke the block again, depending on the error
# encountered if any. Note also that the retries may be executed against
# different servers.
#
# Transactions cannot be nested - InvalidTransactionOperation will be raised
# if this method is called when the session already has an active transaction.
#
# Exceptions raised by the block which are not derived from Mongo::Error
# stop processing, abort the transaction and are propagated out of
# with_transaction. Exceptions derived from Mongo::Error may be
# handled by with_transaction, resulting in retries of the process.
#
# Currently, with_transaction will retry commits and block invocations
# until at least 120 seconds have passed since with_transaction started
# executing. This timeout is not configurable and may change in a future
# driver version.
#
# @note with_transaction contains a loop; therefore, if with_transaction
# itself is placed in a loop, its block should not call next or break to
# control the outer loop because this will instead affect the loop in
# with_transaction. The driver will warn and abort the transaction
# if it detects this situation.
#
# @example Execute a statement in a transaction
# session.with_transaction(write_concern: {w: :majority}) do
# collection.update_one({ id: 3 }, { '$set' => { status: 'Inactive'} },
# session: session)
#
# end
#
# @example Execute a statement in a transaction, limiting total time consumed
# Timeout.timeout(5) do
# session.with_transaction(write_concern: {w: :majority}) do
# collection.update_one({ id: 3 }, { '$set' => { status: 'Inactive'} },
# session: session)
#
# end
# end
#
# @param [ Hash ] options The options for the transaction being started.
# These are the same options that start_transaction accepts.
#
# @raise [ Error::InvalidTransactionOperation ] If a transaction is already in
# progress or if the write concern is unacknowledged.
#
# @since 2.7.0
def with_transaction(options=nil)
# Non-configurable 120 second timeout for the entire operation
deadline = Time.now + 120
transaction_in_progress = false
loop do
commit_options = {}
if options
commit_options[:write_concern] = options[:write_concern]
end
start_transaction(options)
transaction_in_progress = true
begin
rv = yield self
rescue Exception => e
if within_states?(STARTING_TRANSACTION_STATE, TRANSACTION_IN_PROGRESS_STATE)
abort_transaction
transaction_in_progress = false
end
if Time.now >= deadline
transaction_in_progress = false
raise
end
if e.is_a?(Mongo::Error) && e.label?(Mongo::Error::TRANSIENT_TRANSACTION_ERROR_LABEL)
next
end
raise
else
if within_states?(TRANSACTION_ABORTED_STATE, NO_TRANSACTION_STATE, TRANSACTION_COMMITTED_STATE)
transaction_in_progress = false
return rv
end
begin
commit_transaction(commit_options)
transaction_in_progress = false
return rv
rescue Mongo::Error => e
if e.label?(Mongo::Error::UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)
# WriteConcernFailed
if e.is_a?(Mongo::Error::OperationFailure) && e.code == 64 && e.wtimeout?
transaction_in_progress = false
raise
end
if Time.now >= deadline
transaction_in_progress = false
raise
end
wc_options = case v = commit_options[:write_concern]
when WriteConcern::Base
v.options
when nil
{}
else
v
end
commit_options[:write_concern] = wc_options.merge(w: :majority)
retry
elsif e.label?(Mongo::Error::TRANSIENT_TRANSACTION_ERROR_LABEL)
if Time.now >= deadline
transaction_in_progress = false
raise
end
next
else
transaction_in_progress = false
raise
end
end
end
end
ensure
if transaction_in_progress
log_warn('with_transaction callback altered with_transaction loop, aborting transaction')
begin
abort_transaction
rescue Error::OperationFailure, Error::InvalidTransactionOperation
end
end
end
# Get the read preference the session will use in the currently
# active transaction.
#
# This is a driver style hash with underscore keys.
#
# @example Get the transaction's read preference
# session.txn_read_preference
#
# @return [ Hash ] The read preference of the transaction.
#
# @since 2.6.0
def txn_read_preference
rp = txn_options && txn_options[:read_preference] ||
@client.read_preference
Mongo::Lint.validate_underscore_read_preference(rp)
rp
end
def cluster
@client.cluster
end
protected
# Get the read concern the session will use when starting a transaction.
#
# This is a driver style hash with underscore keys.
#
# @example Get the session's transaction read concern.
# session.txn_read_concern
#
# @return [ Hash ] The read concern used for starting transactions.
#
# @since 2.9.0
def txn_read_concern
# Read concern is inherited from client but not db or collection.
txn_options && txn_options[:read_concern] || @client.read_concern
end
private
def within_states?(*states)
states.include?(@state)
end
def starting_transaction?
within_states?(STARTING_TRANSACTION_STATE)
end
def check_if_no_transaction!
return unless within_states?(NO_TRANSACTION_STATE)
raise Mongo::Error::InvalidTransactionOperation.new(
Mongo::Error::InvalidTransactionOperation::NO_TRANSACTION_STARTED)
end
def txn_write_concern
(txn_options && txn_options[:write_concern]) ||
(@client.write_concern && @client.write_concern.options)
end
def non_primary_read_preference_mode?(command)
return false unless command['$readPreference']
mode = command['$readPreference']['mode'] || command['$readPreference'][:mode]
mode && mode != 'primary'
end
# Returns the causal consistency document if the last operation time is
# known and causal consistency is enabled; otherwise returns nil.
def causal_consistency_doc
if operation_time && causal_consistency?
{:afterClusterTime => operation_time}
else
nil
end
end
def causal_consistency?
@causal_consistency ||= (if @options.key?(:causal_consistency)
!!@options[:causal_consistency]
else
true
end)
end
def set_operation_time(result)
if result && result.operation_time
@operation_time = result.operation_time
end
end
def set_cluster_time(result)
if cluster_time_doc = result.cluster_time
if @cluster_time.nil?
@cluster_time = cluster_time_doc
elsif cluster_time_doc[Cluster::CLUSTER_TIME] > @cluster_time[Cluster::CLUSTER_TIME]
@cluster_time = cluster_time_doc
end
end
end
def check_if_ended!
raise Mongo::Error::InvalidSession.new(SESSION_ENDED_ERROR_MSG) if ended?
end
def check_matching_cluster!(cluster)
if @client.cluster != cluster
raise Mongo::Error::InvalidSession.new(MISMATCHED_CLUSTER_ERROR_MSG)
end
end
end
|
PierreRambaud/gemirro | lib/gemirro/mirror_directory.rb | Gemirro.MirrorDirectory.add_file | ruby | def add_file(name, content)
full_path = File.join(@path, name)
file = MirrorFile.new(full_path)
file.write(content)
file
end | Creates a new file with the given name and content.
@param [String] name
@param [String] content
@return [Gemirro::MirrorFile] | train | https://github.com/PierreRambaud/gemirro/blob/5c6b5abb5334ed3beb256f6764bc336e2cf2dc21/lib/gemirro/mirror_directory.rb#L39-L46 | class MirrorDirectory
attr_reader :path
##
# @param [String] path
#
def initialize(path)
@path = path
end
##
# Creates directory or directories with the given path.
#
# @param [String] dir_path
# @return [Gemirro::MirrorDirectory]
#
def add_directory(dir_path)
full_path = File.join(@path, dir_path)
FileUtils.mkdir_p(full_path) unless File.directory?(full_path)
self.class.new(full_path)
end
##
# Creates a new file with the given name and content.
#
# @param [String] name
# @param [String] content
# @return [Gemirro::MirrorFile]
#
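# Usage sketch (illustrative, not from the original source; `specs_content`
# is a hypothetical string):
#
#   dir = Gemirro::MirrorDirectory.new('/tmp/mirror')
#   dir.add_file('latest_specs.4.8', specs_content) # => Gemirro::MirrorFile
#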
##
# Checks if a given file exists in the current directory.
#
# @param [String] name
# @return [TrueClass|FalseClass]
#
def file_exists?(name)
File.file?(File.join(@path, name))
end
end
|
sunspot/sunspot | sunspot/lib/sunspot/setup.rb | Sunspot.Setup.add_field_factory | ruby | def add_field_factory(name, type, options = {}, &block)
stored, more_like_this = options[:stored], options[:more_like_this]
field_factory = FieldFactory::Static.new(name, type, options, &block)
@field_factories[field_factory.signature] = field_factory
@field_factories_cache[field_factory.name] = field_factory
if stored
@stored_field_factories_cache[field_factory.name] << field_factory
end
if more_like_this
@more_like_this_field_factories_cache[field_factory.name] << field_factory
end
end | Add field factory for scope/ordering | train | https://github.com/sunspot/sunspot/blob/31dd76cd7a14a4ef7bd541de97483d8cd72ff685/sunspot/lib/sunspot/setup.rb#L28-L39 | class Setup #:nodoc:
attr_reader :class_object_id
def initialize(clazz)
@class_object_id = clazz.object_id
@class_name = clazz.name
@field_factories, @text_field_factories, @dynamic_field_factories,
@field_factories_cache, @text_field_factories_cache,
@dynamic_field_factories_cache = *Array.new(6) { Hash.new }
@stored_field_factories_cache = Hash.new { |h, k| h[k] = [] }
@more_like_this_field_factories_cache = Hash.new { |h, k| h[k] = [] }
@dsl = DSL::Fields.new(self)
@document_boost_extractor = nil
add_field_factory(:class, Type::ClassType.instance)
end
def type_names
[@class_name]
end
#
# Add field factory for scope/ordering
#
def add_join_field_factory(name, type, options = {}, &block)
field_factory = FieldFactory::Join.new(name, type, options, &block)
@field_factories[field_factory.signature] = field_factory
if type.is_a?(Type::TextType)
@text_field_factories_cache[field_factory.name] = field_factory
else
@field_factories_cache[field_factory.name] = field_factory
end
end
#
# Add field_factories for fulltext search
#
# ==== Parameters
#
# field_factories<Array>:: Array of Sunspot::Field objects
#
def add_text_field_factory(name, options = {}, &block)
stored, more_like_this = options[:stored], options[:more_like_this]
field_factory = FieldFactory::Static.new(name, Type::TextType.instance, options, &block)
@text_field_factories[name] = field_factory
@text_field_factories_cache[field_factory.name] = field_factory
if stored
@stored_field_factories_cache[field_factory.name] << field_factory
end
if more_like_this
@more_like_this_field_factories_cache[field_factory.name] << field_factory
end
end
#
# Add dynamic field_factories
#
# ==== Parameters
#
# field_factories<Array>:: Array of dynamic field objects
#
def add_dynamic_field_factory(name, type, options = {}, &block)
stored, more_like_this = options[:stored], options[:more_like_this]
field_factory = FieldFactory::Dynamic.new(name, type, options, &block)
@dynamic_field_factories[field_factory.signature] = field_factory
@dynamic_field_factories_cache[field_factory.name] = field_factory
if stored
@stored_field_factories_cache[field_factory.name] << field_factory
end
if more_like_this
@more_like_this_field_factories_cache[field_factory.name] << field_factory
end
end
#
# Add a document boost to documents at index time. Document boost can be
# static (the same for all documents of this class), or extracted on a per-
# document basis using either attribute or block extraction as per usual.
#
def add_document_boost(attr_name, &block)
@document_boost_extractor =
if attr_name
if attr_name.respond_to?(:to_f)
DataExtractor::Constant.new(attr_name)
else
DataExtractor::AttributeExtractor.new(attr_name)
end
else
DataExtractor::BlockExtractor.new(&block)
end
end
#
# Builder method for evaluating the setup DSL
#
def setup(&block)
Util.instance_eval_or_call(@dsl, &block)
end
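#
# Illustrative sketch (assumes a Post model; not from the original source):
#
#   Sunspot::Setup.setup(Post) do
#     text :title, :stored => true
#     integer :blog_id
#   end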
#
# Return the Field with the given (public-facing) name
#
def field(field_name)
if field_factory = @field_factories_cache[field_name.to_sym]
field_factory.build
else
raise(
UnrecognizedFieldError,
"No field configured for #{@class_name} with name '#{field_name}'"
)
end
end
#
# Return one or more text fields with the given public-facing name. This
# implementation will always return a single field (in an array), but
# CompositeSetup objects might return more than one.
#
def text_fields(field_name)
text_field =
if field_factory = @text_field_factories_cache[field_name.to_sym]
field_factory.build
else
raise(
UnrecognizedFieldError,
"No text field configured for #{@class_name} with name '#{field_name}'"
)
end
[text_field]
end
#
# Return one or more stored fields (can be either attribute or text fields)
# for the given name.
#
def stored_fields(field_name, dynamic_field_name = nil)
@stored_field_factories_cache[field_name.to_sym].map do |field_factory|
if dynamic_field_name
field_factory.build(dynamic_field_name)
else
field_factory.build
end
end
end
#
# Return one or more more_like_this fields (can be either attribute or text fields)
# for the given name.
#
def more_like_this_fields(field_name)
@more_like_this_field_factories_cache[field_name.to_sym].map do |field_factory|
field_factory.build
end
end
#
# Return the DynamicFieldFactory with the given base name
#
def dynamic_field_factory(field_name)
@dynamic_field_factories_cache[field_name.to_sym] || raise(
UnrecognizedFieldError,
"No dynamic field configured for #{@class_name} with name '#{field_name}'"
)
end
#
# Return all attribute fields
#
def fields
field_factories.map { |field_factory| field_factory.build }
end
#
# Return all text fields
#
def all_text_fields
text_field_factories.map { |text_field_factory| text_field_factory.build }
end
#
# Return all more_like_this fields
#
def all_more_like_this_fields
@more_like_this_field_factories_cache.values.map do |field_factories|
field_factories.map { |field_factory| field_factory.build }
end.flatten
end
#
# Get the field_factories associated with this setup as well as all inherited field_factories
#
# ==== Returns
#
# Array:: Collection of all field_factories associated with this setup
#
def field_factories
collection_from_inheritable_hash(:field_factories)
end
#
# Get the text field_factories associated with this setup as well as all inherited
# text field_factories
#
# ==== Returns
#
# Array:: Collection of all text field_factories associated with this setup
#
def text_field_factories
collection_from_inheritable_hash(:text_field_factories)
end
#
# Get all static, dynamic, and text field_factories associated with this setup as
# well as all inherited field_factories
#
# ==== Returns
#
# Array:: Collection of all text and scope field_factories associated with this setup
#
def all_field_factories
all_field_factories = []
all_field_factories.concat(field_factories).concat(text_field_factories).concat(dynamic_field_factories)
all_field_factories
end
#
# Get all dynamic field_factories for this and parent setups
#
# ==== Returns
#
# Array:: Dynamic field_factories
#
def dynamic_field_factories
collection_from_inheritable_hash(:dynamic_field_factories)
end
#
# Return the class associated with this setup.
#
# ==== Returns
#
# clazz<Class>:: Class setup is configured for
#
def clazz
Util.full_const_get(@class_name)
end
#
# Get the document boost for a given model
#
def document_boost_for(model)
if @document_boost_extractor
@document_boost_extractor.value_for(model)
end
end
protected
#
# Get the nearest inherited setup, if any
#
# ==== Returns
#
# Sunspot::Setup:: Setup for the nearest ancestor of this setup's class
#
def parent
Setup.for(clazz.superclass)
end
def get_inheritable_hash(name)
hash = instance_variable_get(:"@#{name}")
parent.get_inheritable_hash(name).each_pair do |key, value|
hash[key] = value unless hash.has_key?(key)
end if parent
hash
end
private
def collection_from_inheritable_hash(name)
get_inheritable_hash(name).values
end
class << self
#
# Retrieve or create the Setup instance for the given class, evaluating
# the given block to add to the setup's configuration
#
def setup(clazz, &block) #:nodoc:
self.for!(clazz).setup(&block)
end
#
# Retrieve the setup instance for the given class, or for the nearest
# ancestor that has a setup, if any.
#
# ==== Parameters
#
# clazz<Class>:: Class for which to retrieve a setup
#
# ==== Returns
#
# Sunspot::Setup::
# Setup instance associated with the given class or its nearest ancestor
#
def for(clazz) #:nodoc:
setups[clazz.name.to_sym] || self.for(clazz.superclass) if clazz
end
protected
#
# Retrieve or create a Setup instance for this class
#
# ==== Parameters
#
# clazz<Class>:: Class for which to retrieve a setup
#
# ==== Returns
#
# Sunspot::Setup:: New or existing setup for this class
#
def for!(clazz) #:nodoc:
setup = setups[clazz.name.to_sym]
if setup && setup.class_object_id == clazz.object_id
setup
else
setups[clazz.name.to_sym] = new(clazz)
end
end
private
# Singleton hash of class names to Setup instances
#
# ==== Returns
#
# Hash:: Class names keyed to Setup instances
#
def setups
@setups ||= {}
end
end
end
|
chikamichi/logg | lib/logg/core.rb | Logg.Dispatcher.method_missing | ruby | def method_missing(meth, *args, &block)
@namespace = meth.to_s
@message = (args.first.to_s == 'debug') ? nil : args.first.to_s
self.send :output!
end | The Dispatcher default behavior relies on #method_missing. It sets both the
message and a namespace, then auto-sends the order to output. | train | https://github.com/chikamichi/logg/blob/fadc70f80ee48930058db131888aabf7da21da2d/lib/logg/core.rb#L85-L89 | class Dispatcher
class Render
# Render a template. Just a mere proxy for Tilt::Template#render method,
# the first argument being the filepath or file, and the latter,
# the usual arguments for Tilt's #render.
#
# @param [String, #path, #realpath] path filepath or an object behaving
# like a legacy File
# @param [Object] obj context object the template will be rendered within
# @param [Hash] args rendering context
# @option [Symbol] :as syntax engine
# @option [Object] :data template's rendering contextual object
# @option [Hash] :locals template's locals
# @return [String] the interpolated template
#
def render(path, *args)
args = args.first
path = detect_path(path)
tpl = fetch_template(args, path)
tpl.render(args[:data], args[:locals])
end
def render_inline(content, *args)
args = args.first
syntax = detect_syntax(args)
res = Object
Better::Tempfile.open(['dummylogg', ".#{syntax}"]) do |f|
f.write(content)
f.rewind
res = Tilt.new(f.path).render(args[:data], args[:locals])
end
res
end
def detect_path(path)
if path.respond_to?(:path)
path.path
elsif path.respond_to?(:realpath)
path.to_s
elsif path.respond_to?(:to_s)
path.to_s
else
raise ArgumentError, 'Missing file or a filepath.'
end
end
def fetch_template(args, path)
if args[:as]
begin
test_path = Pathname.new(path)
raise ArgumentError, "Invalid filepath #{path}" unless test_path.file?
rescue
test_path = Pathname.new(path + ".#{args[:as].to_s.downcase}")
raise ArgumentError, "Invalid filepath #{path}" unless test_path.file?
path = test_path.to_s
end
Tilt.const_get("#{args[:as].to_s.downcase.capitalize}Template").new(path)
else
Tilt.new(path)
end
end
def detect_syntax(options)
unless options.has_key?(:as)
raise ArgumentError, 'Missing template syntax specified as the :as option.'
end
options[:as].to_s
end
end
attr_reader :message, :namespace
# The Dispatcher default behavior relies on #method_missing. It sets both the
# message and a namespace, then auto-sends the order to output.
#
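# e.g. (illustrative sketch, not from the original source):
#
#   logger = Logg::Dispatcher.new
#   logger.deploy "app restarted" # => "<timestamp> | [deploy] app restarted"
#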
def eigenclass
class << self; self; end
end
# Define a custom logger, using a template. The template may be defined
# within the block as a (multi-line) string, or one may reference a
# file.
#
# Inline templates (defined within the block) make use of #render_inline
# (indentation broken for the sake of example readability):
#
# logger.as(:custom) do |response|
# tpl = <<-TPL
# %h2 Query log report
# %span
# Status:
# = data.status
# %span
# Response:
# = data.body
# %br/
# TPL
# puts render_inline(tpl, :as => :haml, :data => response)
# end
#
# With an external template, one should use the #render helper to, well,
# render the template file. The extension will be used to infer the proper
# rendering engine. If not provided or when a custom extension is used, one
# may declare the template syntax.
#
# logger.as(:custom) do |data|
# # do whatever you want with data or anything else, then…
# out = render('my/template.erb', :data => data)
# # one may then use out to send mails, log to file, tweet…
# end
#
# logger.as(:custom) do |data|
# render('my/template', :as => :erb, :data => data)
# end
#
# See #render and #render_inline for more details.
#
# TODO: memoize the Render instance somehow? Or find another trick to
# execute the block.
#
def as(method, &block)
raise ArgumentError, 'Missing mandatory block' unless block_given?
method = method.to_sym
# Define the guard at class-level, if not already defined.
if !eigenclass.respond_to?(method)
eigenclass.send(:define_method, method) do |*args|
Render.new.instance_exec(*args, &block)
end
end
# Define the guard at instance-level by overriding #initialize, if not
# already defined.
eigenclass.send(:define_method, :new) do
o = super
if !o.respond_to?(method)
o.send(:define_method, method) do |*args|
Render.new.instance_exec(*args, &block)
end
end
o
end
end
private
# Default logging behavior. Outputs to $stdout using #puts and return
# the message.
#
def output!
output = "#{Time.now} | "
output += "[#{@namespace.gsub('_', ' ')}] " unless @namespace.nil?
output += @message.to_s # @message may be nil (when the first argument was 'debug')
puts output if defined?(Logg::ALWAYS_PUTS) && Logg::ALWAYS_PUTS
return output
end
end
|
metanorma/relaton | lib/relaton/db.rb | Relaton.Db.to_xml | ruby | def to_xml
db = @local_db || @db || return
Nokogiri::XML::Builder.new(encoding: "UTF-8") do |xml|
xml.documents do
xml.parent.add_child db.all.join(" ")
end
end.to_xml
end | list all entries as a serialization
@return [String] | train | https://github.com/metanorma/relaton/blob/2fac19da2f3ef3c30b8e8d8815a14d2115df0be6/lib/relaton/db.rb#L90-L97 | class Db
SUPPORTED_GEMS = %w[isobib ietfbib gbbib iecbib nistbib].freeze
# @param global_cache [String] directory of global DB
# @param local_cache [String] directory of local DB
def initialize(global_cache, local_cache)
register_gems
@registry = Relaton::Registry.instance
@db = open_cache_biblio(global_cache)
@local_db = open_cache_biblio(local_cache, global: false)
@db_name = global_cache
@local_db_name = local_cache
end
def register_gems
puts "[relaton] Info: detecting backends:"
SUPPORTED_GEMS.each do |b|
# puts b
begin
require b
rescue LoadError
puts "[relaton] Error: backend #{b} not present"
end
end
end
# The class of reference requested is determined by the prefix of the code:
# GB Standard for gbbib, IETF for ietfbib, ISO for isobib, IEC or IEV for iecbib,
# @param code [String] the ISO standard Code to look up (e.g. "ISO 9000")
# @param year [String] the year the standard was published (optional)
# @param opts [Hash] options; restricted to :all_parts if all-parts reference is required
# @return [String] Relaton XML serialisation of reference
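# e.g. (illustrative): db.fetch("ISO 19115", "2003", all_parts: true)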
def fetch(code, year = nil, opts = {})
stdclass = standard_class(code) or return nil
check_bibliocache(code, year, opts, stdclass)
end
def fetch_std(code, year = nil, stdclass = nil, opts = {})
std = nil
@registry.processors.each do |name, processor|
std = name if processor.prefix == stdclass
end
unless std
std = standard_class(code) or return nil
end
check_bibliocache(code, year, opts, std)
end
def fetched(key)
return @local_db.fetched key if @local_db
return @db.fetched key if @db
""
end
# The document identifier class corresponding to the given code
def docid_type(code)
stdclass = standard_class(code) or return [nil, code]
prefix, code = strip_id_wrapper(code, stdclass)
[@registry.processors[stdclass].idtype, code]
end
# @param key [String]
# @return [Hash]
def load_entry(key)
unless @local_db.nil?
entry = @local_db[key]
return entry if entry
end
@db[key]
end
# @param key [String]
# @param value [String] Bibitem xml serialisation.
# @option value [String] Bibitem xml serialisation.
def save_entry(key, value)
@db.nil? || (@db[key] = value)
@local_db.nil? || (@local_db[key] = value)
end
# list all entries as a serialization
# @return [String]
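# e.g. (illustrative): db.fetch("ISO 19115"); xml = db.to_xml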
private
# @param code [String] code of standard
# @return [Symbol] standard class name
def standard_class(code)
@registry.processors.each do |name, processor|
return name if /^#{processor.prefix}/.match(code) ||
processor.defaultprefix.match(code)
end
allowed = @registry.processors.reduce([]) do |m, (_k, v)|
m << v.prefix
end
warn "#{code} does not have a recognised prefix: #{allowed.join(', ')}"
nil
end
# TODO: i18n
# Format ID
# @param code [String]
# @param year [String]
# @param opts [Hash]
# @param stdclass [Symbol]
# @return [Array]
def std_id(code, year, opts, stdclass)
prefix, code = strip_id_wrapper(code, stdclass)
ret = code
ret += ":#{year}" if year
ret += " (all parts)" if opts[:all_parts]
["#{prefix}(#{ret})", code]
end
# Find prefix and clean code
# @param code [String]
# @param stdclass [Symbol]
# @return [Array]
def strip_id_wrapper(code, stdclass)
prefix = @registry.processors[stdclass].prefix
code = code.sub(/^#{prefix}\((.+)\)$/, "\\1")
[prefix, code]
end
def bib_retval(entry, stdclass)
entry =~ /^not_found/ ? nil : @registry.processors[stdclass].from_xml(entry)
end
# @param code [String]
# @param year [String]
# @param opts [Hash]
# @param stdclass [Symbol]
def check_bibliocache(code, year, opts, stdclass)
id, searchcode = std_id(code, year, opts, stdclass)
db = @local_db || @db
altdb = @local_db && @db ? @db : nil
return bib_retval(new_bib_entry(searchcode, year, opts, stdclass), stdclass) if db.nil?
db.delete(id) unless db.valid_entry?(id, year)
if altdb
db[id] ||= altdb[id]
db[id] ||= new_bib_entry(searchcode, year, opts, stdclass)
altdb[id] = db[id] if !altdb.valid_entry?(id, year)
else
db[id] ||= new_bib_entry(searchcode, year, opts, stdclass)
end
bib_retval(db[id], stdclass)
end
# hash uses => , because the hash is imported from JSON
# @param code [String]
# @param year [String]
# @param opts [Hash]
# @param stdclass [Symbol]
# @return [Hash]
def new_bib_entry(code, year, opts, stdclass)
bib = @registry.processors[stdclass].get(code, year, opts)
bib = bib.to_xml if bib.respond_to? :to_xml
bib = "not_found #{Date.today}" if bib.nil? || bib.empty?
bib
end
# if cached reference is undated, expire it after 60 days
# @param bib [Hash]
# @param year [String]
# def valid_bib_entry?(bib, year)
# bib&.is_a?(Hash) && bib&.has_key?("bib") && bib&.has_key?("fetched") &&
# (year || Date.today - bib["fetched"] < 60)
# end
# @param dir [String] DB directory
# @param global [TrueClass, FalseClass]
# @return [PStore]
def open_cache_biblio(dir, global: true)
return nil if dir.nil?
db = DbCache.new dir
if File.exist? dir
if global
unless db.check_version?
FileUtils.rm_rf(Dir.glob(dir + '/*'), secure: true)
warn "Global cache version is obsolete and cleared."
end
db.set_version
elsif db.check_version? then db
else
warn "Local cache version is obsolete."
nil
end
else db.set_version
end
end
# Check if version of the DB match to the gem version.
# @param cache_db [String] DB directory
# @return [TrueClass, FalseClass]
# def check_cache_version(cache_db)
# cache_db.transaction { cache_db[:version] == VERSION }
# end
# Set version of the DB to the gem version.
# @param cache_db [String] DB directory
# @return [Pstore]
# def set_cache_version(cache_db)
# unless File.exist? cache_db.path
# cache_db.transaction { cache_db[:version] = VERSION }
# end
# cache_db
# end
# @param enstry [String] entry in XML format
# @return [IsoBibItem::IsoBibliographicItem]
# def from_xml(entry)
# IsoBibItem.from_xml entry # will be unmarshaller
# end
# @param [Hash{String=>Hash{String=>String}}] biblio
# def save_cache_biblio(biblio, filename)
# return if biblio.nil? || filename.nil?
# File.open(filename, "w") do |b|
# b << biblio.reduce({}) do |s, (k, v)|
# bib = v["bib"].respond_to?(:to_xml) ? v["bib"].to_xml : v["bib"]
# s.merge(k => { "fetched" => v["fetched"], "bib" => bib })
# end.to_json
# end
# end
end
|
Falkor/falkorlib | lib/falkorlib/git/base.rb | FalkorLib.Git.list_branch | ruby | def list_branch(path = Dir.pwd)
cg = MiniGit::Capturing.new(path)
res = cg.branch :a => true
res = res.split("\n")
# Eventually reorder to make the first element of the array the current branch
i = res.find_index { |e| e =~ /^\*\s/ }
res[0], res[i] = res[i], res[0] unless (i.nil? || i.zero?)
res.each { |e| e.sub!(/^\*?\s+/, '') }
res
end | Get an array of the local branches present (first element is always the
current branch) | train | https://github.com/Falkor/falkorlib/blob/1a6d732e8fd5550efb7c98a87ee97fcd2e051858/lib/falkorlib/git/base.rb#L181-L190 | module Git
module_function
## Check if a git directory has been initialized
def init?(path = Dir.pwd)
begin
MiniGit.new(path)
rescue Exception
return false
end
true
end
## Check if the repositories already holds some commits
def commits?(path)
res = false
Dir.chdir(path) do
_stdout, _stderr, exit_status = Open3.capture3( "git rev-parse HEAD" )
res = (exit_status.to_i.zero?)
end
res
end
## Check the availability of a given git command
def command?(cmd)
cg = MiniGit::Capturing.new
cmd_list = cg.help :a => true
# typical run:
# usage: git [--version] [--help] [-C <path>] [-c name=value]
# [--exec-path[=<path>]] [--html-path] [--man-path] [--info-path]
# [-p|--paginate|--no-pager] [--no-replace-objects] [--bare]
# [--git-dir=<path>] [--work-tree=<path>] [--namespace=<name>]
# <command> [<args>]
#
# available git commands in '/usr/local/Cellar/git/1.8.5.2/libexec/git-core'
#
# add [...] \
# [...] | The part we are interested in, delimited by '\n\n' sequence
# [...] /
#
# 'git help -a' and 'git help -g' lists available subcommands and some
# concept guides. See 'git help <command>' or 'git help <concept>'
# to read about a specific subcommand or concept
l = cmd_list.split("\n\n")
l.shift # useless first part
#ap l
subl = l.each_index.select { |i| l[i] =~ /^\s\s+/ } # find sublines that start with at least two whitespaces
#ap subl
return false if subl.empty?
subl.any? { |i| l[i].split.include?(cmd) }
end
###
# Initialize a git repository
##
def init(path = Dir.pwd, _options = {})
# FIXME: for travis test: ensure the global git configurations
# 'user.email' and 'user.name' are set
[ 'user.name', 'user.email' ].each do |userconf|
next unless MiniGit[userconf].nil?
warn "The Git global configuration '#{userconf}' is not set so"
warn "you should *seriously* consider setting them by running\n\t git config --global #{userconf} 'your_#{userconf.sub(/\./, '_')}'"
default_val = ENV['USER']
default_val += '@domain.org' if userconf =~ /email/
warn "Now putting a default value '#{default_val}' you could change later on"
run %(
git config --global #{userconf} "#{default_val}"
)
#MiniGit[userconf] = default_val
end
exit_status = 1
Dir.mkdir( path ) unless Dir.exist?( path )
Dir.chdir( path ) do
execute "git init" unless FalkorLib.config.debug
exit_status = $?.to_i
end
# #puts "#init #{path}"
# Dir.chdir( "#{path}" ) do
# %x[ pwd && git init ] unless FalkorLib.config.debug
# end
exit_status
end
# Return the Git working tree from the proposed path (current directory by default)
def rootdir(path = Dir.pwd)
g = MiniGit.new
g.find_git_dir(path)[1]
end
# Return the git root directory for the path (current directory by default)
def gitdir(path = Dir.pwd)
g = MiniGit.new
g.find_git_dir(path)[0]
end
# Create a new branch
def create_branch(branch, path = Dir.pwd)
#ap method(__method__).parameters.map { |arg| arg[1] }
g = MiniGit.new(path)
error "not yet any commit performed -- You shall do one" unless commits?(path)
g.branch branch.to_s
end
# Delete a branch.
def delete_branch(branch, path = Dir.pwd, opts = { :force => false })
g = MiniGit.new(path)
error "'#{branch}' is not a valid existing branch" unless list_branch(path).include?( branch )
g.branch ((opts[:force]) ? :D : :d) => branch.to_s
end
###### config ######
# Retrieve the Git configuration
# You can propose a pattern as key
# Supported options:
# * :list [boolean] list all configurations
# * :hash [boolean] return a Hash
##
def config(key, dir = Dir.pwd, options = {})
#info "Retrieve the Git configuration"
res = nil
if (options[:list] || (key.is_a? Regexp) || (key =~ /\*/))
cg = MiniGit::Capturing.new(dir)
res = (cg.config :list => true).split("\n")
res.select! { |e| e.match(/^#{key}/) } unless key == '*'
#res = res.map { |e| e.split('=') }.to_h if options[:hash]
res = Hash[ res.map { |e| e.split('=') } ] if options[:hash]
else
g = MiniGit.new(dir)
res = g[key]
res = { key => g[key] } if options[:hash]
end
#ap res
res
end
## Fetch the latest changes
def fetch(path = Dir.pwd)
Dir.chdir( path ) do
execute "git fetch --all -v"
end
end
## Get an array of the local branches present (first element is always the
## current branch)
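## e.g. (illustrative): FalkorLib::Git.list_branch(Dir.pwd)
## # => ["devel", "master", "remotes/origin/master"]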
## Get the current git branch
def branch?(path = Dir.pwd)
list_branch(path)[0]
end
## Grab a remote branch
def grab(branch, path = Dir.pwd, remote = 'origin')
exit_status = 1
error "no branch provided" if branch.nil?
#remotes = FalkorLib::Git.remotes(path)
branches = FalkorLib::Git.list_branch(path)
if branches.include? "remotes/#{remote}/#{branch}"
info "Grab the branch '#{remote}/#{branch}'"
exit_status = execute_in_dir(FalkorLib::Git.rootdir( path ), "git branch --track #{branch} #{remote}/#{branch}")
else
warning "the remote branch '#{remote}/#{branch}' cannot be found"
end
exit_status
end
## Publish a branch on the remote
def publish(branch, path = Dir.pwd, remote = 'origin')
exit_status = 1
error "no branch provided" if branch.nil?
#remotes = FalkorLib::Git.remotes(path)
branches = FalkorLib::Git.list_branch(path)
Dir.chdir(FalkorLib::Git.rootdir( path ) ) do
if branches.include? "remotes/#{remote}/#{branch}"
warning "the remote branch '#{remote}/#{branch}' already exists"
else
info "Publish the branch '#{branch}' on the remote '#{remote}'"
exit_status = run %(
git push #{remote} #{branch}:refs/heads/#{branch}
git fetch #{remote}
git branch -u #{remote}/#{branch} #{branch}
)
end
end
exit_status
end
## List the files currently under version
def list_files(path = Dir.pwd)
g = MiniGit.new(path)
g.capturing.ls_files.split
end
## Add a file/whatever to Git and commit it
# Supported options:
# * :force [boolean]: force the add
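# e.g. (illustrative): FalkorLib::Git.add('README.md', 'update docs', :force => true)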
def add(path, msg = "", options = {})
exit_status = 0
dir = File.realpath(File.dirname(path))
root = rootdir(path)
relative_path_to_root = Pathname.new( File.realpath(path) ).relative_path_from Pathname.new(root)
real_msg = ((msg.empty?) ? "add '#{relative_path_to_root}'" : msg)
opts = '-f' if options[:force]
Dir.chdir( dir ) do
exit_status = run %(
git add #{opts} #{path}
git commit -s -m "#{real_msg}" #{path}
)
end
exit_status.to_i
end
## Check if a git directory is in dirty mode
# git diff --shortstat 2> /dev/null | tail -n1
def dirty?(path = Dir.pwd)
g = MiniGit.new(path)
a = g.capturing.diff :shortstat => true
#ap a
!a.empty?
end
## Get a hash table of tags under the format
# { <tag> => <commit> }
def list_tag(path = Dir.pwd)
res = {}
cg = MiniGit::Capturing.new(path)
unless (cg.tag :list => true).empty?
# git show-ref --tags
a = (cg.show_ref :tags => true).split("\n")
res = Hash[ a.collect { |item| item.split(' refs/tags/') } ].invert
end
res
end # list_tag
## Get the last tag commit, or nil if no tag can be found
def last_tag_commit(path = Dir.pwd)
res = ""
g = MiniGit.new(path)
unless (g.capturing.tag :list => true).empty?
# git rev-list --tags --max-count=1
res = (g.capturing.rev_list :tags => true, :max_count => 1).chomp
end
res
end # last_tag_commit
## Create a new tag
# You can add extra options to the git tag command through the opts hash.
# Ex:
# FalkorLib::Git.tag('name', dir, { :delete => true } )
#
def tag(name, path = Dir.pwd, opts = {})
g = MiniGit.new(path)
g.tag opts, name
end # tag
## List of Git remotes
def remotes(path = Dir.pwd)
g = MiniGit.new(path)
g.capturing.remote.split
end
## Check existence of remotes
def remotes?(path = Dir.pwd)
!remotes(path).empty?
end
# Create a new remote <name> targeting url <url>
# You can pass additional options expected by git remote add in <opts>,
# for instance as follows:
#
# create_remote('origin', url, dir, { :fetch => true })
#
def create_remote(name, url, path = Dir.pwd, opts = {})
g = MiniGit.new(path)
g.remote :add, opts, name, url.to_s
end
# Delete a branch.
# def delete_branch(branch, path = Dir.pwd, opts = { :force => false })
# g = MiniGit.new(path)
# error "'#{branch}' is not a valid existing branch" unless list_branch(path).include?( branch )
# g.branch (opts[:force] ? :D : :d) => "#{branch}"
# end
###
# Initialize git submodule from the configuration
##
def submodule_init(path = Dir.pwd, submodules = FalkorLib.config.git[:submodules], _options = {})
exit_status = 1
git_root_dir = rootdir(path)
if File.exist?("#{git_root_dir}/.gitmodules")
unless submodules.empty?
# TODO: Check if it contains all submodules of the configuration
end
end
#ap FalkorLib.config.git
Dir.chdir(git_root_dir) do
exit_status = FalkorLib::Git.submodule_update( git_root_dir )
submodules.each do |subdir, conf|
next if conf[:url].nil?
url = conf[:url]
dir = "#{FalkorLib.config.git[:submodulesdir]}/#{subdir}"
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
if File.directory?( dir )
puts " ... the git submodule '#{subdir}' is already setup."
else
info "adding Git submodule '#{dir}' from '#{url}'"
exit_status = run %(
git submodule add -b #{branch} #{url} #{dir}
git commit -s -m "Add Git submodule '#{dir}' from '#{url}'" .gitmodules #{dir}
)
end
end
end
exit_status
end
## Update the Git submodules to the **local** registered version
def submodule_update(path = Dir.pwd)
execute_in_dir(rootdir(path),
%(
git submodule init
git submodule update
))
end
## Upgrade the Git submodules to the latest HEAD version from the remote
def submodule_upgrade(path = Dir.pwd)
execute_in_dir(rootdir(path),
%{
git submodule foreach 'git fetch origin; git checkout $(git rev-parse --abbrev-ref HEAD); git reset --hard origin/$(git rev-parse --abbrev-ref HEAD); git submodule update --recursive; git clean -dfx'
})
end
## Initialize git subtrees from the configuration
def subtree_init(path = Dir.pwd)
raise ArgumentError, "Git 'subtree' command is not available" unless FalkorLib::Git.command? "subtree"
if FalkorLib.config.git[:subtrees].empty?
FalkorLib::Git.config_warn(:subtrees)
return 1
end
exit_status = 0
git_root_dir = rootdir(path)
Dir.chdir(git_root_dir) do
FalkorLib.config.git[:subtrees].each do |dir, conf|
next if conf[:url].nil?
url = conf[:url]
remote = dir.gsub(/\//, '-')
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
remotes = FalkorLib::Git.remotes
unless remotes.include?( remote )
info "Initialize Git remote '#{remote}' from URL '#{url}'"
exit_status = execute "git remote add --no-tags -f #{remote} #{url}"
end
unless File.directory?( File.join(git_root_dir, dir) )
info "initialize Git subtree '#{dir}'"
exit_status = execute "git subtree add --prefix #{dir} --squash #{remote}/#{branch}"
end
end
end
exit_status
end
## Check if the subtrees have been initialized.
## Actually based on a naive check of sub-directory existence
def subtree_init?(path = Dir.pwd)
res = true
FalkorLib.config.git[:subtrees].keys.each do |dir|
res &&= File.directory?(File.join(path, dir))
end
res
end # subtree_init?
## Show difference between local subtree(s) and their remotes"
def subtree_diff(path = Dir.pwd)
raise ArgumentError, "Git 'subtree' command is not available" unless FalkorLib::Git.command? "subtree"
if FalkorLib.config.git[:subtrees].empty?
FalkorLib::Git.config_warn(:subtrees)
return 1
end
exit_status = 0
git_root_dir = rootdir(path)
Dir.chdir(git_root_dir) do
FalkorLib.config.git[:subtrees].each do |dir, conf|
next if conf[:url].nil?
#url = conf[:url]
remote = dir.gsub(/\//, '-')
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
remotes = FalkorLib::Git.remotes
raise IOError, "The git remote '#{remote}' is not configured" unless remotes.include?( remote )
raise IOError, "The git subtree directory '#{dir}' does not exists" unless File.directory?( File.join(git_root_dir, dir) )
info "Git diff on subtree '#{dir}' with remote '#{remote}/#{branch}'"
exit_status = execute "git diff #{remote}/#{branch} #{FalkorLib::Git.branch?( git_root_dir )}:#{dir}"
end
end
exit_status
end
# Pull the latest changes, assuming the git repository is not dirty
def subtree_up(path = Dir.pwd)
error "Unable to pull subtree(s): Dirty Git repository" if FalkorLib::Git.dirty?( path )
exit_status = 0
git_root_dir = rootdir(path)
Dir.chdir(git_root_dir) do
FalkorLib.config.git[:subtrees].each do |dir, conf|
next if conf[:url].nil?
#url = conf[:url]
remote = dir.gsub(/\//, '-')
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
remotes = FalkorLib::Git.remotes
info "Pulling changes into subtree '#{dir}' using remote '#{remote}/#{branch}'"
raise IOError, "The git remote '#{remote}' is not configured" unless remotes.include?( remote )
info "\t\\__ fetching remote '#{remotes.join(',')}'"
FalkorLib::Git.fetch( git_root_dir )
raise IOError, "The git subtree directory '#{dir}' does not exists" unless File.directory?( File.join(git_root_dir, dir) )
info "\t\\__ pulling changes"
exit_status = execute "git subtree pull --prefix #{dir} --squash #{remote} #{branch}"
#exit_status = puts "git subtree pull --prefix #{dir} --squash #{remote} #{branch}"
end
end
exit_status
end
alias_method :subtree_pull, :subtree_up
# Raise a warning message if subtree/submodule section is not present
def config_warn(type = :subtrees)
warn "You shall setup 'Falkorlib.config.git[#{type.to_sym}]' to configure #{type} as follows:"
warn " FalkorLib.config.git do |c|"
warn " c[#{type.to_sym}] = {"
warn " '<subdir>' => {"
warn " :url => '<giturl>',"
warn " :branch => 'develop' # if different from master"
warn " },"
warn " }"
warn " end"
if type == :submodules
warn "This will configure the Git submodule into FalkorLib.config.git.submodulesdir"
warn "i.e. '#{FalkorLib.config.git[:submodulesdir]}'" if FalkorLib.config.git[:submodulesdir]
end
end
end # module FalkorLib::Git
|
rmagick/rmagick | ext/RMagick/extconf.rb | RMagick.Extconf.set_archflags_for_osx | ruby | def set_archflags_for_osx
archflags = []
fullpath = `which convert`
fileinfo = `file #{fullpath}`
# default ARCHFLAGS
archs = $ARCH_FLAG.scan(/-arch\s+(\S+)/).flatten
archs.each do |arch|
archflags << "-arch #{arch}" if fileinfo.include?(arch)
end
$ARCH_FLAG = archflags.join(' ') unless archflags.empty?
end | issue #169
set ARCHFLAGS appropriately for OSX | train | https://github.com/rmagick/rmagick/blob/ef6688ed9d76bf123c2ea1a483eff8635051adb7/ext/RMagick/extconf.rb#L232-L245 | class Extconf
require 'rmagick/version'
RMAGICK_VERS = ::Magick::VERSION
MIN_RUBY_VERS = ::Magick::MIN_RUBY_VERSION
attr_reader :headers
def initialize
@stdout = $stdout.dup
setup_paths_for_homebrew
configure_compile_options
assert_can_compile!
configure_headers
end
def setup_paths_for_homebrew
return unless find_executable('brew')
brew_pkg_config_path = "#{`brew --prefix imagemagick@6`.strip}/lib/pkgconfig"
pkgconfig_paths = ENV['PKG_CONFIG_PATH'].to_s.split(':')
if File.exist?(brew_pkg_config_path) && !pkgconfig_paths.include?(brew_pkg_config_path)
ENV['PKG_CONFIG_PATH'] = [ENV['PKG_CONFIG_PATH'], brew_pkg_config_path].compact.join(':')
end
end
def configured_compile_options
{
magick_version: $magick_version,
local_libs: $LOCAL_LIBS,
cflags: $CFLAGS,
cppflags: $CPPFLAGS,
ldflags: $LDFLAGS,
defs: $defs,
config_h: $config_h
}
end
def configure_headers
@headers = %w[assert.h ctype.h stdio.h stdlib.h math.h time.h]
headers << 'sys/types.h' if have_header('sys/types.h')
if have_header('magick/MagickCore.h')
headers << 'magick/MagickCore.h'
else
exit_failure "Can't install RMagick #{RMAGICK_VERS}. Can't find magick/MagickCore.h."
end
end
def configure_compile_options
# Magick-config is not available on Windows
if RUBY_PLATFORM !~ /mswin|mingw/
# Check for compiler. Extract first word so ENV['CC'] can be a program name with arguments.
config = defined?(RbConfig) ? ::RbConfig : ::Config
cc = (ENV['CC'] || config::CONFIG['CC'] || 'gcc').split(' ').first
exit_failure "No C compiler found in ${ENV['PATH']}. See mkmf.log for details." unless find_executable(cc)
magick_package = determine_imagemagick_package
$magick_version = `pkg-config #{magick_package} --modversion`[/^(\d+\.\d+\.\d+)/]
check_multiple_imagemagick_versions
check_partial_imagemagick_versions
# Ensure minimum ImageMagick version
# Check minimum ImageMagick version if possible
checking_for("outdated ImageMagick version (<= #{Magick::MIN_IM_VERSION})") do
Logging.message("Detected ImageMagick version: #{$magick_version}\n")
exit_failure "Can't install RMagick #{RMAGICK_VERS}. You must have ImageMagick #{Magick::MIN_IM_VERSION} or later.\n" if Gem::Version.new($magick_version) < Gem::Version.new(Magick::MIN_IM_VERSION)
end
# Save flags
$CFLAGS = ENV['CFLAGS'].to_s + ' ' + `pkg-config --cflags #{magick_package}`.chomp
$CPPFLAGS = ENV['CPPFLAGS'].to_s + ' ' + `pkg-config --cflags #{magick_package}`.chomp
$LDFLAGS = ENV['LDFLAGS'].to_s + ' ' + `pkg-config --libs #{magick_package}`.chomp
$LOCAL_LIBS = ENV['LIBS'].to_s + ' ' + `pkg-config --libs #{magick_package}`.chomp
set_archflags_for_osx if RUBY_PLATFORM =~ /darwin/ # osx
elsif RUBY_PLATFORM =~ /mingw/ # mingw
`identify -version` =~ /Version: ImageMagick (\d+\.\d+\.\d+)-+\d+ /
abort 'Unable to get ImageMagick version' unless Regexp.last_match(1)
$magick_version = Regexp.last_match(1)
dir_paths = search_paths_for_library_for_windows
$CPPFLAGS = %(-I"#{dir_paths[:include]}")
$LDFLAGS = %(-L"#{dir_paths[:lib]}")
have_library('CORE_RL_magick_')
have_library('X11')
else # mswin
`identify -version` =~ /Version: ImageMagick (\d+\.\d+\.\d+)-+\d+ /
abort 'Unable to get ImageMagick version' unless Regexp.last_match(1)
$magick_version = Regexp.last_match(1)
dir_paths = search_paths_for_library_for_windows
$CPPFLAGS << %( -I"#{dir_paths[:include]}")
$LDFLAGS << %( -libpath:"#{dir_paths[:lib]}")
$LOCAL_LIBS = 'CORE_RL_magick_.lib'
have_library('X11')
end
end
# Test for a specific value in an enum type
def have_enum_value(enum, value, headers = nil, &b)
checking_for "#{enum}.#{value}" do
if try_compile(<<"SRC", &b)
#{COMMON_HEADERS}
#{cpp_include(headers)}
/*top*/
int main() { #{enum} t = #{value}; t = t; return 0; }
SRC
$defs.push(format('-DHAVE_ENUM_%s', value.upcase))
true
else
false
end
end
end
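# e.g. (illustrative sketch; enum/value names are assumptions):
#   have_enum_value('AlphaChannelType', 'CopyAlphaChannel', headers)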
# Test for multiple values of the same enum type
def have_enum_values(enum, values, headers = nil, &b)
values.each do |value|
have_enum_value(enum, value, headers, &b)
end
end
def exit_failure(msg)
msg = "ERROR: #{msg}"
Logging.message msg
@stdout.puts "\n\n"
if ENV['NO_COLOR']
@stdout.puts msg
else
@stdout.print "\e[31m\e[1m#{msg}\e[0m"
end
@stdout.puts "\n\n"
@stdout.flush
exit(1)
end
def determine_imagemagick_package
unless find_executable('pkg-config')
exit_failure "Can't install RMagick #{RMAGICK_VERS}. Can't find pkg-config in #{ENV['PATH']}\n"
end
packages = `pkg-config --list-all`.scan(/(ImageMagick\-6[\.A-Z0-9]+) .*/).flatten
# For ancient version of ImageMagick 6 we need a different regex
if packages.empty?
packages = `pkg-config --list-all`.scan(/(ImageMagick) .*/).flatten
end
if packages.empty?
exit_failure "Can't install RMagick #{RMAGICK_VERS}. Can't find ImageMagick with pkg-config\n"
end
if packages.length > 1
package_lines = packages.map { |package| " - #{package}" }.join("\n")
msg = "\nWarning: Found more than one ImageMagick installation. This could cause problems at runtime.\n#{package_lines}\n\n"
Logging.message msg
message msg
end
packages.first
end
# Seems like lots of people have multiple versions of ImageMagick installed.
def check_multiple_imagemagick_versions
versions = []
path = ENV['PATH'].split(File::PATH_SEPARATOR)
path.each do |dir|
file = File.join(dir, 'Magick-config')
next unless File.executable? file
vers = `#{file} --version`.chomp.strip
prefix = `#{file} --prefix`.chomp.strip
versions << [vers, prefix, dir]
end
versions.uniq!
return unless versions.size > 1
msg = "\nWarning: Found more than one ImageMagick installation. This could cause problems at runtime.\n"
versions.each do |vers, prefix, dir|
msg << " #{dir}/Magick-config reports version #{vers} is installed in #{prefix}\n"
end
msg << "Using #{versions[0][0]} from #{versions[0][1]}.\n\n"
Logging.message msg
message msg
end
# Ubuntu (maybe other systems) comes with a partial installation of
# ImageMagick in the prefix /usr (some libraries, no includes, and no
# binaries). This causes problems when /usr/lib is in the path (e.g., using
# the default Ruby installation).
def check_partial_imagemagick_versions
prefix = config_string('prefix') || ''
matches = [
prefix + '/lib/lib?agick*',
prefix + '/include/ImageMagick',
prefix + '/bin/Magick-config'
].map do |file_glob|
Dir.glob(file_glob)
end
matches.delete_if(&:empty?)
return unless !matches.empty? && matches.length < 3
msg = "\nWarning: Found a partial ImageMagick installation. Your operating system likely has some built-in ImageMagick libraries but not all of ImageMagick. This will most likely cause problems at both compile and runtime.\nFound partial installation at: " + prefix + "\n"
Logging.message msg
message msg
end
# issue #169
# set ARCHFLAGS appropriately for OSX
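# e.g. (illustrative): with $ARCH_FLAG "-arch x86_64 -arch i386" and a
# 64-bit-only `convert` binary, set_archflags_for_osx leaves "-arch x86_64".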
def search_paths_for_library_for_windows
msg = 'searching PATH for the ImageMagick library...'
Logging.message msg
message msg + "\n"
found_lib = false
dir_paths = {}
paths = ENV['PATH'].split(File::PATH_SEPARATOR)
paths.each do |dir|
lib = File.join(dir, 'lib')
lib_file = File.join(lib, 'CORE_RL_magick_.lib')
next unless File.exist?(lib_file)
dir_paths[:include] = File.join(dir, 'include')
dir_paths[:lib] = lib
found_lib = true
break
end
return dir_paths if found_lib
exit_failure <<END_MINGW
Can't install RMagick #{RMAGICK_VERS}.
Can't find the ImageMagick library.
Retry with '--with-opt-dir' option.
Usage: gem install rmagick -- '--with-opt-dir=\"[path to ImageMagick]\"'
e.g.
gem install rmagick -- '--with-opt-dir=\"C:\Program Files\ImageMagick-6.9.1-Q16\"'
END_MINGW
end
def assert_can_compile!
assert_minimum_ruby_version!
assert_has_dev_libs!
end
def assert_minimum_ruby_version!
unless checking_for("Ruby version >= #{MIN_RUBY_VERS}") do
Gem::Version.new(RUBY_VERSION) >= Gem::Version.new(MIN_RUBY_VERS)
end
exit_failure "Can't install RMagick #{RMAGICK_VERS}. Ruby #{MIN_RUBY_VERS} or later required.\n"
end
end
def assert_has_dev_libs!
return unless RUBY_PLATFORM !~ /mswin|mingw/
unless `pkg-config --libs MagickCore`[/\bl\s*(MagickCore|Magick)6?\b/]
exit_failure "Can't install RMagick #{RMAGICK_VERS}. " \
"Can't find the ImageMagick library or one of the dependent libraries. " \
"Check the mkmf.log file for more detailed information.\n"
end
end
def create_header_file
have_func('snprintf', headers)
[
'GetImageChannelEntropy', # 6.9.0-0
'SetImageGray' # 6.9.1-10
].each do |func|
have_func(func, headers)
end
# Miscellaneous constants
$defs.push("-DRUBY_VERSION_STRING=\"ruby #{RUBY_VERSION}\"")
$defs.push("-DRMAGICK_VERSION_STRING=\"RMagick #{RMAGICK_VERS}\"")
if Gem::Version.new($magick_version) >= Gem::Version.new('6.8.9')
$defs.push('-DIMAGEMAGICK_GREATER_THAN_EQUAL_6_8_9=1')
end
create_header
end
def create_makefile_file
create_header_file
# Prior to 1.8.5 mkmf duplicated the symbols on the command line and in the
# extconf.h header. Suppress that behavior by removing the symbol array.
$defs = []
# Force re-compilation if the generated Makefile changed.
$config_h = 'Makefile rmagick.h'
create_makefile('RMagick2')
print_summary
end
def print_summary
summary = <<"END_SUMMARY"
#{'=' * 70}
#{DateTime.now.strftime('%a %d %b %y %T')}
This installation of RMagick #{RMAGICK_VERS} is configured for
Ruby #{RUBY_VERSION} (#{RUBY_PLATFORM}) and ImageMagick #{$magick_version}
#{'=' * 70}
END_SUMMARY
Logging.message summary
message summary
end
end
|
mitukiii/userstream | lib/user_stream/configuration.rb | UserStream.Configuration.reset | ruby | def reset
self.consumer_key = DEFAULT_CONSUMER_KEY
self.consumer_secret = DEFAULT_CONSUMER_SECRET
self.oauth_token = DEFAULT_OAUTH_TOKEN
self.oauth_token_secret = DEFAULT_OAUTH_TOKEN_SECRET
self.endpoint = DEFAULT_ENDPOINT
self.user_agent = DEFAULT_USER_AGENT
self.timeout = DEFAULT_TIMEOUT
self
end | Reset all configuration options to defaults | train | https://github.com/mitukiii/userstream/blob/f37e7931f7f934422ae6cbdee10da40715c6b68b/lib/user_stream/configuration.rb#L65-L74 | module Configuration
# An array of keys in the options hash when configuring a {UserStream::API}
OPTIONS_KEYS = [
:consumer_key,
:consumer_secret,
:oauth_token,
:oauth_token_secret,
:endpoint,
:user_agent,
:timeout,
].freeze
# By default, don't set a consumer key
DEFAULT_CONSUMER_KEY = nil
# By default, don't set a consumer secret
DEFAULT_CONSUMER_SECRET = nil
# By default, don't set an oauth token
DEFAULT_OAUTH_TOKEN = nil
# By default, don't set an oauth token secret
DEFAULT_OAUTH_TOKEN_SECRET = nil
# The endpoint that will be used to connect if none is set
#
# @note Specify a different API version, or use a Twitter-compatible endpoint.
# @see https://dev.twitter.com/docs/streaming-api/user-streams
DEFAULT_ENDPOINT = 'https://userstream.twitter.com'.freeze
# The user agent that will be sent to the API endpoint if none is set
DEFAULT_USER_AGENT = "UserStream/#{VERSION} (https://github.com/mitukiii/userstream)".freeze
# The timeout that will be to used to connect if none is set
#
# @see https://dev.twitter.com/docs/streaming-apis/connecting#Stalls
DEFAULT_TIMEOUT = 90
# @private
attr_accessor *OPTIONS_KEYS
# When this module is extended, set all configuration options to their default values
def self.extended(base)
base.reset
end
# Convenience method to allow configuration options to be set in a block
def configure
yield self
self
end
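#
# e.g. (illustrative sketch; key values are placeholders):
#
#   UserStream.configure do |config|
#     config.consumer_key = 'YOUR_CONSUMER_KEY'
#     config.consumer_secret = 'YOUR_CONSUMER_SECRET'
#   end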
# Create a hash of options and their values
def options
OPTIONS_KEYS.inject({}) do |options, key|
options.merge!(key => send(key))
end
end
# Reset all configuration options to defaults
end
|
zhimin/rwebspec | lib/rwebspec-watir/web_browser.rb | RWebSpec.WebBrowser.new_popup_window | ruby | def new_popup_window(options, browser = "ie")
if is_firefox?
raise "not implemented"
else
if options[:url]
Watir::IE.attach(:url, options[:url])
elsif options[:title]
Watir::IE.attach(:title, options[:title])
else
raise 'Please specify title or url of new pop up window'
end
end
end | Attach a Watir::IE instance to a popup window.
Typical usage
new_popup_window(:url => "http://www.google.com/a.pdf") | train | https://github.com/zhimin/rwebspec/blob/aafccee2ba66d17d591d04210067035feaf2f892/lib/rwebspec-watir/web_browser.rb#L510-L522 | class WebBrowser
attr_accessor :context
def initialize(base_url = nil, existing_browser = nil, options = {})
default_options = {:speed => "zippy",
:visible => true,
:highlight_colour => 'yellow',
:close_others => true
}
options = default_options.merge options
@context = Context.new base_url if base_url
initialize_ie_browser(existing_browser, options)
end
def initialize_ie_browser(existing_browser, options)
@browser = existing_browser || Watir::IE.new
if ($TESTWISE_EMULATE_TYPING && $TESTWISE_TYPING_SPEED) then
@browser.set_slow_speed if $TESTWISE_TYPING_SPEED == "slow"
@browser.set_fast_speed if $TESTWISE_TYPING_SPEED == 'fast'
else
@browser.speed = :zippy
end
return if existing_browser
# Watir-classic 3.4 drop the support
# @browser.activeObjectHighLightColor = options[:highlight_colour]
@browser.visible = options[:visible] unless $HIDE_IE
#NOTE: close_others fails
begin
if options[:close_others] then
@browser.windows.reject(&:current?).each(&:close)
end
rescue => e1
puts "Failed to close others"
end
end
def self.reuse(base_url, options)
if self.is_windows? && $TESTWISE_BROWSER != "Firefox"
require 'watir-classic'
# try to avoid
# lib/ruby/1.8/dl/win32.rb:11:in `sym': unknown type specifier 'v'
Watir::IE.each do |browser_window|
return WebBrowser.new(base_url, browser_window, options)
end
#puts "no browser instance found"
WebBrowser.new(base_url, nil, options)
else
WebBrowser.new(base_url, nil, options)
end
end
# for popup windows
def self.new_from_existing(underlying_browser, web_context = nil)
return WebBrowser.new(web_context ? web_context.base_url : nil, underlying_browser, {:close_others => false})
end
##
# Delegate to Watir
#
[:button, :td, :checkbox, :div, :form, :frame, :h1, :h2, :h3, :h4, :h5, :h6, :hidden, :image, :li, :link, :map, :pre, :tr, :radio, :select_list, :span, :table, :text_field, :paragraph, :file_field, :label].each do |method|
define_method method do |*args|
@browser.send(method, *args)
end
end
alias cell td
alias check_box checkbox # seems watir doc is wrong, checkbox not check_box
alias row tr
alias a link
alias img image
def area(*args)
@browser.send("area", *args)
end
def modal_dialog(how=nil, what=nil)
@browser.modal_dialog(how, what)
end
# This is the main method for accessing a generic element with a given attibute
# * how - symbol - how we access the element. Supports all values except :index and :xpath
# * what - string, integer or regular expression - what we are looking for,
#
# Valid values for 'how' are listed in the Watir Wiki - http://wiki.openqa.org/display/WTR/Methods+supported+by+Element
#
# returns an Watir::Element object
#
# Typical Usage
#
# element(:class, /foo/) # access the first element with class 'foo'. We can use a string in place of the regular expression
# element(:id, "11") # access the first element that matches an id
def element(how, what)
return @browser.element(how, what)
end
# this is the main method for accessing generic html elements by an attribute
#
# Returns a HTMLElements object
#
# Typical usage:
#
# elements(:class, 'test').each { |l| puts l.to_s } # iterate through all elements of a given attribute
# elements(:alt, 'foo')[1].to_s # get the first element of a given attribute
# elements(:id, 'foo').length # show how many elements are foung in the collection
#
def elements(how, what)
return @browser.elements(how, what)
end
def show_all_objects
@browser.show_all_objects
end
# Returns the specified ole object for input elements on a web page.
#
# This method is used internally by Watir and should not be used externally. It cannot be marked as private because of the way mixins and inheritance work in watir
#
# * how - symbol - the way we look for the object. Supported values are
# - :name
# - :id
# - :index
# - :value etc
# * what - string that we are looking for, ex. the name, or id tag attribute or index of the object we are looking for.
# * types - what object types we will look at.
# * value - used for objects that have one name, but many values. ex. radio lists and checkboxes
def locate_input_element(how, what, types, value=nil)
@browser.locate_input_element(how, what, types, value)
end
# This is the main method for accessing map tags - http://msdn.microsoft.com/workshop/author/dhtml/reference/objects/map.asp?frame=true
# * how - symbol - how we access the map,
# * what - string, integer or regular expression - what we are looking for,
#
# Valid values for 'how' are listed in the Watir Wiki - http://wiki.openqa.org/display/WTR/Methods+supported+by+Element
#
# returns a map object
#
# Typical Usage
#
# map(:id, /list/) # access the first map that matches list.
# map(:index,2) # access the second map on the page
# map(:title, "A Picture") # access a map using the tooltip text. See http://msdn.microsoft.com/workshop/author/dhtml/reference/properties/title_1.asp?frame=true
#
def map(how, what=nil)
@browser.map(how, what)
end
def contains_text(text)
@browser.contains_text(text);
end
# return HTML of current web page
def page_source
@browser.html()
#@browser.document.body
end
alias html_body page_source
alias html page_source
# return plain text of current web page
def text
@browser.text
end
def page_title
case @browser.class.to_s
when "Watir::IE"
@browser.document.title
else
@browser.title
end
end
[:images, :links, :buttons, :select_lists, :checkboxes, :radios, :text_fields, :divs, :dls, :dds, :dts, :ems, :lis, :maps, :spans, :strongs, :ps, :pres, :labels, :tds, :trs].each do |method|
define_method method do
@browser.send(method)
end
end
alias as links
alias rows trs
alias cells tds
alias imgs images
# current url
def url
@browser.url
end
def base_url=(new_base_url)
if @context
@context.base_url = new_base_url
return
end
@context = Context.new new_base_url
end
def is_firefox?
return false
end
# Close the browser window. Useful for automated test suites to reduce
# test interaction.
def close_browser
@browser.close
sleep 2
end
alias close close_browser
def close_all_browsers(browser_type = :ie)
@browser.windows.each(&:close)
end
def full_url(relative_url)
if @context && @context.base_url
@context.base_url + relative_url
else
relative_url
end
end
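# e.g. (illustrative): with base_url "http://itest2.com",
# full_url("/login") # => "http://itest2.com/login"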
def begin_at(relative_url)
@browser.goto full_url(relative_url)
end
def browser_opened?
begin
@browser != nil
rescue => e
return false
end
end
# Some browsers (e.g. IE) need to be waited on before more actions can be
# performed. Most action methods in Watir::Simple already call this before
# and after.
def wait_for_browser
# Watir 3 does not support it any more
# @browser.waitForIE unless is_firefox?
end
# A convenience method to wait at both ends of an operation for the browser
# to catch up.
def wait_before_and_after
wait_for_browser
yield
wait_for_browser
end
[:back, :forward, :refresh, :focus, :close_others].each do |method|
define_method(method) do
@browser.send(method)
end
end
alias refresh_page refresh
alias go_back back
alias go_forward forward
# Go to a page
# Usage:
# open_browser(:base_url => "http://www.itest2.com")
# ....
# goto_page("/purchase") # full url => http://www.itest.com/purchase
def goto_page(page)
# puts "DEBUG calling goto page => #{page}"
@browser.goto full_url(page);
end
# Go to a URL directly
# goto_url("http://www.itest2.com/downloads")
def goto_url(url)
@browser.goto url
end
# text fields
def enter_text_into_field_with_name(name, text)
if is_firefox?
wait_before_and_after { text_field(:name, name).value = text }
sleep 0.3
else
wait_before_and_after { text_field(:name, name).set(text) }
end
end
alias set_form_element enter_text_into_field_with_name
alias enter_text enter_text_into_field_with_name
alias set_hidden_field set_form_element
#links
def click_link_with_id(link_id, opts = {})
if opts && opts[:index]
wait_before_and_after { link(:id => link_id, :index => opts[:index]).click }
else
wait_before_and_after { link(:id, link_id).click }
end
end
def click_link_with_text(text, opts = {})
if opts && opts[:index]
wait_before_and_after { link(:text => text, :index => opts[:index]).click }
else
wait_before_and_after { link(:text, text).click }
end
end
alias click_link click_link_with_text
# Click a button with a given HTML id
# Usage:
# click_button_with_id("btn_submit")
def click_button_with_id(id, opts = {})
if opts && opts[:index]
wait_before_and_after { button(:id => id, :index => opts[:index]).click }
else
wait_before_and_after { button(:id, id).click }
end
end
# Click a button with a given name
# Usage:
# click_button_with_name("confirm")
def click_button_with_name(name, opts={})
if opts && opts[:index]
wait_before_and_after { button(:name => name, :index => opts[:index]).click }
else
wait_before_and_after { button(:name, name).click }
end
end
# Click a button with caption
# Usage:
# click_button_with_caption("Confirm payment")
def click_button_with_caption(caption, opts={})
if opts && opts[:index]
wait_before_and_after { button(:caption => caption, :index => opts[:index]).click }
else
wait_before_and_after { button(:caption, caption).click }
end
end
alias click_button click_button_with_caption
alias click_button_with_text click_button_with_caption
# Click a button with value
# Usage:
# click_button_with_value("Confirm payment")
def click_button_with_value(value, opts={})
if opts && opts[:index]
wait_before_and_after { button(:value => value, :index => opts[:index]).click }
else
wait_before_and_after { button(:value, value).click }
end
end
# Select a dropdown list by name
# Usage:
# select_option("country", "Australia")
def select_option(selectName, option)
select_list(:name, selectName).select(option)
end
# submit first submit button
def submit(buttonName = nil)
if buttonName.nil?
  buttons.each do |button|
    next if button.type != 'submit'
    button.click
    return
  end
else
click_button_with_name(buttonName)
end
end
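# Usage sketch (illustrative; the button name is hypothetical):
#   submit              # clicks the first button whose type is 'submit'
#   submit("confirm")   # clicks the submit button named 'confirm'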
# Check a checkbox
# Usage:
# check_checkbox("agree")
# check_checkbox("agree", "true")
def check_checkbox(checkBoxName, values=nil)
if values
arys = values.is_a?(Array) ? values : [values]
arys.each {|cbx_value|
if Watir::VERSION =~ /^1/ then
checkbox(:name, checkBoxName, cbx_value).set
else
checkbox(:name => checkBoxName, :value => cbx_value).set
end
}
else
checkbox(:name, checkBoxName).set
end
end
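# Usage sketch (illustrative; names and values are hypothetical): passing
# an Array checks one checkbox per value within a same-named group:
#   check_checkbox("colours", ["red", "blue"])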
# Uncheck a checkbox
# Usage:
# uncheck_checkbox("agree")
# uncheck_checkbox("agree", "false")
def uncheck_checkbox(checkBoxName, values = nil)
if values
arys = values.is_a?(Array) ? values : [values]
arys.each {|cbx_value|
if Watir::VERSION =~ /^1/ then
checkbox(:name, checkBoxName, cbx_value).clear
else
checkbox(:name => checkBoxName, :value => cbx_value).clear
end
}
else
checkbox(:name, checkBoxName).clear
end
end
# Click a radio button
# Usage:
# click_radio_option("country", "Australia")
def click_radio_option(radio_group, radio_option)
if Watir::VERSION =~ /^1/ then
radio(:name, radio_group, radio_option).set
else
radio(:name => radio_group, :value => radio_option).set
end
end
alias click_radio_button click_radio_option
# Clear a radio button
# Usage:
# click_radio_option("country", "Australia")
def clear_radio_option(radio_group, radio_option)
if Watir::VERSION =~ /^1/ then
  radio(:name, radio_group, radio_option).clear
else
  radio(:name => radio_group, :value => radio_option).clear
end
end
alias clear_radio_button clear_radio_option
# Deprecated: using Watir style directly instead
def element_by_id(elem_id)
if is_firefox?
# elem = @browser.document.getElementById(elem_id)
# elem = div(:id, elem_id) || label(:id, elem_id) || button(:id, elem_id) ||
# span(:id, elem_id) || hidden(:id, elem_id) || link(:id, elem_id) || radio(:id, elem_id)
elem = browser.element_by_xpath("//*[@id='#{elem_id}']")
else
elem = @browser.document.getElementById(elem_id)
end
end
def element_value(elementId)
elem = element_by_id(elementId)
elem ? elem.invoke('innerText') : nil
end
def element_source(elementId)
elem = element_by_id(elementId)
assert_not_nil(elem, "HTML element: #{elementId} does not exist")
elem.innerHTML
end
def select_file_for_upload(file_field, file_path)
normalized_file_path = RUBY_PLATFORM.downcase.include?("mingw") ? file_path.gsub("/", "\\") : file_path
file_field(:name, file_field).set(normalized_file_path)
end
# Watir 1.9
def javascript_dialog
@browser.javascript_dialog
end
def start_window(url = nil)
@browser.start_window(url)
end
# Attach to existing browser
#
# Usage:
# WebBrowser.attach_browser(:title, "iTest2")
# WebBrowser.attach_browser(:url, "http://www.itest2.com")
# WebBrowser.attach_browser(:url, "http://www.itest2.com", {:browser => "Firefox", :base_url => "http://www.itest2.com"})
# WebBrowser.attach_browser(:title, /agileway\.com\.au\/attachment/) # regular expression
def self.attach_browser(how, what, options={})
default_options = {:browser => "IE"}
options = default_options.merge(options)
site_context = Context.new(options[:base_url]) if options[:base_url]
return WebBrowser.new_from_existing(Watir::IE.attach(how, what), site_context)
end
# Attach a Watir::IE instance to a popup window.
#
# Typical usage
# new_popup_window(:url => "http://www.google.com/a.pdf")
def new_popup_window(options, browser = "ie")
if is_firefox?
raise "not implemented"
else
if options[:url]
Watir::IE.attach(:url, options[:url])
elsif options[:title]
Watir::IE.attach(:title, options[:title])
else
raise 'Please specify title or url of new pop up window'
end
end
end
# ---
# For debugging
# ---
def dump_response(stream = nil)
stream.nil? ? puts(page_source) : stream.puts(page_source)
end
# A Better Popup Handler using the latest Watir version. Posted by [email protected]
#
# http://wiki.openqa.org/display/WTR/FAQ#FAQ-HowdoIattachtoapopupwindow%3F
#
def start_clicker(button, waitTime = 9, user_input = nil)
# get a handle if one exists
hwnd = @browser.enabled_popup(waitTime)
if (hwnd) # yes there is a popup
w = WinClicker.new
if ( user_input )
w.setTextValueForFileNameField( hwnd, "#{user_input}" )
end
# this sleep is only here to watch the text being input; it is not necessary for this to work
sleep 3
# "OK" or whatever the name on the button is
w.clickWindowsButton_hwnd( hwnd, "#{button}" )
#
# this is just cleanup
w = nil
end
end
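# Usage sketch (illustrative; link text and file path are hypothetical).
# The dialog only appears after a click, so fire the action with a
# non-blocking click first, then let start_clicker pick the dialog up:
#   link(:text, "Download").click_no_wait
#   start_clicker("Save", 9, "C:\\tmp\\report.pdf")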
# return underlying browser
def ie
@browser
end
# Save current web page source to file
# usage:
# save_page("/tmp/01.html")
# save_page() => # will save to "20090830112200.html"
def save_page(file_name = nil)
file_name ||= Time.now.strftime("%Y%m%d%H%M%S") + ".html"
puts "about to save page: #{File.expand_path(file_name)}" if $DEBUG
File.open(file_name, "w").puts page_source
end
# Verify the next page following an operation.
#
# Typical usage:
# browser.expect_page HomePage
def expect_page(page_clazz, argument = nil)
if argument
page_clazz.new(self, argument)
else
page_clazz.new(self)
end
end
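# Illustrative sketch (HomePage is a hypothetical page class): expect_page
# relies on the page class's constructor to assert that the expected page
# is actually displayed:
#   class HomePage
#     def initialize(browser)
#       raise "not on home page" unless browser.page_source.include?("Welcome")
#     end
#   end
#   browser.expect_page HomePage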
# Is it running on a MS Windows platform?
def self.is_windows?
RUBY_PLATFORM.downcase.include?("mswin") or RUBY_PLATFORM.downcase.include?("mingw")
end
end
|
state-machines/state_machines | lib/state_machines/branch.rb | StateMachines.Branch.matches_requirement? | ruby | def matches_requirement?(query, option, requirement)
!query.include?(option) || requirement.matches?(query[option], query)
end | Verifies that an option in the given query matches the values required
for that option | train | https://github.com/state-machines/state_machines/blob/10b03af5fc9245bcb09bbd9c40c58ffba9a85422/lib/state_machines/branch.rb#L171-L173 | class Branch
include EvalHelpers
# The condition that must be met on an object
attr_reader :if_condition
# The condition that must *not* be met on an object
attr_reader :unless_condition
# The requirement for verifying the event being matched
attr_reader :event_requirement
# One or more requirements for verifying the states being matched. All
# requirements contain a mapping of {:from => matcher, :to => matcher}.
attr_reader :state_requirements
# A list of all of the states known to this branch. This will pull states
# from the following options (in the same order):
# * +from+ / +except_from+
# * +to+ / +except_to+
attr_reader :known_states
# Creates a new branch
def initialize(options = {}) #:nodoc:
# Build conditionals
@if_condition = options.delete(:if)
@unless_condition = options.delete(:unless)
# Build event requirement
@event_requirement = build_matcher(options, :on, :except_on)
if (options.keys - [:from, :to, :on, :except_from, :except_to, :except_on]).empty?
# Explicit from/to requirements specified
@state_requirements = [{:from => build_matcher(options, :from, :except_from), :to => build_matcher(options, :to, :except_to)}]
else
# Separate out the event requirement
options.delete(:on)
options.delete(:except_on)
# Implicit from/to requirements specified
@state_requirements = options.collect do |from, to|
from = WhitelistMatcher.new(from) unless from.is_a?(Matcher)
to = WhitelistMatcher.new(to) unless to.is_a?(Matcher)
{:from => from, :to => to}
end
end
# Track known states. The order that requirements are iterated is based
# on the priority in which tracked states should be added.
@known_states = []
@state_requirements.each do |state_requirement|
[:from, :to].each {|option| @known_states |= state_requirement[option].values}
end
end
# Determines whether the given object / query matches the requirements
# configured for this branch. In addition to matching the event, from state,
# and to state, this will also check whether the configured :if/:unless
# conditions pass on the given object.
#
# == Examples
#
# branch = StateMachines::Branch.new(:parked => :idling, :on => :ignite)
#
# # Successful
# branch.matches?(object, :on => :ignite) # => true
# branch.matches?(object, :from => nil) # => true
# branch.matches?(object, :from => :parked) # => true
# branch.matches?(object, :to => :idling) # => true
# branch.matches?(object, :from => :parked, :to => :idling) # => true
# branch.matches?(object, :on => :ignite, :from => :parked, :to => :idling) # => true
#
# # Unsuccessful
# branch.matches?(object, :on => :park) # => false
# branch.matches?(object, :from => :idling) # => false
# branch.matches?(object, :to => :first_gear) # => false
# branch.matches?(object, :from => :parked, :to => :first_gear) # => false
# branch.matches?(object, :on => :park, :from => :parked, :to => :idling) # => false
def matches?(object, query = {})
!match(object, query).nil?
end
# Attempts to match the given object / query against the set of requirements
# configured for this branch. In addition to matching the event, from state,
# and to state, this will also check whether the configured :if/:unless
# conditions pass on the given object.
#
# If a match is found, then the event/state requirements that the query
# passed successfully will be returned. Otherwise, nil is returned if there
# was no match.
#
# Query options:
# * <tt>:from</tt> - One or more states being transitioned from. If none
# are specified, then this will always match.
# * <tt>:to</tt> - One or more states being transitioned to. If none are
# specified, then this will always match.
# * <tt>:on</tt> - One or more events that fired the transition. If none
# are specified, then this will always match.
# * <tt>:guard</tt> - Whether to guard matches with the if/unless
# conditionals defined for this branch. Default is true.
#
# == Examples
#
# branch = StateMachines::Branch.new(:parked => :idling, :on => :ignite)
#
# branch.match(object, :on => :ignite) # => {:to => ..., :from => ..., :on => ...}
# branch.match(object, :on => :park) # => nil
def match(object, query = {})
query.assert_valid_keys(:from, :to, :on, :guard)
if (match = match_query(query)) && matches_conditions?(object, query)
match
end
end
def draw(graph, event, valid_states)
fail NotImplementedError
end
protected
# Builds a matcher strategy to use for the given options. If neither a
# whitelist nor a blacklist option is specified, then an AllMatcher is
# built.
def build_matcher(options, whitelist_option, blacklist_option)
options.assert_exclusive_keys(whitelist_option, blacklist_option)
if options.include?(whitelist_option)
value = options[whitelist_option]
value.is_a?(Matcher) ? value : WhitelistMatcher.new(options[whitelist_option])
elsif options.include?(blacklist_option)
value = options[blacklist_option]
raise ArgumentError, ":#{blacklist_option} option cannot use matchers; use :#{whitelist_option} instead" if value.is_a?(Matcher)
BlacklistMatcher.new(value)
else
AllMatcher.instance
end
end
# Verifies that all configured requirements (event and state) match the
# given query. If a match is found, then a hash containing the
# event/state requirements that passed will be returned; otherwise, nil.
def match_query(query)
query ||= {}
if match_event(query) && (state_requirement = match_states(query))
state_requirement.merge(:on => event_requirement)
end
end
# Verifies that the event requirement matches the given query
def match_event(query)
matches_requirement?(query, :on, event_requirement)
end
# Verifies that the state requirements match the given query. If a
# matching requirement is found, then it is returned.
def match_states(query)
state_requirements.detect do |state_requirement|
[:from, :to].all? {|option| matches_requirement?(query, option, state_requirement[option])}
end
end
# Verifies that an option in the given query matches the values required
# for that option
# Verifies that the conditionals for this branch evaluate to true for the
# given object
def matches_conditions?(object, query)
query[:guard] == false ||
Array(if_condition).all? {|condition| evaluate_method(object, condition)} &&
!Array(unless_condition).any? {|condition| evaluate_method(object, condition)}
end
end
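# Illustrative sketch (not part of the library source): how
# matches_requirement? makes each requirement opt-in. A query only has to
# satisfy the requirements for the options it actually specifies:
#
#   branch = StateMachines::Branch.new(:parked => :idling, :on => :ignite)
#   branch.matches?(object, :on => :ignite)  # => true  (:on is checked and passes)
#   branch.matches?(object, {})              # => true  (no options, nothing to check)
#   branch.matches?(object, :on => :park)    # => false (:on is checked and fails)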
|
puppetlabs/beaker-aws | lib/beaker/hypervisor/aws_sdk.rb | Beaker.AwsSdk.launch_all_nodes | ruby | def launch_all_nodes
@logger.notify("aws-sdk: launch all hosts in configuration")
ami_spec = YAML.load_file(@options[:ec2_yaml])["AMI"]
global_subnet_id = @options['subnet_id']
global_subnets = @options['subnet_ids']
if global_subnet_id and global_subnets
raise RuntimeError, 'Config specifies both subnet_id and subnet_ids'
end
no_subnet_hosts = []
specific_subnet_hosts = []
some_subnet_hosts = []
@hosts.each do |host|
if global_subnet_id or host['subnet_id']
specific_subnet_hosts.push(host)
elsif global_subnets
some_subnet_hosts.push(host)
else
no_subnet_hosts.push(host)
end
end
instances = [] # Each element is {:instance => i, :host => h}
begin
@logger.notify("aws-sdk: launch instances not particular about subnet")
launch_nodes_on_some_subnet(some_subnet_hosts, global_subnets, ami_spec,
instances)
@logger.notify("aws-sdk: launch instances requiring a specific subnet")
specific_subnet_hosts.each do |host|
subnet_id = host['subnet_id'] || global_subnet_id
instance = create_instance(host, ami_spec, subnet_id)
instances.push({:instance => instance, :host => host})
end
@logger.notify("aws-sdk: launch instances requiring no subnet")
no_subnet_hosts.each do |host|
instance = create_instance(host, ami_spec, nil)
instances.push({:instance => instance, :host => host})
end
wait_for_status(:running, instances)
rescue Exception => ex
@logger.notify("aws-sdk: exception #{ex.class}: #{ex}")
kill_instances(instances.map{|x| x[:instance]})
raise ex
end
# At this point, all instances should be running since wait
# either returns on success or throws an exception.
if instances.empty?
raise RuntimeError, "Didn't manage to launch any EC2 instances"
end
# Assign the now known running instances to their hosts.
instances.each {|x| x[:host]['instance'] = x[:instance]}
nil
end | Create EC2 instances for all hosts, tag them, and wait until
they're running. When a host provides a subnet_id, create the
instance in that subnet, otherwise prefer a CONFIG subnet_id.
If neither are set but there is a CONFIG subnet_ids list,
attempt to create the host in each specified subnet, which might
fail due to capacity constraints, for example. Specifying both
a CONFIG subnet_id and subnet_ids will provoke an error.
@return [void]
@api private | train | https://github.com/puppetlabs/beaker-aws/blob/f2e448b4e7c7ccb17940b86afc25cee5eb5cbb39/lib/beaker/hypervisor/aws_sdk.rb#L432-L482 | class AwsSdk < Beaker::Hypervisor
ZOMBIE = 3 #anything older than 3 hours is considered a zombie
PING_SECURITY_GROUP_NAME = 'beaker-ping'
attr_reader :default_region
# Initialize AwsSdk hypervisor driver
#
# @param [Array<Beaker::Host>] hosts Array of Beaker::Host objects
# @param [Hash<String, String>] options Options hash
def initialize(hosts, options)
@hosts = hosts
@options = options
@logger = options[:logger]
@default_region = ENV['AWS_REGION'] || 'us-west-2'
# Get AWS credentials
creds = options[:use_fog_credentials] ? load_credentials() : nil
config = {
:credentials => creds,
:logger => Logger.new($stdout),
:log_level => :debug,
:log_formatter => Aws::Log::Formatter.colored,
:retry_limit => 12,
:region => ENV['AWS_REGION'] || 'us-west-2'
}.delete_if{ |k,v| v.nil? }
Aws.config.update(config)
@client = {}
@client.default_proc = proc do |hash, key|
hash[key] = Aws::EC2::Client.new(:region => key)
end
test_split_install()
end
def client(region = default_region)
@client[region]
end
# Provision all hosts on EC2 using the Aws::EC2 API
#
# @return [void]
def provision
start_time = Time.now
# Perform the main launch work
launch_all_nodes()
# Add metadata tags to each instance
# tagging early as some nodes take longer
# to initialize and terminate before it has
# a chance to provision
add_tags()
# adding the correct security groups to the
# network interface, as during the `launch_all_nodes()`
# step they never get assigned, although they get created
modify_network_interface()
wait_for_status_netdev()
# Grab the ip addresses and dns from EC2 for each instance to use for ssh
populate_dns()
#enable root if user is not root
enable_root_on_hosts()
# Set the hostname for each box
set_hostnames()
# Configure /etc/hosts on each host
configure_hosts()
@logger.notify("aws-sdk: Provisioning complete in #{Time.now - start_time} seconds")
nil #void
end
def regions
@regions ||= client.describe_regions.regions.map(&:region_name)
end
# Kill all instances.
#
# @param instances [Enumerable<Aws::EC2::Types::Instance>]
# @return [void]
def kill_instances(instances)
running_instances = instances.compact.select do |instance|
instance_by_id(instance.instance_id).state.name == 'running'
end
instance_ids = running_instances.map(&:instance_id)
return nil if instance_ids.empty?
@logger.notify("aws-sdk: killing EC2 instance(s) #{instance_ids.join(', ')}")
client.terminate_instances(:instance_ids => instance_ids)
nil
end
# Cleanup all earlier provisioned hosts on EC2 using the Aws::EC2 library
#
# It goes without saying, but a #cleanup does nothing without a #provision
# method call first.
#
# @return [void]
def cleanup
# Provisioning should have set the host 'instance' values.
kill_instances(@hosts.map{ |h| h['instance'] }.select{ |x| !x.nil? })
delete_key_pair_all_regions()
nil
end
# Print instances to the logger. Instances will be from all regions
# associated with provided key name and limited by regex compared to
# instance status. Defaults to running instances.
#
# @param [String] key The key_name to match for
# @param [Regex] status The regular expression to match against the instance's status
def log_instances(key = key_name, status = /running/)
instances = []
regions.each do |region|
@logger.debug "Reviewing: #{region}"
client(region).describe_instances.reservations.each do |reservation|
reservation.instances.each do |instance|
if (instance.key_name =~ /#{key}/) and (instance.state.name =~ status)
instances << instance
end
end
end
end
output = ""
instances.each do |instance|
dns_name = instance.public_dns_name || instance.private_dns_name
output << "#{instance.instance_id} keyname: #{instance.key_name}, dns name: #{dns_name}, private ip: #{instance.private_ip_address}, ip: #{instance.public_ip_address}, launch time #{instance.launch_time}, status: #{instance.state.name}\n"
end
@logger.notify("aws-sdk: List instances (keyname: #{key})")
@logger.notify("#{output}")
end
# Provided an id return an instance object.
# Instance object will respond to methods described here: {http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/EC2/Instance.html AWS Instance Object}.
# @param [String] id The id of the instance to return
# @return [Aws::EC2::Types::Instance] An Aws::EC2 instance object
def instance_by_id(id)
client.describe_instances(:instance_ids => [id]).reservations.first.instances.first
end
# Return all instances currently on ec2.
# @see AwsSdk#instance_by_id
# @return [Array<Aws::Ec2::Types::Instance>] An array of Aws::EC2 instance objects
def instances
client.describe_instances.reservations.map(&:instances).flatten
end
# Provided an id return a VPC object.
# VPC object will respond to methods described here: {http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/EC2/VPC.html AWS VPC Object}.
# @param [String] id The id of the VPC to return
# @return [Aws::EC2::Types::Vpc] An Aws::EC2 vpc object
def vpc_by_id(id)
client.describe_vpcs(:vpc_ids => [id]).vpcs.first
end
# Return all VPCs currently on ec2.
# @see AwsSdk#vpc_by_id
# @return [Array<Aws::EC2::Types::Vpc>] An array of Aws::EC2 vpc objects
def vpcs
client.describe_vpcs.vpcs
end
# Provided an id return a security group object
# Security object will respond to methods described here: {http://docs.aws.amazon.com/AWSRubySDK/latest/AWS/EC2/SecurityGroup.html AWS SecurityGroup Object}.
# @param [String] id The id of the security group to return
# @return [Aws::EC2::Types::SecurityGroup] An Aws::EC2 security group object
def security_group_by_id(id)
client.describe_security_groups(:group_ids => [id]).security_groups.first
end
# Return all security groups currently on ec2.
# @see AwsSdk#security_goup_by_id
# @return [Array<Aws::EC2::Types::SecurityGroup>] An array of Aws::EC2 security group objects
def security_groups
client.describe_security_groups.security_groups
end
# Shutdown and destroy ec2 instances idenfitied by key that have been alive
# longer than ZOMBIE hours.
#
# @param [Integer] max_age The age in hours that a machine needs to be older than to be considered a zombie
# @param [String] key The key_name to match for
def kill_zombies(max_age = ZOMBIE, key = key_name)
@logger.notify("aws-sdk: Kill Zombies! (keyname: #{key}, age: #{max_age} hrs)")
instances_to_kill = []
time_now = Time.now.getgm #ec2 uses GM time
#examine all available regions
regions.each do |region|
@logger.debug "Reviewing: #{region}"
client(region).describe_instances.reservations.each do |reservation|
reservation.instances.each do |instance|
if (instance.key_name =~ /#{key}/)
@logger.debug "Examining #{instance.instance_id} (keyname: #{instance.key_name}, launch time: #{instance.launch_time}, state: #{instance.state.name})"
if ((time_now - instance.launch_time) > max_age*60*60) and instance.state.name !~ /terminated/
@logger.debug "Kill! #{instance.instance_id}: #{instance.key_name} (Current status: #{instance.state.name})"
instances_to_kill << instance
end
end
end
end
end
kill_instances(instances_to_kill)
delete_key_pair_all_regions(key_name_prefix)
@logger.notify "#{key}: Killed #{instances_to_kill.length} instance(s)"
end
# Destroy any volumes marked 'available', INCLUDING THOSE YOU DON'T OWN! Use with care.
def kill_zombie_volumes
# Occasionaly, tearing down ec2 instances leaves orphaned EBS volumes behind -- these stack up quickly.
# This simply looks for EBS volumes that are not in use
@logger.notify("aws-sdk: Kill Zombie Volumes!")
volume_count = 0
regions.each do |region|
@logger.debug "Reviewing: #{region}"
available_volumes = client(region).describe_volumes(
:filters => [
{ :name => 'status', :values => ['available'], }
]
).volumes
available_volumes.each do |volume|
begin
client(region).delete_volume(:volume_id => volume.id)
volume_count += 1
rescue Aws::EC2::Errors::InvalidVolume::NotFound => e
@logger.debug "Failed to remove volume: #{volume.id} #{e}"
end
end
end
@logger.notify "Freed #{volume_count} volume(s)"
end
# Create an EC2 instance for host, tag it, and return it.
#
# @return [void]
# @api private
def create_instance(host, ami_spec, subnet_id)
amitype = host['vmname'] || host['platform']
amisize = host['amisize'] || 'm1.small'
vpc_id = host['vpc_id'] || @options['vpc_id'] || nil
host['sg_cidr_ips'] = host['sg_cidr_ips'] || '0.0.0.0/0';
sg_cidr_ips = host['sg_cidr_ips'].split(',')
assoc_pub_ip_addr = host['associate_public_ip_address']
if vpc_id && !subnet_id
raise RuntimeError, "A subnet_id must be provided with a vpc_id"
end
if assoc_pub_ip_addr && !subnet_id
raise RuntimeError, "A subnet_id must be provided when configuring assoc_pub_ip_addr"
end
# Use snapshot provided for this host
image_type = host['snapshot']
raise RuntimeError, "No snapshot/image_type provided for EC2 provisioning" unless image_type
ami = ami_spec[amitype]
ami_region = ami[:region]
# Main region object for ec2 operations
region = ami_region
# If we haven't defined a vpc_id then we use the default vpc for the provided region
unless vpc_id
@logger.notify("aws-sdk: filtering available vpcs in region by 'isDefault'")
default_vpcs = client(region).describe_vpcs(:filters => [{:name => 'isDefault', :values => ['true']}])
vpc_id = if default_vpcs.vpcs.empty?
nil
else
default_vpcs.vpcs.first.vpc_id
end
end
# Grab the vpc object based upon provided id
vpc = vpc_id ? client(region).describe_vpcs(:vpc_ids => [vpc_id]).vpcs.first : nil
# Grab image object
image_id = ami[:image][image_type.to_sym]
@logger.notify("aws-sdk: Checking image #{image_id} exists and getting its root device")
image = client(region).describe_images(:image_ids => [image_id]).images.first
raise RuntimeError, "Image not found: #{image_id}" if image.nil?
@logger.notify("Image Storage Type: #{image.root_device_type}")
# Transform the images block_device_mappings output into a format
# ready for a create.
block_device_mappings = []
if image.root_device_type == :ebs
orig_bdm = image.block_device_mappings
@logger.notify("aws-sdk: Image block_device_mappings: #{orig_bdm}")
orig_bdm.each do |block_device|
block_device_mappings << {
:device_name => block_device.device_name,
:ebs => {
# Change the default size of the root volume.
:volume_size => host['volume_size'] || block_device.ebs.volume_size,
# This is required to override the images default for
# delete_on_termination, forcing all volumes to be deleted once the
# instance is terminated.
:delete_on_termination => true,
}
}
end
end
security_group = ensure_group(vpc || region, Beaker::EC2Helper.amiports(host), sg_cidr_ips)
#check if ping is enabled
ping_security_group = ensure_ping_group(vpc || region, sg_cidr_ips)
msg = "aws-sdk: launching %p on %p using %p/%p%s" %
[host.name, amitype, amisize, image_type,
subnet_id ? ("in %p" % subnet_id) : '']
@logger.notify(msg)
config = {
:max_count => 1,
:min_count => 1,
:image_id => image_id,
:monitoring => {
:enabled => true,
},
:key_name => ensure_key_pair(region).key_pairs.first.key_name,
:instance_type => amisize,
:disable_api_termination => false,
:instance_initiated_shutdown_behavior => "terminate",
}
if assoc_pub_ip_addr
# Assign the security groups on the network interface here; otherwise the
# instance ends up with the default security group, which only allows ssh
# access from the outside world and doesn't work well with remote devices
# etc.
config[:network_interfaces] = [{
:subnet_id => subnet_id,
:groups => [security_group.group_id, ping_security_group.group_id],
:device_index => 0,
:associate_public_ip_address => assoc_pub_ip_addr,
}]
else
config[:subnet_id] = subnet_id
end
config[:block_device_mappings] = block_device_mappings if image.root_device_type == :ebs
reservation = client(region).run_instances(config)
reservation.instances.first
end
# For each host, create an EC2 instance in one of the specified
# subnets and push it onto instances_created. Each subnet will be
# tried at most once for each host, and more than one subnet may
# be tried if capacity constraints are encountered. Each Hash in
# instances_created will contain an :instance and :host value.
#
# @param hosts [Enumerable<Host>]
# @param subnets [Enumerable<String>]
# @param ami_spec [Hash]
# @param instances_created Enumerable<Hash{Symbol=>EC2::Instance,Host}>
# @return [void]
# @api private
def launch_nodes_on_some_subnet(hosts, subnets, ami_spec, instances_created)
# Shuffle the subnets so we don't always hit the same one
# first, and cycle through the subnets independently of the
# host, so we stick with one that's working. Try each subnet
# once per-host.
if subnets.nil? or subnets.empty?
return
end
subnet_i = 0
shuffnets = subnets.shuffle
hosts.each do |host|
instance = nil
shuffnets.length.times do
begin
subnet_id = shuffnets[subnet_i]
instance = create_instance(host, ami_spec, subnet_id)
instances_created.push({:instance => instance, :host => host})
break
rescue Aws::EC2::Errors::InsufficientInstanceCapacity
@logger.notify("aws-sdk: hit #{subnet_id} capacity limit; moving on")
subnet_i = (subnet_i + 1) % shuffnets.length
end
end
if instance.nil?
raise RuntimeError, "unable to launch host in any requested subnet"
end
end
end
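# Illustrative walk-through (hypothetical subnets): with subnets
# ["a", "b", "c"] shuffled to ["c", "a", "b"], each host starts from the
# current index; on InsufficientInstanceCapacity the index advances and
# stays there, so later hosts keep using the subnet that last worked:
#   host1 -> "c" (full) -> "a" (ok)
#   host2 -> "a" (ok)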
# Create EC2 instances for all hosts, tag them, and wait until
# they're running. When a host provides a subnet_id, create the
# instance in that subnet, otherwise prefer a CONFIG subnet_id.
# If neither are set but there is a CONFIG subnet_ids list,
# attempt to create the host in each specified subnet, which might
# fail due to capacity constraints, for example. Specifying both
# a CONFIG subnet_id and subnet_ids will provoke an error.
#
# @return [void]
# @api private
# Wait until all instances reach the desired state. Each Hash in
# instances must contain an :instance and :host value.
#
# @param state_name [String] EC2 state to wait for, 'running', 'stopped', etc.
# @param instances Enumerable<Hash{Symbol=>EC2::Instance,Host}>
# @param block [Proc] more complex checks can be made by passing a
# block in. This overrides the status parameter.
# EC2::Instance objects from the hosts will be
# yielded to the passed block
# @return [void]
# @api private
# FIXME: rename to #wait_for_state
def wait_for_status(state_name, instances, &block)
# Wait for each node to reach status :running
@logger.notify("aws-sdk: Waiting for all hosts to be #{state_name}")
instances.each do |x|
name = x[:host] ? x[:host].name : x[:name]
instance = x[:instance]
@logger.notify("aws-sdk: Wait for node #{name} to be #{state_name}")
# Here we keep waiting for the machine state to reach 'running' with an
# exponential backoff for each poll.
# TODO: should probably be in a shared method somewhere
(1..10).each do |tries|
refreshed_instance = instance_by_id(instance.instance_id)
if refreshed_instance.nil?
@logger.debug("Instance #{name} not yet available (#{e})")
else
if block_given?
test_result = yield refreshed_instance
else
test_result = refreshed_instance.state.name.to_s == state_name.to_s
end
if test_result
x[:instance] = refreshed_instance
# Always sleep, so the next command won't cause a throttle
backoff_sleep(tries)
break
elsif tries == 10
raise "Instance never reached state #{state_name}"
end
end
backoff_sleep(tries)
end
end
end
# Handles special checks needed for netdev platforms.
#
# @note if any host is a netdev one, these checks will happen once across all
# of the hosts, and then we'll exit
#
# @return [void]
# @api private
def wait_for_status_netdev()
@hosts.each do |host|
if host['platform'] =~ /f5-|netscaler/
wait_for_status(:running, @hosts)
wait_for_status(nil, @hosts) do |instance|
instance_status_collection = client.describe_instance_status({:instance_ids => [instance.instance_id]})
first_instance = instance_status_collection.first[:instance_statuses].first
first_instance[:instance_status][:status] == "ok" if first_instance
end
break
end
end
end
# Add metadata tags to all instances
#
# @return [void]
# @api private
def add_tags
@hosts.each do |host|
instance = host['instance']
# Define tags for the instance
@logger.notify("aws-sdk: Add tags for #{host.name}")
tags = [
{
:key => 'jenkins_build_url',
:value => @options[:jenkins_build_url],
},
{
:key => 'Name',
:value => host.name,
},
{
:key => 'department',
:value => @options[:department],
},
{
:key => 'project',
:value => @options[:project],
},
{
:key => 'created_by',
:value => @options[:created_by],
},
]
host[:host_tags].each do |name, val|
tags << { :key => name.to_s, :value => val }
end
client.create_tags(
:resources => [instance.instance_id],
:tags => tags.reject { |r| r[:value].nil? },
)
end
nil
end
# Add correct security groups to hosts network_interface
# as during the create_instance stage it is too early in process
# to configure
#
# @return [void]
# @api private
def modify_network_interface
@hosts.each do |host|
instance = host['instance']
host['sg_cidr_ips'] = host['sg_cidr_ips'] || '0.0.0.0/0';
sg_cidr_ips = host['sg_cidr_ips'].split(',')
# Define tags for the instance
@logger.notify("aws-sdk: Update network_interface for #{host.name}")
security_group = ensure_group(instance[:network_interfaces].first, Beaker::EC2Helper.amiports(host), sg_cidr_ips)
ping_security_group = ensure_ping_group(instance[:network_interfaces].first, sg_cidr_ips)
client.modify_network_interface_attribute(
:network_interface_id => "#{instance[:network_interfaces].first[:network_interface_id]}",
:groups => [security_group.group_id, ping_security_group.group_id],
)
end
nil
end
# Populate the hosts IP address from the EC2 dns_name
#
# @return [void]
# @api private
def populate_dns
# Obtain the IP addresses and dns_name for each host
@hosts.each do |host|
@logger.notify("aws-sdk: Populate DNS for #{host.name}")
instance = host['instance']
host['ip'] = instance.public_ip_address || instance.private_ip_address
host['private_ip'] = instance.private_ip_address
host['dns_name'] = instance.public_dns_name || instance.private_dns_name
@logger.notify("aws-sdk: name: #{host.name} ip: #{host['ip']} private_ip: #{host['private_ip']} dns_name: #{host['dns_name']}")
end
nil
end
# Return a valid /etc/hosts line for a given host
#
# @param [Beaker::Host] host Beaker::Host object for generating /etc/hosts entry
# @param [Symbol] interface Symbol identifies which ip should be used for host
# @return [String] formatted hosts entry for host
# @api private
def etc_hosts_entry(host, interface = :ip)
name = host.name
domain = get_domain_name(host)
ip = host[interface.to_s]
"#{ip}\t#{name} #{name}.#{domain} #{host['dns_name']}\n"
end
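# Illustrative output (hypothetical host values):
#   etc_hosts_entry(host)
#   # => "52.10.1.5\tmaster master.example.com ec2-52-10-1-5.us-west-2.compute.amazonaws.com\n"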
# Configure /etc/hosts for each node
#
# @note f5 hosts are skipped since this isn't a valid step there
#
# @return [void]
# @api private
def configure_hosts
non_netdev_windows_hosts = @hosts.select{ |h| !(h['platform'] =~ /f5-|netscaler|windows/) }
non_netdev_windows_hosts.each do |host|
host_entries = non_netdev_windows_hosts.map do |h|
h == host ? etc_hosts_entry(h, :private_ip) : etc_hosts_entry(h)
end
host_entries.unshift "127.0.0.1\tlocalhost localhost.localdomain\n"
set_etc_hosts(host, host_entries.join(''))
end
nil
end
# Enables root for instances with a custom username, like Ubuntu AMIs
#
# @return [void]
# @api private
def enable_root_on_hosts
@hosts.each do |host|
if host['disable_root_ssh'] == true
@logger.notify("aws-sdk: Not enabling root for instance as disable_root_ssh is set to 'true'.")
else
@logger.notify("aws-sdk: Enabling root ssh")
enable_root(host)
end
end
end
# Enables root access for a host when username is not root
#
# @return [void]
# @api private
def enable_root(host)
if host['user'] != 'root'
if host['platform'] =~ /f5-/
enable_root_f5(host)
elsif host['platform'] =~ /netscaler/
enable_root_netscaler(host)
else
copy_ssh_to_root(host, @options)
enable_root_login(host, @options)
host['user'] = 'root'
end
host.close
end
end
# Enables root access for a host on an f5 platform
# @note This method does not support other platforms
#
# @return nil
# @api private
def enable_root_f5(host)
(1..10).each do |tries|
begin
# This command is problematic, as the F5 is not always done loading
if host.exec(Command.new("modify sys db systemauth.disablerootlogin value false"), :acceptable_exit_codes => [0,1]).exit_code == 0 \
and host.exec(Command.new("modify sys global-settings gui-setup disabled"), :acceptable_exit_codes => [0,1]).exit_code == 0 \
and host.exec(Command.new("save sys config"), :acceptable_exit_codes => [0,1]).exit_code == 0
backoff_sleep(tries)
break
elsif tries == 10
raise "Instance was unable to be configured"
end
rescue Beaker::Host::CommandFailure => e
@logger.debug("Instance not yet configured (#{e})")
end
backoff_sleep(tries)
end
host['user'] = 'admin'
sha256 = Digest::SHA256.new
password = sha256.hexdigest((1..50).map{(rand(86)+40).chr}.join.gsub(/\\/,'\&\&')) + 'password!'
# disabling password policy to account for the enforcement level set,
# since the generated password sometimes trips it:
# `01070366:3: Bad password (admin): BAD PASSWORD: it is too simplistic/systematic`
host.exec(Command.new('modify auth password-policy policy-enforcement disabled'))
host.exec(Command.new("modify auth user admin password #{password}"))
@logger.notify("f5: Configured admin password to be #{password}")
host.close
host['ssh'] = {:password => password}
end
# Enables root access for a host on an netscaler platform
# @note This method does not support other platforms
#
# @return nil
# @api private
def enable_root_netscaler(host)
host['ssh'] = {:password => host['instance'].instance_id}
@logger.notify("netscaler: nsroot password is #{host['instance'].instance_id}")
end
# Set the :vmhostname for each host object to be the dns_name, which is accessible
# publicly. Then configure each ec2 machine to that dns_name, so that when facter
# is installed the facts for hostname and domain match the dns_name.
#
# if :use_beaker_hostnames is true, set the :vmhostname and hostname of each ec2
# machine to the host[:name] from the beaker hosts file.
#
# @return [@hosts]
# @api private
def set_hostnames
if @options[:use_beaker_hostnames]
@hosts.each do |host|
host[:vmhostname] = host.name
if host['platform'] =~ /el-7/
# on el-7 hosts, the hostname command doesn't "stick" randomly
host.exec(Command.new("hostnamectl set-hostname #{host.name}"))
elsif host['platform'] =~ /windows/
@logger.notify('aws-sdk: Change hostname on windows is not supported.')
else
next if host['platform'] =~ /f5-|netscaler/
host.exec(Command.new("hostname #{host.name}"))
if host['vmname'] =~ /^amazon/
# Amazon Linux requires this to preserve host name changes across reboots.
# http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/set-hostname.html
# Also note that without an elastic ip set, while this will
# preserve the hostname across a full shutdown/startup of the vm
# (as opposed to a reboot) -- the ip address will have changed.
host.exec(Command.new("sed -ie '/^HOSTNAME/ s/=.*/=#{host.name}/' /etc/sysconfig/network"))
end
end
end
else
@hosts.each do |host|
host[:vmhostname] = host[:dns_name]
if host['platform'] =~ /el-7/
# on el-7 hosts, the hostname command doesn't "stick" randomly
host.exec(Command.new("hostnamectl set-hostname #{host.hostname}"))
elsif host['platform'] =~ /windows/
@logger.notify('aws-sdk: Change hostname on windows is not supported.')
else
next if host['platform'] =~ /f5-|netscaler/
host.exec(Command.new("hostname #{host.hostname}"))
if host['vmname'] =~ /^amazon/
# See note above
host.exec(Command.new("sed -ie '/^HOSTNAME/ s/=.*/=#{host.hostname}/' /etc/sysconfig/network"))
end
end
end
end
end
# Calculates and waits a back-off period based on the number of tries
#
# Logs each back-off time and retry value to the console.
#
# @param tries [Number] number of tries to calculate back-off period
# @return [void]
# @api private
def backoff_sleep(tries)
# Exponential back-off: 2**tries seconds
sleep_time = 2 ** tries
@logger.notify("aws-sdk: Sleeping #{sleep_time} seconds for attempt #{tries}.")
sleep sleep_time
nil
end
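# Illustrative schedule: tries 1..5 sleep 2, 4, 8, 16 and 32 seconds; a
# full run of ten tries waits roughly 34 minutes in total.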
# Retrieve the public key locally from the executing users ~/.ssh directory
#
# @return [String] contents of public key
# @api private
def public_key
keys = Array(@options[:ssh][:keys])
keys << '~/.ssh/id_rsa'
keys << '~/.ssh/id_dsa'
key_file = keys.find do |key|
key_pub = key + '.pub'
File.exist?(File.expand_path(key_pub)) && File.exist?(File.expand_path(key))
end
if key_file
@logger.debug("Using public key: #{key_file}")
else
raise RuntimeError, "Expected to find a public key, but couldn't in #{keys}"
end
File.read(File.expand_path(key_file + '.pub'))
end
# Generate a key prefix for key pair names
#
# @note This is the part of the key that will stay static between Beaker
# runs on the same host.
#
# @return [String] Beaker key pair name based on sanitized hostname
def key_name_prefix
safe_hostname = Socket.gethostname.gsub('.', '-')
"Beaker-#{local_user}-#{safe_hostname}"
end
# Generate a reusable key name from the local hosts hostname
#
# @return [String] safe key name for current host
# @api private
def key_name
"#{key_name_prefix}-#{@options[:aws_keyname_modifier]}-#{@options[:timestamp].strftime("%F_%H_%M_%S_%N")}"
end
# Returns the local user running this tool
#
# @return [String] username of local user
# @api private
def local_user
ENV['USER']
end
# Creates the KeyPair for this test run
#
# @param region [Aws::EC2::Region] region to create the key pair in
# @return [Aws::EC2::KeyPair] created key_pair
# @api private
def ensure_key_pair(region)
pair_name = key_name()
delete_key_pair(region, pair_name)
create_new_key_pair(region, pair_name)
end
# Deletes key pairs from all regions
#
# @param [String] keypair_name_filter if given, will get all keypairs that match
# a simple {::String#start_with?} filter. If no filter is given, the basic key
# name returned by {#key_name} will be used.
#
# @return nil
# @api private
def delete_key_pair_all_regions(keypair_name_filter=nil)
region_keypairs_hash = my_key_pairs(keypair_name_filter)
region_keypairs_hash.each_pair do |region, keypair_name_array|
keypair_name_array.each do |keypair_name|
delete_key_pair(region, keypair_name)
end
end
end
# Gets the Beaker user's keypairs by region
#
# @param [String] name_filter if given, will get all keypairs that match
# a simple {::String#start_with?} filter. If no filter is given, the basic key
# name returned by {#key_name} will be used.
#
# @return [Hash{String=>Array[String]}] a hash of region name to
# an array of the keypair names that match for the filter
# @api private
def my_key_pairs(name_filter=nil)
keypairs_by_region = {}
key_name_filter = name_filter ? "#{name_filter}-*" : key_name
regions.each do |region|
keypairs_by_region[region] = client(region).describe_key_pairs(
:filters => [{ :name => 'key-name', :values => [key_name_filter] }]
).key_pairs.map(&:key_name)
end
keypairs_by_region
end
# Deletes a given key pair
#
# @param [Aws::EC2::Region] region the region the key belongs to
# @param [String] pair_name the name of the key to be deleted
#
# @api private
def delete_key_pair(region, pair_name)
kp = client(region).describe_key_pairs(:key_names => [pair_name]).key_pairs.first
unless kp.nil?
@logger.debug("aws-sdk: delete key pair in region: #{region}")
client(region).delete_key_pair(:key_name => pair_name)
end
rescue Aws::EC2::Errors::InvalidKeyPairNotFound
nil
end
# Create a new key pair for a given Beaker run
#
# @param [Aws::EC2::Region] region the region the key pair will be imported into
# @param [String] pair_name the name of the key to be created
#
# @return [Aws::EC2::KeyPair] key pair created
# @raise [RuntimeError] raised if AWS keypair not created
def create_new_key_pair(region, pair_name)
@logger.debug("aws-sdk: importing new key pair: #{pair_name}")
client(region).import_key_pair(:key_name => pair_name, :public_key_material => public_key)
begin
client(region).wait_until(:key_pair_exists, { :key_names => [pair_name] }, :max_attempts => 5, :delay => 2)
rescue Aws::Waiters::Errors::WaiterFailed
raise RuntimeError, "AWS key pair #{pair_name} can not be queried, even after import"
end
end
# Return a reproducable security group identifier based on input ports
#
# @param ports [Array<Number>] array of port numbers
# @return [String] group identifier
# @api private
def group_id(ports)
if ports.nil? or ports.empty?
raise ArgumentError, "Ports list cannot be nil or empty"
end
unless ports.is_a? Set
ports = Set.new(ports)
end
# Note: Object#hash is inconsistent between Ruby processes, so use CRC32
"Beaker-#{Zlib.crc32(ports.inspect)}"
end
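# Illustrative sketch: CRC32 over the Set's #inspect string is stable
# across Ruby processes, unlike Object#hash, so every Beaker run derives
# the same group name from the same port set:
#   require 'set'
#   require 'zlib'
#   "Beaker-#{Zlib.crc32(Set.new([22, 443]).inspect)}"  # same value in every process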
# Return an existing group, or create new one
#
# Accepts a VPC as input for checking & creation.
#
# @param vpc [Aws::EC2::VPC] the AWS vpc control object
# @param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
# @return [Aws::EC2::SecurityGroup] created security group
# @api private
def ensure_ping_group(vpc, sg_cidr_ips = ['0.0.0.0/0'])
@logger.notify("aws-sdk: Ensure security group exists that enables ping, create if not")
group = client.describe_security_groups(
:filters => [
{ :name => 'group-name', :values => [PING_SECURITY_GROUP_NAME] },
{ :name => 'vpc-id', :values => [vpc.vpc_id] },
]
).security_groups.first
if group.nil?
group = create_ping_group(vpc, sg_cidr_ips)
end
group
end
# Return an existing group, or create new one
#
# Accepts a VPC as input for checking & creation.
#
# @param vpc [Aws::EC2::VPC] the AWS vpc control object
# @param ports [Array<Number>] an array of port numbers
# @param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
# @return [Aws::EC2::SecurityGroup] created security group
# @api private
def ensure_group(vpc, ports, sg_cidr_ips = ['0.0.0.0/0'])
@logger.notify("aws-sdk: Ensure security group exists for ports #{ports.to_s}, create if not")
name = group_id(ports)
group = client.describe_security_groups(
:filters => [
{ :name => 'group-name', :values => [name] },
{ :name => 'vpc-id', :values => [vpc.vpc_id] },
]
).security_groups.first
if group.nil?
group = create_group(vpc, ports, sg_cidr_ips)
end
group
end
# Create a new ping enabled security group
#
# Accepts a region or VPC for group creation.
#
# @param region_or_vpc [Aws::EC2::Region, Aws::EC2::VPC] the AWS region or vpc control object
# @param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
# @return [Aws::EC2::SecurityGroup] created security group
# @api private
def create_ping_group(region_or_vpc, sg_cidr_ips = ['0.0.0.0/0'])
@logger.notify("aws-sdk: Creating group #{PING_SECURITY_GROUP_NAME}")
cl = region_or_vpc.is_a?(String) ? client(region_or_vpc) : client
params = {
:description => 'Custom Beaker security group to enable ping',
:group_name => PING_SECURITY_GROUP_NAME,
}
params[:vpc_id] = region_or_vpc.vpc_id if region_or_vpc.is_a?(Aws::EC2::Types::Vpc)
group = cl.create_security_group(params)
sg_cidr_ips.each do |cidr_ip|
add_ingress_rule(
cl,
group,
cidr_ip,
'8', # 8 == ICMPv4 ECHO request
'-1', # -1 == All ICMP codes
'icmp',
)
end
group
end
# Create a new security group
#
# Accepts a region or VPC for group creation.
#
# @param region_or_vpc [Aws::EC2::Region, Aws::EC2::VPC] the AWS region or vpc control object
# @param ports [Array<Number>] an array of port numbers
# @param sg_cidr_ips [Array<String>] CIDRs used for outbound security group rule
# @return [Aws::EC2::SecurityGroup] created security group
# @api private
def create_group(region_or_vpc, ports, sg_cidr_ips = ['0.0.0.0/0'])
name = group_id(ports)
@logger.notify("aws-sdk: Creating group #{name} for ports #{ports.to_s}")
@logger.notify("aws-sdk: Creating group #{name} with CIDR IPs #{sg_cidr_ips.to_s}")
cl = region_or_vpc.is_a?(String) ? client(region_or_vpc) : client
params = {
:description => "Custom Beaker security group for #{ports.to_a}",
:group_name => name,
}
params[:vpc_id] = region_or_vpc.vpc_id if region_or_vpc.is_a?(Aws::EC2::Types::Vpc)
group = cl.create_security_group(params)
unless ports.is_a? Set
ports = Set.new(ports)
end
sg_cidr_ips.each do |cidr_ip|
ports.each do |port|
add_ingress_rule(cl, group, cidr_ip, port, port)
end
end
group
end
# Authorizes connections from certain CIDR to a range of ports
#
# @param cl [Aws::EC2::Client]
# @param sg_group [Aws::EC2::SecurityGroup] the AWS security group
# @param cidr_ip [String] CIDR used for outbound security group rule
# @param from_port [String] Starting Port number in the range
# @param to_port [String] Ending Port number in the range
# @return [void]
# @api private
def add_ingress_rule(cl, sg_group, cidr_ip, from_port, to_port, protocol = 'tcp')
cl.authorize_security_group_ingress(
:cidr_ip => cidr_ip,
:ip_protocol => protocol,
:from_port => from_port,
:to_port => to_port,
:group_id => sg_group.group_id,
)
end
# Return a hash containing AWS credentials
#
# @return [Hash<Symbol, String>] AWS credentials
# @api private
def load_credentials
return load_env_credentials if load_env_credentials.set?
load_fog_credentials(@options[:dot_fog])
end
# Return AWS credentials loaded from environment variables
#
# @param prefix [String] environment variable prefix
# @return [Aws::Credentials] ec2 credentials
# @api private
def load_env_credentials(prefix='AWS')
Aws::Credentials.new(
ENV["#{prefix}_ACCESS_KEY_ID"],
ENV["#{prefix}_SECRET_ACCESS_KEY"],
ENV["#{prefix}_SESSION_TOKEN"]
)
end
# Return a hash containing the fog credentials for EC2
#
# @param dot_fog [String] dot fog path
# @return [Aws::Credentials] ec2 credentials
# @api private
def load_fog_credentials(dot_fog = '.fog')
default = get_fog_credentials(dot_fog)
raise "You must specify an aws_access_key_id in your .fog file (#{dot_fog}) for ec2 instances!" unless default[:aws_access_key_id]
raise "You must specify an aws_secret_access_key in your .fog file (#{dot_fog}) for ec2 instances!" unless default[:aws_secret_access_key]
Aws::Credentials.new(
default[:aws_access_key_id],
default[:aws_secret_access_key],
default[:aws_session_token]
)
end
# Adds port 8143 to host[:additional_ports]
# if master, database and dashboard are not on same instance
def test_split_install
@hosts.each do |host|
mono_roles = ['master', 'database', 'dashboard']
roles_intersection = host[:roles] & mono_roles
if roles_intersection.size != 3 && roles_intersection.any?
host[:additional_ports] ? host[:additional_ports].push(8143) : host[:additional_ports] = [8143]
end
end
end
end
|
bdwyertech/chef-rundeck2 | lib/chef-rundeck/auth.rb | ChefRunDeck.Auth.role_admin? | ruby | def role_admin?(run_list = nil)
return false unless run_list.is_a?(Array)
# => This will Authorize Anyone if the RunList is Empty or the Chef Node does not exist!!!
run_list.empty? || auth['roles'].any? { |role| run_list.any? { |r| r =~ /role\[#{role}\]/i } }
end | => Role-Based Administration | train | https://github.com/bdwyertech/chef-rundeck2/blob/5c67fa2a2f4cd01716a0859dd4b900e740dfc8f7/lib/chef-rundeck/auth.rb#L73-L77 | module Auth
extend self
#############################
# => Authorization <= #
#############################
# => This holds the Authorization State
attr_accessor :auth
def auth
# => Define Authorization
@auth ||= reset!
end
def reset!
# => Reset Authorization
@auth = { 'roles' => [] }
end
def parse(user = nil)
# => Try to Find the User and their Authorization
auth = Util.parse_json_config(Config.auth_file, false)
return reset! unless auth && auth[user]
@auth = auth[user]
end
def admin?
# => Check if a User is an Administrator
auth['roles'].any? { |x| x.casecmp('admin').zero? }
end
def creator?(node)
# => Grab the Node-State Object
existing = State.find_state(node)
return false unless existing
# => Check if Auth User was the Node-State Creator
existing[:creator].to_s.casecmp(Config.query_params['auth_user'].to_s).zero?
end
# => Validate the User's Authentication Key ## TODO: Use this, passthrough from a RunDeck Option Field
def key?
# => We store a SHA512 Hex Digest of the Key
return false unless Config.query_params['auth_key']
Digest::SHA512.hexdigest(Config.query_params['auth_key']) == auth['auth_key']
end
# => TODO: Project-Based Validation
def project_admin?(project = nil)
return false unless project.is_a?(Array)
# => parse_auth.include?(user) && parse_auth[user]['roles'].any? { |r| ['admin', project].include? r.to_s.downcase }
auth['roles'].any? { |r| ['admin', project].include? r.to_s.downcase }
end
# => Role-Based Administration
end
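# Illustrative sketch (hypothetical data): with auth roles ['webserver'],
# role_admin? matches when any run-list entry looks like role[webserver]:
#   ChefRunDeck::Auth.role_admin?(['recipe[nginx]', 'role[webserver]'])  # => true
#   ChefRunDeck::Auth.role_admin?([])   # => true  (empty run list authorizes anyone)
#   ChefRunDeck::Auth.role_admin?(nil)  # => false (not an Array)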
|
algolia/algoliasearch-client-ruby | lib/algolia/index.rb | Algolia.Index.search_rules | ruby | def search_rules(query, params = {}, request_options = {})
anchoring = params[:anchoring]
context = params[:context]
page = params[:page] || params['page'] || 0
hits_per_page = params[:hitsPerPage] || params['hitsPerPage'] || 20
params = {
:query => query,
:page => page,
:hitsPerPage => hits_per_page
}
params[:anchoring] = anchoring unless anchoring.nil?
params[:context] = context unless context.nil?
client.post(Protocol.search_rules_uri(name), params.to_json, :read, request_options)
end | Search rules
@param query the query
@param params an optional hash of :anchoring, :context, :page, :hitsPerPage
@param request_options contains extra parameters to send with your query | train | https://github.com/algolia/algoliasearch-client-ruby/blob/5292cd9b1029f879e4e0257a3e89d0dc9ad0df3b/lib/algolia/index.rb#L1041-L1054 | class Index
attr_accessor :name, :client
def initialize(name, client = nil)
self.name = name
self.client = client || Algolia.client
end
#
# Delete an index
#
# @param request_options contains extra parameters to send with your query
#
# return an hash of the form { "deletedAt" => "2013-01-18T15:33:13.556Z", "taskID" => "42" }
#
def delete(request_options = {})
client.delete(Protocol.index_uri(name), :write, request_options)
end
alias_method :delete_index, :delete
#
# Delete an index and wait until the deletion has been processed
#
# @param request_options contains extra parameters to send with your query
#
# return an hash of the form { "deletedAt" => "2013-01-18T15:33:13.556Z", "taskID" => "42" }
#
def delete!(request_options = {})
res = delete(request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
alias_method :delete_index!, :delete!
#
# Add an object in this index
#
# @param object the object to add to the index.
# The object is represented by an associative array
# @param objectID (optional) an objectID you want to attribute to this object
# (if the attribute already exists, the old object will be overwritten)
# @param request_options contains extra parameters to send with your query
#
def add_object(object, objectID = nil, request_options = {})
check_object(object)
if objectID.nil? || objectID.to_s.empty?
client.post(Protocol.index_uri(name), object.to_json, :write, request_options)
else
client.put(Protocol.object_uri(name, objectID), object.to_json, :write, request_options)
end
end
#
# Add an object in this index and wait for the end of indexing
#
# @param object the object to add to the index.
# The object is represented by an associative array
# @param objectID (optional) an objectID you want to attribute to this object
# (if the attribute already exists, the old object will be overwritten)
# @param request_options contains extra parameters to send with your query
#
def add_object!(object, objectID = nil, request_options = {})
res = add_object(object, objectID, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
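# Usage sketch (illustrative; index name and record are hypothetical):
#   index = Algolia::Index.new("contacts")
#   index.add_object!({ :firstname => "Jimmie", :lastname => "Barninger" }, "myID")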
#
# Add several objects in this index
#
# @param objects the array of objects to add inside the index.
# Each object is represented by an associative array
# @param request_options contains extra parameters to send with your query
#
def add_objects(objects, request_options = {})
batch(build_batch('addObject', objects, false), request_options)
end
#
# Add several objects in this index and wait for the end of indexing
#
# @param objects the array of objects to add inside the index.
# Each object is represented by an associative array
# @param request_options contains extra parameters to send with your query
#
def add_objects!(objects, request_options = {})
res = add_objects(objects, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Search inside the index
#
# @param query the full text query
# @param params (optional) if set, contains an associative array with query parameters:
# - page: (integer) Pagination parameter used to select the page to retrieve.
# Page is zero-based and defaults to 0. Thus, to retrieve the 10th page you need to set page=9
# - hitsPerPage: (integer) Pagination parameter used to select the number of hits per page. Defaults to 20.
# - attributesToRetrieve: a string that contains the list of object attributes you want to retrieve (letting you minimize the answer size).
# Attributes are separated with a comma (for example "name,address").
# You can also use a string array encoding (for example ["name","address"]).
# By default, all attributes are retrieved. You can also use '*' to retrieve all values when an attributesToRetrieve setting is specified for your index.
# - attributesToHighlight: a string that contains the list of attributes you want to highlight according to the query.
# Attributes are separated by a comma. You can also use a string array encoding (for example ["name","address"]).
# If an attribute has no match for the query, the raw value is returned. By default all indexed text attributes are highlighted.
# You can use `*` if you want to highlight all textual attributes. Numerical attributes are not highlighted.
# A matchLevel is returned for each highlighted attribute and can contain:
# - full: if all the query terms were found in the attribute,
# - partial: if only some of the query terms were found,
# - none: if none of the query terms were found.
# - attributesToSnippet: a string that contains the list of attributes to snippet alongside the number of words to return (syntax is `attributeName:nbWords`).
# Attributes are separated by a comma (Example: attributesToSnippet=name:10,content:10).
# You can also use a string array encoding (Example: attributesToSnippet: ["name:10","content:10"]). By default no snippet is computed.
# - minWordSizefor1Typo: the minimum number of characters in a query word to accept one typo in this word. Defaults to 3.
# - minWordSizefor2Typos: the minimum number of characters in a query word to accept two typos in this word. Defaults to 7.
# - getRankingInfo: if set to 1, the result hits will contain ranking information in _rankingInfo attribute.
# - aroundLatLng: search for entries around a given latitude/longitude (specified as two floats separated by a comma).
# For example aroundLatLng=47.316669,5.016670).
# You can specify the maximum distance in meters with the aroundRadius parameter (in meters) and the precision for ranking with aroundPrecision
# (for example if you set aroundPrecision=100, two objects that are distant of less than 100m will be considered as identical for "geo" ranking parameter).
# At indexing, you should specify geoloc of an object with the _geoloc attribute (in the form {"_geoloc":{"lat":48.853409, "lng":2.348800}})
# - insideBoundingBox: search entries inside a given area defined by the two extreme points of a rectangle (defined by 4 floats: p1Lat,p1Lng,p2Lat,p2Lng).
# For example insideBoundingBox=47.3165,4.9665,47.3424,5.0201).
# At indexing, you should specify geoloc of an object with the _geoloc attribute (in the form {"_geoloc":{"lat":48.853409, "lng":2.348800}})
# - numericFilters: a string that contains the list of numeric filters you want to apply separated by a comma.
# The syntax of one filter is `attributeName` followed by `operand` followed by `value`. Supported operands are `<`, `<=`, `=`, `>` and `>=`.
# You can have multiple conditions on one attribute like for example numericFilters=price>100,price<1000.
# You can also use a string array encoding (for example numericFilters: ["price>100","price<1000"]).
# - tagFilters: filter the query by a set of tags. You can AND tags by separating them by commas.
# To OR tags, you must add parentheses. For example, tags=tag1,(tag2,tag3) means tag1 AND (tag2 OR tag3).
# You can also use a string array encoding, for example tagFilters: ["tag1",["tag2","tag3"]] means tag1 AND (tag2 OR tag3).
# At indexing, tags should be added in the _tags** attribute of objects (for example {"_tags":["tag1","tag2"]}).
# - facetFilters: filter the query by a list of facets.
# Facets are separated by commas and each facet is encoded as `attributeName:value`.
# For example: `facetFilters=category:Book,author:John%20Doe`.
# You can also use a string array encoding (for example `["category:Book","author:John%20Doe"]`).
# - facets: List of object attributes that you want to use for faceting.
# Attributes are separated with a comma (for example `"category,author"` ).
# You can also use a JSON string array encoding (for example ["category","author"]).
# Only attributes that have been added in **attributesForFaceting** index setting can be used in this parameter.
# You can also use `*` to perform faceting on all attributes specified in **attributesForFaceting**.
# - queryType: select how the query words are interpreted, it can be one of the following value:
# - prefixAll: all query words are interpreted as prefixes,
# - prefixLast: only the last word is interpreted as a prefix (default behavior),
# - prefixNone: no query word is interpreted as a prefix. This option is not recommended.
# - optionalWords: a string that contains the list of words that should be considered as optional when found in the query.
# The list of words is comma separated.
# - distinct: If set to 1, enable the distinct feature (disabled by default) if the attributeForDistinct index setting is set.
# This feature is similar to the SQL "distinct" keyword: when enabled in a query with the distinct=1 parameter,
# all hits containing a duplicate value for the attributeForDistinct attribute are removed from results.
# For example, if the chosen attribute is show_name and several hits have the same value for show_name, then only the best
# one is kept and others are removed.
# @param request_options contains extra parameters to send with your query
#
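# === Example
# A hedged sketch of a typical call; `index` is assumed to be an
# initialized Index instance and the parameter values are illustrative:
#   index.search('phone', { :hitsPerPage => 10, :page => 0, :facets => 'brand' })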
def search(query, params = {}, request_options = {})
encoded_params = Hash[params.map { |k, v| [k.to_s, v.is_a?(Array) ? v.to_json : v] }]
encoded_params[:query] = query
client.post(Protocol.search_post_uri(name), { :params => Protocol.to_query(encoded_params) }.to_json, :search, request_options)
end
class IndexBrowser
def initialize(client, name, params)
@client = client
@name = name
@params = params
@cursor = params[:cursor] || params['cursor'] || nil
end
def browse(request_options = {}, &block)
loop do
answer = @client.get(Protocol.browse_uri(@name, @params.merge({ :cursor => @cursor })), :read, request_options)
answer['hits'].each do |hit|
if block.arity == 2
yield hit, @cursor
else
yield hit
end
end
@cursor = answer['cursor']
break if @cursor.nil?
end
end
end
#
# Browse all index content
#
# @param page_or_query_parameters the hash of query parameters to use to browse
# To browse from a specific cursor, just add a ":cursor" parameter
# @param hits_per_page an optional second parameters hash, kept for backward-compatibility (merged with the first)
# @param request_options contains extra parameters to send with your query
#
# @DEPRECATED:
# @param page Pagination parameter used to select the page to retrieve.
# @param hits_per_page Pagination parameter used to select the number of hits per page. Defaults to 1000.
#
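# === Example
# Two hedged usage sketches (`index` is an assumed Index instance):
#   # iterate over every record, page by page
#   index.browse { |hit| puts hit['objectID'] }
#   # or fetch a single page without a block
#   index.browse(0, 100)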
def browse(page_or_query_parameters = nil, hits_per_page = nil, request_options = {}, &block)
params = {}
if page_or_query_parameters.is_a?(Hash)
params.merge!(page_or_query_parameters)
else
params[:page] = page_or_query_parameters unless page_or_query_parameters.nil?
end
if hits_per_page.is_a?(Hash)
params.merge!(hits_per_page)
else
params[:hitsPerPage] = hits_per_page unless hits_per_page.nil?
end
if block_given?
IndexBrowser.new(client, name, params).browse(request_options, &block)
else
params[:page] ||= 0
params[:hitsPerPage] ||= 1000
client.get(Protocol.browse_uri(name, params), :read, request_options)
end
end
#
# Browse a single page from a specific cursor
#
# @param request_options contains extra parameters to send with your query
#
def browse_from(cursor, hits_per_page = 1000, request_options = {})
client.post(Protocol.browse_uri(name), { :cursor => cursor, :hitsPerPage => hits_per_page }.to_json, :read, request_options)
end
#
# Get an object from this index
#
# @param objectID the unique identifier of the object to retrieve
# @param attributes_to_retrieve (optional) if set, contains the list of attributes to retrieve as an array of strings or a string separated by ","
# @param request_options contains extra parameters to send with your query
#
def get_object(objectID, attributes_to_retrieve = nil, request_options = {})
attributes_to_retrieve = attributes_to_retrieve.join(',') if attributes_to_retrieve.is_a?(Array)
if attributes_to_retrieve.nil?
client.get(Protocol.object_uri(name, objectID, nil), :read, request_options)
else
client.get(Protocol.object_uri(name, objectID, { :attributes => attributes_to_retrieve }), :read, request_options)
end
end
#
# Get a list of objects from this index
#
# @param objectIDs the array of unique identifier of the objects to retrieve
# @param attributes_to_retrieve (optional) if set, contains the list of attributes to retrieve as an array of strings or a string separated by ","
# @param request_options contains extra parameters to send with your query
#
def get_objects(objectIDs, attributes_to_retrieve = nil, request_options = {})
attributes_to_retrieve = attributes_to_retrieve.join(',') if attributes_to_retrieve.is_a?(Array)
requests = objectIDs.map do |objectID|
req = { :indexName => name, :objectID => objectID.to_s }
req[:attributesToRetrieve] = attributes_to_retrieve unless attributes_to_retrieve.nil?
req
end
client.post(Protocol.objects_uri, { :requests => requests }.to_json, :read, request_options)['results']
end
#
# Check the status of a task on the server.
# All server tasks are asynchronous and you can check the status of a task with this method.
#
# @param taskID the id of the task returned by server
# @param request_options contains extra parameters to send with your query
#
def get_task_status(taskID, request_options = {})
client.get_task_status(name, taskID, request_options)
end
#
# Wait the publication of a task on the server.
# All server tasks are asynchronous and you can check with this method that the task is published.
#
# @param taskID the id of the task returned by server
# @param time_before_retry the time in milliseconds before retry (default = 100ms)
# @param request_options contains extra parameters to send with your query
#
def wait_task(taskID, time_before_retry = WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options = {})
client.wait_task(name, taskID, time_before_retry, request_options)
end
#
# Override the content of an object
#
# @param object the object to save
# @param objectID the associated objectID, if nil 'object' must contain an 'objectID' key
# @param request_options contains extra parameters to send with your query
#
def save_object(object, objectID = nil, request_options = {})
client.put(Protocol.object_uri(name, get_objectID(object, objectID)), object.to_json, :write, request_options)
end
#
# Override the content of object and wait end of indexing
#
# @param object the object to save
# @param objectID the associated objectID, if nil 'object' must contain an 'objectID' key
# @param request_options contains extra parameters to send with your query
#
def save_object!(object, objectID = nil, request_options = {})
res = save_object(object, objectID, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Override the content of several objects
#
# @param objects the array of objects to save, each object must contain an 'objectID' key
# @param request_options contains extra parameters to send with your query
#
def save_objects(objects, request_options = {})
batch(build_batch('updateObject', objects, true), request_options)
end
#
# Override the content of several objects and wait end of indexing
#
# @param objects the array of objects to save, each object must contain an objectID attribute
# @param request_options contains extra parameters to send with your query
#
def save_objects!(objects, request_options = {})
res = save_objects(objects, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Override the current objects by the given array of objects and wait end of indexing. Settings,
# synonyms and query rules are untouched. The objects are replaced without any downtime.
#
# @param objects the array of objects to save
# @param request_options contains extra parameters to send with your query
#
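# === Example
# A hedged sketch; passing :safe => true makes the method wait on every
# intermediate task before moving the temporary index over the original
# (`new_records` is an assumed array of objects, not defined here):
#   index.replace_all_objects(new_records, :safe => true)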
def replace_all_objects(objects, request_options = {})
safe = request_options[:safe] || request_options['safe'] || false
request_options.delete(:safe)
request_options.delete('safe')
tmp_index = @client.init_index(@name + '_tmp_' + rand(10000000).to_s)
responses = []
scope = ['settings', 'synonyms', 'rules']
res = @client.copy_index(@name, tmp_index.name, scope, request_options)
responses << res
if safe
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
end
batch = []
batch_size = 1000
count = 0
objects.each do |object|
batch << object
count += 1
if count == batch_size
res = tmp_index.add_objects(batch, request_options)
responses << res
batch = []
count = 0
end
end
if batch.any?
res = tmp_index.add_objects(batch, request_options)
responses << res
end
if safe
responses.each do |res|
tmp_index.wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
end
end
res = @client.move_index(tmp_index.name, @name, request_options)
responses << res
if safe
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
end
responses
end
#
# Override the current objects by the given array of objects and wait end of indexing
#
# @param objects the array of objects to save
# @param request_options contains extra parameters to send with your query
#
def replace_all_objects!(objects, request_options = {})
replace_all_objects(objects, request_options.merge(:safe => true))
end
#
# Update partially an object (only update attributes passed in argument)
#
# @param object the object attributes to override
# @param objectID the associated objectID, if nil 'object' must contain an 'objectID' key
# @param create_if_not_exits a boolean; if true, creates the object if it doesn't exist
# @param request_options contains extra parameters to send with your query
#
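# === Example
# A hedged sketch; only the attributes passed are overridden, the rest of
# the record is left untouched (the values below are illustrative):
#   index.partial_update_object({ :price => 9.99 }, '1')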
def partial_update_object(object, objectID = nil, create_if_not_exits = true, request_options = {})
client.post(Protocol.partial_object_uri(name, get_objectID(object, objectID), create_if_not_exits), object.to_json, :write, request_options)
end
#
# Partially override the content of several objects
#
# @param objects an array of objects to update (each object must contain an objectID attribute)
# @param create_if_not_exits a boolean; if true, creates the objects if they don't exist
# @param request_options contains extra parameters to send with your query
#
def partial_update_objects(objects, create_if_not_exits = true, request_options = {})
if create_if_not_exits
batch(build_batch('partialUpdateObject', objects, true), request_options)
else
batch(build_batch('partialUpdateObjectNoCreate', objects, true), request_options)
end
end
#
# Partially override the content of several objects and wait end of indexing
#
# @param objects an array of objects to update (each object must contain an objectID attribute)
# @param create_if_not_exits a boolean; if true, creates the objects if they don't exist
# @param request_options contains extra parameters to send with your query
#
def partial_update_objects!(objects, create_if_not_exits = true, request_options = {})
res = partial_update_objects(objects, create_if_not_exits, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Update partially an object (only update attributes passed in argument) and wait indexing
#
# @param object the attributes to override
# @param objectID the associated objectID, if nil 'object' must contain an 'objectID' key
# @param create_if_not_exits a boolean; if true, creates the object if it doesn't exist
# @param request_options contains extra parameters to send with your query
#
def partial_update_object!(object, objectID = nil, create_if_not_exits = true, request_options = {})
res = partial_update_object(object, objectID, create_if_not_exits, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Delete an object from the index
#
# @param objectID the unique identifier of object to delete
# @param request_options contains extra parameters to send with your query
#
def delete_object(objectID, request_options = {})
raise ArgumentError.new('objectID must not be blank') if objectID.nil? || objectID == ''
client.delete(Protocol.object_uri(name, objectID), :write, request_options)
end
#
# Delete an object from the index and wait end of indexing
#
# @param objectID the unique identifier of object to delete
# @param request_options contains extra parameters to send with your query
#
def delete_object!(objectID, request_options = {})
res = delete_object(objectID, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Delete several objects
#
# @param objects an array of objectIDs
# @param request_options contains extra parameters to send with your query
#
def delete_objects(objects, request_options = {})
check_array(objects)
batch(build_batch('deleteObject', objects.map { |objectID| { :objectID => objectID } }, false), request_options)
end
#
# Delete several objects and wait end of indexing
#
# @param objects an array of objectIDs
# @param request_options contains extra parameters to send with your query
#
def delete_objects!(objects, request_options = {})
res = delete_objects(objects, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Delete all objects matching a query
# This method retrieves all objects synchronously but deletes in batch
# asynchronously
#
# @param query the query string
# @param params the optional query parameters
# @param request_options contains extra parameters to send with your query
#
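# === Example
# A hedged sketch; this browses for matching objectIDs first, then issues
# a batched delete (the query and filter values are illustrative):
#   index.delete_by_query('', { :tagFilters => ['deprecated'] })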
def delete_by_query(query, params = nil, request_options = {})
raise ArgumentError.new('query cannot be nil, use the `clear` method to wipe the entire index') if query.nil? && params.nil?
params = sanitized_delete_by_query_params(params)
params[:query] = query
params[:hitsPerPage] = 1000
params[:distinct] = false
params[:attributesToRetrieve] = ['objectID']
params[:cursor] = ''
ids = []
while params[:cursor] != nil
result = browse(params, nil, request_options)
params[:cursor] = result['cursor']
hits = result['hits']
break if hits.empty?
ids += hits.map { |hit| hit['objectID'] }
end
delete_objects(ids, request_options)
end
#
# Delete all objects matching a query and wait end of indexing
#
# @param query the query string
# @param params the optional query parameters
# @param request_options contains extra parameters to send with your query
#
def delete_by_query!(query, params = nil, request_options = {})
res = delete_by_query(query, params, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options) if res
res
end
#
# Delete all objects matching a query (doesn't work with actual text queries)
# This method deletes every record matching the filters provided
#
# @param params query parameters
# @param request_options contains extra parameters to send with your query
#
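# === Example
# A hedged sketch; unlike delete_by_query, the filtering happens entirely
# server-side (the :filters key and value are illustrative assumptions):
#   index.delete_by({ :filters => 'category:car' })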
def delete_by(params, request_options = {})
raise ArgumentError.new('params cannot be nil, use the `clear` method to wipe the entire index') if params.nil?
params = sanitized_delete_by_query_params(params)
client.post(Protocol.delete_by_uri(name), params.to_json, :write, request_options)
end
#
# Delete all objects matching a query (doesn't work with actual text queries)
# This method deletes every record matching the filters provided and waits for the end of indexing
# @param params query parameters
# @param request_options contains extra parameters to send with your query
#
def delete_by!(params, request_options = {})
res = delete_by(params, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options) if res
res
end
#
# Delete the index content
#
# @param request_options contains extra parameters to send with your query
#
def clear(request_options = {})
client.post(Protocol.clear_uri(name), {}, :write, request_options)
end
alias_method :clear_index, :clear
#
# Delete the index content and wait end of indexing
#
def clear!(request_options = {})
res = clear(request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
alias_method :clear_index!, :clear!
#
# Set settings for this index
#
def set_settings(new_settings, options = {}, request_options = {})
client.put(Protocol.settings_uri(name, options), new_settings.to_json, :write, request_options)
end
#
# Set settings for this index and wait end of indexing
#
def set_settings!(new_settings, options = {}, request_options = {})
res = set_settings(new_settings, options, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Get settings of this index
#
def get_settings(options = {}, request_options = {})
options['getVersion'] = 2 if !options[:getVersion] && !options['getVersion']
client.get(Protocol.settings_uri(name, options).to_s, :read, request_options)
end
#
# List all existing user keys with their associated ACLs
#
# Deprecated: Please use `client.list_api_keys` instead.
def list_api_keys(request_options = {})
client.get(Protocol.index_keys_uri(name), :read, request_options)
end
#
# Get ACL of a user key
#
# Deprecated: Please use `client.get_api_key` instead.
def get_api_key(key, request_options = {})
client.get(Protocol.index_key_uri(name, key), :read, request_options)
end
#
# Create a new user key
#
# @param object can be two different parameters:
# The list of parameters for this key. Defined by a Hash that can
# contain the following values:
# - acl: array of string
# - validity: int
# - referers: array of string
# - description: string
# - maxHitsPerQuery: integer
# - queryParameters: string
# - maxQueriesPerIPPerHour: integer
# Or the list of ACLs for this key. Defined by an array of Strings that
# can contain the following values:
# - search: allows searching (https and http)
# - addObject: allows adding/updating an object in the index (https only)
# - deleteObject: allows deleting an existing object (https only)
# - deleteIndex: allows deleting index content (https only)
# - settings: allows getting index settings (https only)
# - editSettings: allows changing index settings (https only)
# @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key)
# @param max_queries_per_IP_per_hour the maximum number of API calls allowed from an IP address per hour (0 means unlimited)
# @param max_hits_per_query the maximum number of hits this API key can retrieve in one call (0 means unlimited)
# @param request_options contains extra parameters to send with your query
#
# Deprecated: Please use `client.add_api_key` instead
def add_api_key(object, validity = 0, max_queries_per_IP_per_hour = 0, max_hits_per_query = 0, request_options = {})
if object.instance_of?(Array)
params = { :acl => object }
else
params = object
end
params['validity'] = validity.to_i if validity != 0
params['maxHitsPerQuery'] = max_hits_per_query.to_i if max_hits_per_query != 0
params['maxQueriesPerIPPerHour'] = max_queries_per_IP_per_hour.to_i if max_queries_per_IP_per_hour != 0
client.post(Protocol.index_keys_uri(name), params.to_json, :write, request_options)
end
#
# Update a user key
#
# @param object can be two different parameters:
# The list of parameters for this key. Defined by a Hash that
# can contain the following values:
# - acl: array of string
# - validity: int
# - referers: array of string
# - description: string
# - maxHitsPerQuery: integer
# - queryParameters: string
# - maxQueriesPerIPPerHour: integer
# Or the list of ACLs for this key. Defined by an array of Strings that
# can contain the following values:
# - search: allows searching (https and http)
# - addObject: allows adding/updating an object in the index (https only)
# - deleteObject: allows deleting an existing object (https only)
# - deleteIndex: allows deleting index content (https only)
# - settings: allows getting index settings (https only)
# - editSettings: allows changing index settings (https only)
# @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key)
# @param max_queries_per_IP_per_hour the maximum number of API calls allowed from an IP address per hour (0 means unlimited)
# @param max_hits_per_query the maximum number of hits this API key can retrieve in one call (0 means unlimited)
# @param request_options contains extra parameters to send with your query
#
# Deprecated: Please use `client.update_api_key` instead
def update_api_key(key, object, validity = 0, max_queries_per_IP_per_hour = 0, max_hits_per_query = 0, request_options = {})
if object.instance_of?(Array)
params = { :acl => object }
else
params = object
end
params['validity'] = validity.to_i if validity != 0
params['maxHitsPerQuery'] = max_hits_per_query.to_i if max_hits_per_query != 0
params['maxQueriesPerIPPerHour'] = max_queries_per_IP_per_hour.to_i if max_queries_per_IP_per_hour != 0
client.put(Protocol.index_key_uri(name, key), params.to_json, :write, request_options)
end
#
# Delete an existing user key
#
# Deprecated: Please use `client.delete_api_key` instead
def delete_api_key(key, request_options = {})
client.delete(Protocol.index_key_uri(name, key), :write, request_options)
end
#
# Send a batch request
#
def batch(request, request_options = {})
client.post(Protocol.batch_uri(name), request.to_json, :batch, request_options)
end
#
# Send a batch request and wait the end of the indexing
#
def batch!(request, request_options = {})
res = batch(request, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Search for facet values
#
# @param facet_name Name of the facet to search. It must have been declared in the
# index's `attributesForFaceting` setting with the `searchable()` modifier.
# @param facet_query Text to search for in the facet's values
# @param search_parameters An optional query to take extra search parameters into account.
# These parameters apply to index objects like in a regular search query.
# Only facet values contained in the matched objects will be returned.
# @param request_options contains extra parameters to send with your query
#
def search_for_facet_values(facet_name, facet_query, search_parameters = {}, request_options = {})
params = search_parameters.clone
params['facetQuery'] = facet_query
client.post(Protocol.search_facet_uri(name, facet_name), params.to_json, :read, request_options)
end
# deprecated
alias_method :search_facet, :search_for_facet_values
#
# Perform a search with disjunctive facets, generating as many queries as the number of disjunctive facets
#
# @param query the query
# @param disjunctive_facets the array of disjunctive facets
# @param params a hash representing the regular query parameters
# @param refinements a hash ("string" -> ["array", "of", "refined", "values"]) representing the current refinements
# ex: { "my_facet1" => ["my_value1", ["my_value2"], "my_disjunctive_facet1" => ["my_value1", "my_value2"] }
# @param request_options contains extra parameters to send with your query
#
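# === Example
# A hedged sketch; one main query plus one query per disjunctive facet is
# sent through multiple_queries (facet names and refinements are illustrative):
#   index.search_disjunctive_faceting('phone', ['brand'],
#     { :facets => 'category' },
#     { 'brand' => ['Apple', 'Samsung'] })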
def search_disjunctive_faceting(query, disjunctive_facets, params = {}, refinements = {}, request_options = {})
raise ArgumentError.new('Argument "disjunctive_facets" must be a String or an Array') unless disjunctive_facets.is_a?(String) || disjunctive_facets.is_a?(Array)
raise ArgumentError.new('Argument "refinements" must be a Hash of Arrays') if !refinements.is_a?(Hash) || !refinements.select { |k, v| !v.is_a?(Array) }.empty?
# extract disjunctive facets & associated refinements
disjunctive_facets = disjunctive_facets.split(',') if disjunctive_facets.is_a?(String)
disjunctive_refinements = {}
refinements.each do |k, v|
disjunctive_refinements[k] = v if disjunctive_facets.include?(k) || disjunctive_facets.include?(k.to_s)
end
# build queries
queries = []
## hits + regular facets query
filters = []
refinements.to_a.each do |k, values|
r = values.map { |v| "#{k}:#{v}" }
if disjunctive_refinements[k.to_s] || disjunctive_refinements[k.to_sym]
# disjunctive refinements are ORed
filters << r
else
# regular refinements are ANDed
filters += r
end
end
queries << params.merge({ :index_name => self.name, :query => query, :facetFilters => filters })
## one query per disjunctive facet (use all refinements but the current one + hitsPerPage=1 + single facet)
disjunctive_facets.each do |disjunctive_facet|
filters = []
refinements.each do |k, values|
if k.to_s != disjunctive_facet.to_s
r = values.map { |v| "#{k}:#{v}" }
if disjunctive_refinements[k.to_s] || disjunctive_refinements[k.to_sym]
# disjunctive refinements are ORed
filters << r
else
# regular refinements are ANDed
filters += r
end
end
end
queries << params.merge({
:index_name => self.name,
:query => query,
:page => 0,
:hitsPerPage => 1,
:attributesToRetrieve => [],
:attributesToHighlight => [],
:attributesToSnippet => [],
:facets => disjunctive_facet,
:facetFilters => filters,
:analytics => false
})
end
answers = client.multiple_queries(queries, { :request_options => request_options })
# aggregate answers
## first answer stores the hits + regular facets
aggregated_answer = answers['results'][0]
## others store the disjunctive facets
aggregated_answer['disjunctiveFacets'] = {}
answers['results'].each_with_index do |a, i|
next if i == 0
a['facets'].each do |facet, values|
## add the facet to the disjunctive facet hash
aggregated_answer['disjunctiveFacets'][facet] = values
## concatenate missing refinements
(disjunctive_refinements[facet.to_s] || disjunctive_refinements[facet.to_sym] || []).each do |r|
if aggregated_answer['disjunctiveFacets'][facet][r].nil?
aggregated_answer['disjunctiveFacets'][facet][r] = 0
end
end
end
end
aggregated_answer
end
#
# Alias of Algolia.list_indexes
#
# @param request_options contains extra parameters to send with your query
#
def Index.all(request_options = {})
Algolia.list_indexes(request_options)
end
#
# Search synonyms
#
# @param query the query
# @param params an optional hash of :type, :page, :hitsPerPage
# @param request_options contains extra parameters to send with your query
#
def search_synonyms(query, params = {}, request_options = {})
type = params[:type] || params['type']
type = type.join(',') if type.is_a?(Array)
page = params[:page] || params['page'] || 0
hits_per_page = params[:hitsPerPage] || params['hitsPerPage'] || 20
params = {
:query => query,
:type => type.to_s,
:page => page,
:hitsPerPage => hits_per_page
}
client.post(Protocol.search_synonyms_uri(name), params.to_json, :read, request_options)
end
#
# Get a synonym
#
# @param objectID the synonym objectID
# @param request_options contains extra parameters to send with your query
def get_synonym(objectID, request_options = {})
client.get(Protocol.synonym_uri(name, objectID), :read, request_options)
end
#
# Delete a synonym
#
# @param objectID the synonym objectID
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def delete_synonym(objectID, forward_to_replicas = false, request_options = {})
client.delete("#{Protocol.synonym_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}", :write, request_options)
end
#
# Delete a synonym and wait the end of indexing
#
# @param objectID the synonym objectID
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def delete_synonym!(objectID, forward_to_replicas = false, request_options = {})
res = delete_synonym(objectID, forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Save a synonym
#
# @param objectID the synonym objectID
# @param synonym the synonym
# @param forward_to_replicas should we forward the save to replica indices
# @param request_options contains extra parameters to send with your query
#
def save_synonym(objectID, synonym, forward_to_replicas = false, request_options = {})
client.put("#{Protocol.synonym_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}", synonym.to_json, :write, request_options)
end
#
# Save a synonym and wait the end of indexing
#
# @param objectID the synonym objectID
# @param synonym the synonym
# @param forward_to_replicas should we forward the save to replica indices
# @param request_options contains extra parameters to send with your query
#
def save_synonym!(objectID, synonym, forward_to_replicas = false, request_options = {})
res = save_synonym(objectID, synonym, forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Clear all synonyms
#
# @param forward_to_replicas should we forward the clear to replica indices
# @param request_options contains extra parameters to send with your query
#
def clear_synonyms(forward_to_replicas = false, request_options = {})
client.post("#{Protocol.clear_synonyms_uri(name)}?forwardToReplicas=#{forward_to_replicas}", {}, :write, request_options)
end
#
# Clear all synonyms and wait the end of indexing
#
# @param forward_to_replicas should we forward the clear to replica indices
# @param request_options contains extra parameters to send with your query
#
def clear_synonyms!(forward_to_replicas = false, request_options = {})
res = clear_synonyms(forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Add/Update an array of synonyms
#
# @param synonyms the array of synonyms to add/update
# @param forward_to_replicas should we forward the changes to replica indices
# @param replace_existing_synonyms should we replace the existing synonyms before adding the new ones
# @param request_options contains extra parameters to send with your query
#
def batch_synonyms(synonyms, forward_to_replicas = false, replace_existing_synonyms = false, request_options = {})
client.post("#{Protocol.batch_synonyms_uri(name)}?forwardToReplicas=#{forward_to_replicas}&replaceExistingSynonyms=#{replace_existing_synonyms}", synonyms.to_json, :batch, request_options)
end
#
# Add/Update an array of synonyms and wait the end of indexing
#
# @param synonyms the array of synonyms to add/update
# @param forward_to_replicas should we forward the changes to replica indices
# @param replace_existing_synonyms should we replace the existing synonyms before adding the new ones
# @param request_options contains extra parameters to send with your query
#
def batch_synonyms!(synonyms, forward_to_replicas = false, replace_existing_synonyms = false, request_options = {})
res = batch_synonyms(synonyms, forward_to_replicas, replace_existing_synonyms, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Replace synonyms in the index by the given array of synonyms
#
# @param synonyms the array of synonyms to add
# @param request_options contains extra parameters to send with your query
#
def replace_all_synonyms(synonyms, request_options = {})
forward_to_replicas = request_options[:forwardToReplicas] || request_options['forwardToReplicas'] || false
batch_synonyms(synonyms, forward_to_replicas, true, request_options)
end
#
# Replace synonyms in the index by the given array of synonyms and wait the end of indexing
#
# @param synonyms the array of synonyms to add
# @param request_options contains extra parameters to send with your query
#
def replace_all_synonyms!(synonyms, request_options = {})
res = replace_all_synonyms(synonyms, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Export the full list of synonyms
# Accepts an optional block to which it will pass each synonym
# Also returns an array with all the synonyms
#
# @param hits_per_page Amount of synonyms to retrieve on each internal request - Optional - Default: 100
# @param request_options contains extra parameters to send with your query - Optional
#
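# === Example
# A hedged sketch; pages through the synonyms 100 at a time and yields
# each one to the block:
#   index.export_synonyms(100) { |synonym| puts synonym['objectID'] }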
def export_synonyms(hits_per_page = 100, request_options = {}, &_block)
res = []
page = 0
loop do
curr = search_synonyms('', { :hitsPerPage => hits_per_page, :page => page }, request_options)['hits']
curr.each do |synonym|
res << synonym
yield synonym if block_given?
end
break if curr.size < hits_per_page
page += 1
end
res
end
#
# Search rules
#
# @param query the query
# @param params an optional hash of :anchoring, :context, :page, :hitsPerPage
# @param request_options contains extra parameters to send with your query
#
#
# Get a rule
#
# @param objectID the rule objectID
# @param request_options contains extra parameters to send with your query
#
def get_rule(objectID, request_options = {})
client.get(Protocol.rule_uri(name, objectID), :read, request_options)
end
#
# Delete a rule
#
# @param objectID the rule objectID
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def delete_rule(objectID, forward_to_replicas = false, request_options = {})
client.delete("#{Protocol.rule_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}", :write, request_options)
end
#
# Delete a rule and wait the end of indexing
#
# @param objectID the rule objectID
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def delete_rule!(objectID, forward_to_replicas = false, request_options = {})
res = delete_rule(objectID, forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Save a rule
#
# @param objectID the rule objectID
# @param rule the rule
# @param forward_to_replicas should we forward the save to replica indices
# @param request_options contains extra parameters to send with your query
#
def save_rule(objectID, rule, forward_to_replicas = false, request_options = {})
raise ArgumentError.new('objectID must not be blank') if objectID.nil? || objectID == ''
client.put("#{Protocol.rule_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}", rule.to_json, :write, request_options)
end
#
# Save a rule and wait the end of indexing
#
# @param objectID the rule objectID
# @param rule the rule
# @param forward_to_replicas should we forward the save to replica indices
# @param request_options contains extra parameters to send with your query
#
def save_rule!(objectID, rule, forward_to_replicas = false, request_options = {})
res = save_rule(objectID, rule, forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Clear all rules
#
# @param forward_to_replicas should we forward the clear to replica indices
# @param request_options contains extra parameters to send with your query
#
def clear_rules(forward_to_replicas = false, request_options = {})
client.post("#{Protocol.clear_rules_uri(name)}?forwardToReplicas=#{forward_to_replicas}", {}, :write, request_options)
end
#
# Clear all rules and wait the end of indexing
#
# @param forward_to_replicas should we forward the clear to replica indices
# @param request_options contains extra parameters to send with your query
#
def clear_rules!(forward_to_replicas = false, request_options = {})
res = clear_rules(forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Add/Update an array of rules
#
# @param rules the array of rules to add/update
# @param forward_to_replicas should we forward the changes to replica indices
# @param clear_existing_rules should we clear the existing rules before adding the new ones
# @param request_options contains extra parameters to send with your query
#
def batch_rules(rules, forward_to_replicas = false, clear_existing_rules = false, request_options = {})
client.post("#{Protocol.batch_rules_uri(name)}?forwardToReplicas=#{forward_to_replicas}&clearExistingRules=#{clear_existing_rules}", rules.to_json, :batch, request_options)
end
#
# Add/Update an array of rules and wait the end of indexing
#
# @param rules the array of rules to add/update
# @param forward_to_replicas should we forward the changes to replica indices
# @param clear_existing_rules should we clear the existing rules before adding the new ones
# @param request_options contains extra parameters to send with your query
#
def batch_rules!(rules, forward_to_replicas = false, clear_existing_rules = false, request_options = {})
res = batch_rules(rules, forward_to_replicas, clear_existing_rules, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Replace rules in the index by the given array of rules
#
# @param rules the array of rules to add
# @param request_options contains extra parameters to send with your query
#
def replace_all_rules(rules, request_options = {})
forward_to_replicas = request_options[:forwardToReplicas] || request_options['forwardToReplicas'] || false
batch_rules(rules, forward_to_replicas, true, request_options)
end
#
# Replace rules in the index by the given array of rules and wait the end of indexing
#
# @param rules the array of rules to add
# @param request_options contains extra parameters to send with your query
#
def replace_all_rules!(rules, request_options = {})
res = replace_all_rules(rules, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Export the full list of rules
# Accepts an optional block to which it will pass each rule
# Also returns an array with all the rules
#
# @param hits_per_page Amount of rules to retrieve on each internal request - Optional - Default: 100
# @param request_options contains extra parameters to send with your query - Optional
#
def export_rules(hits_per_page = 100, request_options = {}, &_block)
res = []
page = 0
loop do
curr = search_rules('', { :hitsPerPage => hits_per_page, :page => page }, request_options)['hits']
curr.each do |rule|
res << rule
yield rule if block_given?
end
break if curr.size < hits_per_page
page += 1
end
res
end
# Deprecated
alias_method :get_user_key, :get_api_key
alias_method :list_user_keys, :list_api_keys
alias_method :add_user_key, :add_api_key
alias_method :update_user_key, :update_api_key
alias_method :delete_user_key, :delete_api_key
private
def check_array(object)
raise ArgumentError.new('argument must be an array of objects') if !object.is_a?(Array)
end
def check_object(object, in_array = false)
case object
when Array
raise ArgumentError.new(in_array ? 'argument must be an array of objects' : 'argument must not be an array')
when String, Integer, Float, TrueClass, FalseClass, NilClass
raise ArgumentError.new("argument must be an #{'array of' if in_array} object, got: #{object.inspect}")
else
# ok
end
end
def get_objectID(object, objectID = nil)
check_object(object)
objectID ||= object[:objectID] || object['objectID']
raise ArgumentError.new("Missing 'objectID'") if objectID.nil?
return objectID
end
def build_batch(action, objects, with_object_id = false)
check_array(objects)
{
:requests => objects.map { |object|
check_object(object, true)
h = { :action => action, :body => object }
h[:objectID] = get_objectID(object).to_s if with_object_id
h
}
}
end
def sanitized_delete_by_query_params(params)
params ||= {}
params.delete(:hitsPerPage)
params.delete('hitsPerPage')
params.delete(:attributesToRetrieve)
params.delete('attributesToRetrieve')
params
end
end
|
wvanbergen/request-log-analyzer | lib/request_log_analyzer/controller.rb | RequestLogAnalyzer.Controller.run! | ruby | def run!
# @aggregators.each{|agg| p agg}
@aggregators.each { |agg| agg.prepare }
install_signal_handlers
@source.each_request do |request|
break if @interrupted
aggregate_request(filter_request(request))
end
@aggregators.each { |agg| agg.finalize }
@output.header
@aggregators.each { |agg| agg.report(@output) }
@output.footer
@source.finalize
if @output.io.is_a?(File)
unless @options[:silent]
puts
puts 'Report written to: ' + File.expand_path(@output.io.path)
puts 'Need an expert to analyze your application?'
puts 'Mail to [email protected] or visit us at http://railsdoctors.com'
puts 'Thanks for using request-log-analyzer!'
end
@output.io.close
elsif @output.io.is_a?(RequestLogAnalyzer::Mailer)
@output.io.mail
end
end | Runs RequestLogAnalyzer
1. Call prepare on every aggregator
2. Generate requests from source object
3. Filter out unwanted requests
4. Call aggregate for remaining requests on every aggregator
5. Call finalize on every aggregator
6. Call report on every aggregator
7. Finalize Source | train | https://github.com/wvanbergen/request-log-analyzer/blob/b83865d440278583ac8e4901bb33878244fd7c75/lib/request_log_analyzer/controller.rb#L328-L359 | class Controller
attr_reader :source, :filters, :aggregators, :output, :options
# Builds a RequestLogAnalyzer::Controller given parsed command line arguments
# <tt>arguments<tt> A CommandLine::Arguments hash containing parsed commandline parameters.
def self.build_from_arguments(arguments)
options = {}
# Copy fields
options[:database] = arguments[:database]
options[:reset_database] = arguments[:reset_database]
options[:debug] = arguments[:debug]
options[:yaml] = arguments[:yaml] || arguments[:dump]
options[:mail] = arguments[:mail]
options[:no_progress] = arguments[:no_progress]
options[:format] = arguments[:format]
options[:output] = arguments[:output]
options[:file] = arguments[:file]
options[:after] = arguments[:after]
options[:before] = arguments[:before]
options[:reject] = arguments[:reject]
options[:select] = arguments[:select]
options[:boring] = arguments[:boring]
options[:aggregator] = arguments[:aggregator]
options[:report_width] = arguments[:report_width]
options[:report_sort] = arguments[:report_sort]
options[:report_amount] = arguments[:report_amount]
options[:mailhost] = arguments[:mailhost]
options[:mailfrom] = arguments[:mailfrom]
options[:mailfrom_name] = arguments[:mailfrom_name]
options[:mailsubject] = arguments[:mailsubject]
options[:silent] = arguments[:silent]
options[:parse_strategy] = arguments[:parse_strategy]
# Apache format workaround
if arguments[:rails_format]
options[:format] = { rails: arguments[:rails_format] }
elsif arguments[:apache_format]
options[:format] = { apache: arguments[:apache_format] }
end
# Handle output format casing
if options[:output].class == String
options[:output] = 'HTML' if options[:output] =~ /^html$/i
options[:output] = 'FixedWidth' if options[:output] =~ /^fixed_?width$/i
end
# Register sources
if arguments.parameters.length == 1
file = arguments.parameters[0]
if file == '-' || file == 'STDIN'
options.store(:source_files, $stdin)
elsif File.exist?(file)
options.store(:source_files, file)
else
puts "File not found: #{file}"
exit(0)
end
else
options.store(:source_files, arguments.parameters)
end
# Guess file format
if !options[:format] && options[:source_files]
options[:format] = :rails3 # Default
if options[:source_files] != $stdin
if options[:source_files].class == String
options[:format] = RequestLogAnalyzer::FileFormat.autodetect(options[:source_files])
elsif options[:source_files].class == Array && options[:source_files].first != $stdin
options[:format] = RequestLogAnalyzer::FileFormat.autodetect(options[:source_files].first)
end
end
end
build(options)
end
# Build a new controller.
# Returns a new RequestLogAnalyzer::Controller object.
#
# Options
# * <tt>:after</tt> Drop all requests before this date (Date, DateTime, Time, or a String in "YYYY-MM-DD hh:mm:ss" format)
# * <tt>:aggregator</tt> Array of aggregators (Strings or Symbols for the builtin aggregators or a RequestLogAnalyzer::Aggregator class - Defaults to [:summarizer]).
# * <tt>:boring</tt> Do not show color on STDOUT (Defaults to false).
# * <tt>:before</tt> Drop all requests after this date (Date, DateTime, Time or a String in "YYYY-MM-DD hh:mm:ss" format)
# * <tt>:database</tt> Database file to insert encountered requests to.
# * <tt>:debug</tt> Enables echo aggregator which will echo each request analyzed.
# * <tt>:file</tt> Filestring, File or StringIO.
# * <tt>:format</tt> :rails, {:apache => 'FORMATSTRING'}, :merb, :amazon_s3, :mysql or RequestLogAnalyzer::FileFormat class. (Defaults to :rails).
# * <tt>:mail</tt> Email the results to this email address.
# * <tt>:mailhost</tt> Email the results to this mail server.
# * <tt>:mailfrom</tt> Set the Email sender address.
# * <tt>:mailfrom_alias</tt> Set the Email sender name.
# * <tt>:mailsubject</tt> Email subject.
# * <tt>:no_progress</tt> Do not display the progress bar (increases parsing speed).
# * <tt>:output</tt> 'FixedWidth', 'HTML' or RequestLogAnalyzer::Output class. Defaults to 'FixedWidth'.
# * <tt>:reject</tt> Reject specific {:field => :value} combination (expects a single hash).
# * <tt>:report_width</tt> Width of reports in characters for FixedWidth reports. (Defaults to 80)
# * <tt>:reset_database</tt> Reset the database before starting.
# * <tt>:select</tt> Select specific {:field => :value} combination (expects a single hash).
# * <tt>:source_files</tt> Source files to analyze. Provide either File, array of files or STDIN.
# * <tt>:yaml</tt> Output to YAML file.
# * <tt>:silent</tt> Minimal output automatically implies :no_progress
# * <tt>:source</tt> The class to instantiate to grab the requestes, must be a RequestLogAnalyzer::Source::Base descendant. (Defaults to RequestLogAnalyzer::Source::LogParser)
#
# === Example
# RequestLogAnalyzer::Controller.build(
# :output => :HTML,
# :mail => 'root@localhost',
# :after => Time.now - 24*60*60,
# :source_files => '/var/log/passenger.log'
# ).run!
#
# === Todo
# * Check if defaults work (Aggregator defaults seem wrong).
# * Refactor :database => options[:database], :dump => options[:dump] away from controller initialization.
def self.build(options)
# Defaults
options[:output] ||= 'FixedWidth'
options[:format] ||= :rails
options[:aggregator] ||= [:summarizer]
options[:report_width] ||= 80
options[:report_amount] ||= 20
options[:report_sort] ||= 'sum,mean'
options[:boring] ||= false
options[:silent] ||= false
options[:source] ||= RequestLogAnalyzer::Source::LogParser
options[:no_progress] = true if options[:silent]
# Deprecation warnings
if options[:dump]
warn '[DEPRECATION] `:dump` is deprecated. Please use `:yaml` instead.'
options[:yaml] = options[:dump]
end
# Set the output class
output_args = {}
output_object = nil
if options[:output].is_a?(Class)
output_class = options[:output]
else
output_class = RequestLogAnalyzer::Output.const_get(options[:output])
end
output_sort = options[:report_sort].split(',').map { |s| s.to_sym }
output_amount = options[:report_amount] == 'all' ? :all : options[:report_amount].to_i
if options[:file]
output_object = %w( File StringIO ).include?(options[:file].class.name) ? options[:file] : File.new(options[:file], 'w+')
output_args = { width: 80, color: false, characters: :ascii, sort: output_sort, amount: output_amount }
elsif options[:mail]
output_object = RequestLogAnalyzer::Mailer.new(options[:mail], options[:mailhost], subject: options[:mailsubject], from: options[:mailfrom], from_alias: options[:mailfrom_name])
output_args = { width: 80, color: false, characters: :ascii, sort: output_sort, amount: output_amount }
else
output_object = STDOUT
output_args = { width: options[:report_width].to_i, color: !options[:boring],
characters: (options[:boring] ? :ascii : :utf), sort: output_sort, amount: output_amount }
end
output_instance = output_class.new(output_object, output_args)
# Create the controller with the correct file format
if options[:format].is_a?(Hash)
file_format = RequestLogAnalyzer::FileFormat.load(options[:format].keys[0], options[:format].values[0])
else
file_format = RequestLogAnalyzer::FileFormat.load(options[:format])
end
# Kickstart the controller
controller =
Controller.new(options[:source].new(file_format,
source_files: options[:source_files],
parse_strategy: options[:parse_strategy]),
output: output_instance,
database: options[:database], # FUGLY!
yaml: options[:yaml],
reset_database: options[:reset_database],
no_progress: options[:no_progress],
silent: options[:silent]
)
# register filters
if options[:after] || options[:before]
filter_options = {}
[:after, :before].each do |filter|
case options[filter]
when Date, DateTime, Time
filter_options[filter] = options[filter]
when String
filter_options[filter] = DateTime.parse(options[filter])
end
end
controller.add_filter(:timespan, filter_options)
end
if options[:reject]
options[:reject].each do |(field, value)|
controller.add_filter(:field, mode: :reject, field: field, value: value)
end
end
if options[:select]
options[:select].each do |(field, value)|
controller.add_filter(:field, mode: :select, field: field, value: value)
end
end
# register aggregators
options[:aggregator].each { |agg| controller.add_aggregator(agg) }
controller.add_aggregator(:summarizer) if options[:aggregator].empty?
controller.add_aggregator(:echo) if options[:debug]
controller.add_aggregator(:database_inserter) if options[:database] && !options[:aggregator].include?('database')
file_format.setup_environment(controller)
controller
end
# Builds a new Controller for the given log file format.
# <tt>format</tt> Logfile format. Defaults to :rails
# Options are passed on to the LogParser.
# * <tt>:database</tt> Database the controller should use.
# * <tt>:yaml</tt> Yaml Dump the contrller should use.
# * <tt>:output</tt> All report outputs get << through this output.
# * <tt>:no_progress</tt> No progress bar
# * <tt>:silent</tt> Minimal output, errors only
def initialize(source, options = {})
@source = source
@options = options
@aggregators = []
@filters = []
@output = options[:output]
@interrupted = false
# Register the request format for this session after checking its validity
fail 'Invalid file format!' unless @source.file_format.valid?
# Install event handlers for wrnings, progress updates and source changes
@source.warning = lambda { |type, message, lineno| @aggregators.each { |agg| agg.warning(type, message, lineno) } }
@source.progress = lambda { |message, value| handle_progress(message, value) } unless options[:no_progress]
@source.source_changes = lambda { |change, filename| handle_source_change(change, filename) }
end
# Progress function.
# Expects :started with file, :progress with current line and :finished or :interrupted when done.
# <tt>message</tt> Current state (:started, :finished, :interrupted or :progress).
# <tt>value</tt> File or current line.
def handle_progress(message, value = nil)
case message
when :started
@progress_bar = CommandLine::ProgressBar.new(File.basename(value), File.size(value), STDERR)
when :finished
@progress_bar.finish
@progress_bar = nil
when :interrupted
if @progress_bar
@progress_bar.halt
@progress_bar = nil
end
when :progress
@progress_bar.set(value)
end
end
# Source change handler
def handle_source_change(change, filename)
@aggregators.each { |agg| agg.source_change(change, File.expand_path(filename, Dir.pwd)) }
end
# Adds an aggregator to the controller. The aggregator will be called for every request
# that is parsed from the provided sources (see add_source)
def add_aggregator(agg)
agg = RequestLogAnalyzer::Aggregator.const_get(RequestLogAnalyzer.to_camelcase(agg)) if agg.is_a?(String) || agg.is_a?(Symbol)
@aggregators << agg.new(@source, @options)
end
alias_method :>>, :add_aggregator
# Adds a request filter to the controller.
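# === Example
# A hedged sketch mirroring what build does internally (the `controller`
# handle and the values are illustrative assumptions):
#   controller.add_filter(:timespan, :after => DateTime.parse('2024-01-01'))
#   controller.add_filter(:field, :mode => :reject, :field => :controller, :value => 'status')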
def add_filter(filter, filter_options = {})
filter = RequestLogAnalyzer::Filter.const_get(RequestLogAnalyzer.to_camelcase(filter)) if filter.is_a?(Symbol)
@filters << filter.new(source.file_format, @options.merge(filter_options))
end
# Push a request through the entire filterchain (@filters).
# <tt>request</tt> The request to filter.
# Returns the filtered request or nil.
def filter_request(request)
@filters.each do |filter|
request = filter.filter(request)
return nil if request.nil?
end
request
end
# Push a request to all the aggregators (@aggregators).
# <tt>request</tt> The request to push to the aggregators.
def aggregate_request(request)
return false unless request
@aggregators.each { |agg| agg.aggregate(request) }
true
end
# Runs RequestLogAnalyzer
# 1. Call prepare on every aggregator
# 2. Generate requests from source object
# 3. Filter out unwanted requests
# 4. Call aggregate for remaning requests on every aggregator
# 4. Call finalize on every aggregator
# 5. Call report on every aggregator
# 6. Finalize Source
def install_signal_handlers
Signal.trap('INT') do
handle_progress(:interrupted)
puts 'Caught interrupt! Stopping parsing...'
@interrupted = true
end
end
end
|
dwaite/cookiejar | lib/cookiejar/jar.rb | CookieJar.Jar.get_cookie_header | ruby | def get_cookie_header(request_uri, opts = {})
cookies = get_cookies request_uri, opts
ver = [[], []]
cookies.each do |cookie|
ver[cookie.version] << cookie
end
if ver[1].empty?
# can do a netscape-style cookie header, relish the opportunity
cookies.map(&:to_s).join ';'
else
# build a RFC 2965-style cookie header. Split the cookies into
# version 0 and 1 groups so that we can reuse the '$Version' header
result = ''
unless ver[0].empty?
result << '$Version=0;'
result << ver[0].map do |cookie|
(cookie.to_s 1, false)
end.join(';')
# separate version 0 and 1 with a comma
result << ','
end
result << '$Version=1;'
ver[1].map do |cookie|
result << (cookie.to_s 1, false)
end
result
end
end | Given a request URI, return a string Cookie header. Cookies will be in
order per RFC 2965 - sorted by longest path length, but otherwise
unordered.
@param [String, URI] request_uri the address the HTTP request will be
sent to
@param [Hash] opts options controlling returned cookies
@option opts [Boolean] :script (false) Cookies marked HTTP-only will be
ignored if true
@return String value of the Cookie header which should be sent on the
HTTP request | train | https://github.com/dwaite/cookiejar/blob/c02007c13c93f6a71ae71c2534248a728b2965dd/lib/cookiejar/jar.rb#L254-L281 | class Jar
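# === Example
# A hedged usage sketch; the URI, cookie value, and the indicated return
# value are illustrative, not taken from this record:
#   jar = Jar.new
#   jar.set_cookie 'http://example.com/', 'session=abc123; path=/'
#   jar.get_cookie_header 'http://example.com/' # => "session=abc123"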
# Create a new empty Jar
def initialize
@domains = {}
end
# Given a request URI and a literal Set-Cookie header value, attempt to
# add the cookie(s) to the cookie store.
#
# @param [String, URI] request_uri the resource returning the header
# @param [String] cookie_header_values the contents of the Set-Cookie header(s)
# @return [Cookie] which was created and stored
# @raise [InvalidCookieError] if the cookie header did not validate
def set_cookie(request_uri, cookie_header_values)
cookie_header_values.split(/, (?=[\w]+=)/).each do |cookie_header_value|
cookie = Cookie.from_set_cookie request_uri, cookie_header_value
add_cookie cookie
end
end
# Given a request URI and a literal Set-Cookie2 header value, attempt to
# add the cookie to the cookie store.
#
# @param [String, URI] request_uri the resource returning the header
# @param [String] cookie_header_value the contents of the Set-Cookie2
# @return [Cookie] which was created and stored
# @raise [InvalidCookieError] if the cookie header did not validate
def set_cookie2(request_uri, cookie_header_value)
cookie = Cookie.from_set_cookie2 request_uri, cookie_header_value
add_cookie cookie
end
# Given a request URI and some HTTP headers, attempt to add the cookie(s)
# (from Set-Cookie or Set-Cookie2 headers) to the cookie store. If a
# cookie is defined (by equivalent name, domain, and path) via Set-Cookie
# and Set-Cookie2, the Set-Cookie version is ignored.
#
# @param [String, URI] request_uri the resource returning the header
# @param [Hash<String,[String,Array<String>]>] http_headers a Hash
# which may have a key of "Set-Cookie" or "Set-Cookie2", and values of
# either strings or arrays of strings
# @return [Array<Cookie>,nil] the cookies created, or nil if none found.
# @raise [InvalidCookieError] if one of the cookie headers contained
# invalid formatting or data
def set_cookies_from_headers(request_uri, http_headers)
set_cookie_key = http_headers.keys.detect { |k| /\ASet-Cookie\Z/i.match k }
cookies = gather_header_values http_headers[set_cookie_key] do |value|
begin
Cookie.from_set_cookie request_uri, value
rescue InvalidCookieError
end
end
set_cookie2_key = http_headers.keys.detect { |k| /\ASet-Cookie2\Z/i.match k }
cookies += gather_header_values(http_headers[set_cookie2_key]) do |value|
begin
Cookie.from_set_cookie2 request_uri, value
rescue InvalidCookieError
end
end
# build the list of cookies, using a Jar. Since Set-Cookie2 values
# come second, they will replace the Set-Cookie versions.
jar = Jar.new
cookies.each do |cookie|
jar.add_cookie cookie
end
cookies = jar.to_a
# now add them all to our own store.
cookies.each do |cookie|
add_cookie cookie
end
cookies
end
# Add a pre-existing cookie object to the jar.
#
# @param [Cookie] cookie a pre-existing cookie object
# @return [Cookie] the cookie added to the store
def add_cookie(cookie)
domain_paths = find_or_add_domain_for_cookie cookie
add_cookie_to_path domain_paths, cookie
cookie
end
# Return an array of all cookie objects in the jar
#
# @return [Array<Cookie>] all cookies. Includes any expired cookies
# which have not yet been removed with expire_cookies
def to_a
result = []
@domains.values.each do |paths|
paths.values.each do |cookies|
cookies.values.inject result, :<<
end
end
result
end
# Return a JSON 'object' for the various data values. Allows for
# persistence of the cookie information
#
# @param [Array] a options controlling output JSON text
# (usually a State and a depth)
# @return [String] JSON representation of object data
def to_json(*a)
{
'json_class' => self.class.name,
'cookies' => to_a.to_json(*a)
}.to_json(*a)
end
# Create a new Jar from a JSON-backed hash
#
# @param o [Hash] the expanded JSON object
# @return [CookieJar] a new CookieJar instance
def self.json_create(o)
o = JSON.parse(o) if o.is_a? String
o = o['cookies'] if o.is_a? Hash
cookies = o.inject([]) do |result, cookie_json|
result << (Cookie.json_create cookie_json)
end
from_a cookies
end
# Create a new Jar from an array of Cookie objects. Expired cookies
# will still be added to the archive, and conflicting cookies will
# be overwritten by the last cookie in the array.
#
# @param [Array<Cookie>] cookies array of cookie objects
# @return [CookieJar] a new CookieJar instance
def self.from_a(cookies)
jar = new
cookies.each do |cookie|
jar.add_cookie cookie
end
jar
end
# Look through the jar for any cookies which have passed their expiration
# date, or session cookies from a previous session
#
# @param session [Boolean] whether session cookies should be expired,
# or just cookies past their expiration date.
def expire_cookies(session = false)
@domains.delete_if do |_domain, paths|
paths.delete_if do |_path, cookies|
cookies.delete_if do |_cookie_name, cookie|
cookie.expired? || (session && cookie.session?)
end
cookies.empty?
end
paths.empty?
end
end
# Given a request URI, return a sorted list of Cookie objects. Cookies
# will be in order per RFC 2965 - sorted by longest path length, but
# otherwise unordered.
#
# @param [String, URI] request_uri the address the HTTP request will be
# sent to. This must be a full URI, i.e. it must include the protocol;
# if you pass digi.ninja it will fail to find the domain, so you must pass
# http://digi.ninja
# @param [Hash] opts options controlling returned cookies
# @option opts [Boolean] :script (false) Cookies marked HTTP-only will be
# ignored if true
# @return [Array<Cookie>] cookies which should be sent in the HTTP request
def get_cookies(request_uri, opts = {})
uri = to_uri request_uri
hosts = Cookie.compute_search_domains uri
return [] if hosts.nil?
path = if uri.path == ''
'/'
else
uri.path
end
results = []
hosts.each do |host|
domain = find_domain host
domain.each do |apath, cookies|
next unless path.start_with? apath
results += cookies.values.select do |cookie|
cookie.should_send? uri, opts[:script]
end
end
end
# Sort by path length, longest first
results.sort do |lhs, rhs|
rhs.path.length <=> lhs.path.length
end
end
# Given a request URI, return a string Cookie header. Cookies will be in
# order per RFC 2965 - sorted by longest path length, but otherwise
# unordered.
#
# @param [String, URI] request_uri the address the HTTP request will be
# sent to
# @param [Hash] opts options controlling returned cookies
# @option opts [Boolean] :script (false) Cookies marked HTTP-only will be
# ignored if true
# @return String value of the Cookie header which should be sent on the
# HTTP request
protected
def gather_header_values(http_header_value, &_block)
result = []
if http_header_value.is_a? Array
http_header_value.each do |value|
result << yield(value)
end
elsif http_header_value.is_a? String
result << yield(http_header_value)
end
result.compact
end
def to_uri(request_uri)
(request_uri.is_a? URI) ? request_uri : (URI.parse request_uri)
end
def find_domain(host)
@domains[host] || {}
end
def find_or_add_domain_for_cookie(cookie)
@domains[cookie.domain] ||= {}
end
def add_cookie_to_path(paths, cookie)
path_entry = (paths[cookie.path] ||= {})
path_entry[cookie.name] = cookie
end
end
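A short round trip through the jar API above; the host and cookie values are illustrative:

jar = CookieJar::Jar.new
jar.set_cookie 'http://example.com/', 'sid=abc123; Path=/'
jar.get_cookies 'http://example.com/page'        # => [#<CookieJar::Cookie ...>], longest paths first
jar.get_cookie_header 'http://example.com/page'  # => "sid=abc123" (netscape-style, all version 0)
jar.expire_cookies                               # drops anything past its expiry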
|
hashicorp/vault-ruby | lib/vault/api/logical.rb | Vault.Logical.list | ruby | def list(path, options = {})
headers = extract_headers!(options)
json = client.list("/v1/#{encode_path(path)}", {}, headers)
json[:data][:keys] || []
rescue HTTPError => e
return [] if e.code == 404
raise
end | List the secrets at the given path, if the path supports listing. If
the path does not exist, an empty list is returned.
@example
Vault.logical.list("secret") #=> [#<Vault::Secret>, #<Vault::Secret>, ...]
@param [String] path
the path to list
@return [Array<String>] | train | https://github.com/hashicorp/vault-ruby/blob/02f0532a802ba1a2a0d8703a4585dab76eb9d864/lib/vault/api/logical.rb#L26-L33 | class Logical < Request
# List the secrets at the given path, if the path supports listing. If
# the path does not exist, an empty list is returned.
#
# @example
# Vault.logical.list("secret") #=> [#<Vault::Secret>, #<Vault::Secret>, ...]
#
# @param [String] path
# the path to list
#
# @return [Array<String>]
# Read the secret at the given path. If the secret does not exist, +nil+
# will be returned.
#
# @example
# Vault.logical.read("secret/password") #=> #<Vault::Secret lease_id="">
#
# @param [String] path
# the path to read
#
# @return [Secret, nil]
def read(path, options = {})
headers = extract_headers!(options)
json = client.get("/v1/#{encode_path(path)}", {}, headers)
return Secret.decode(json)
rescue HTTPError => e
return nil if e.code == 404
raise
end
# Write the secret at the given path with the given data. Note that the
# data must be a {Hash}!
#
# @example
# Vault.logical.write("secret/password", value: "secret") #=> #<Vault::Secret lease_id="">
#
# @param [String] path
# the path to write
# @param [Hash] data
# the data to write
#
# @return [Secret]
def write(path, data = {}, options = {})
headers = extract_headers!(options)
json = client.put("/v1/#{encode_path(path)}", JSON.fast_generate(data), headers)
if json.nil?
return true
else
return Secret.decode(json)
end
end
# Delete the secret at the given path. If the secret does not exist, vault
# will still return true.
#
# @example
# Vault.logical.delete("secret/password") #=> true
#
# @param [String] path
# the path to delete
#
# @return [true]
def delete(path)
client.delete("/v1/#{encode_path(path)}")
return true
end
# Unwrap the data stored against the given token. If the secret does not
# exist, `nil` will be returned.
#
# @example
# Vault.logical.unwrap("f363dba8-25a7-08c5-430c-00b2367124e6") #=> #<Vault::Secret lease_id="">
#
# @param [String] wrapper
# the token to use when unwrapping the value
#
# @return [Secret, nil]
def unwrap(wrapper)
client.with_token(wrapper) do |client|
json = client.get("/v1/cubbyhole/response")
secret = Secret.decode(json)
# If there is nothing in the cubbyhole, return early.
if secret.nil? || secret.data.nil? || secret.data[:response].nil?
return nil
end
# Extract the response and parse it into a new secret.
json = JSON.parse(secret.data[:response], symbolize_names: true)
secret = Secret.decode(json)
return secret
end
rescue HTTPError => e
return nil if e.code == 404
raise
end
# Unwrap a token in a wrapped response given the temporary token.
#
# @example
# Vault.logical.unwrap("f363dba8-25a7-08c5-430c-00b2367124e6") #=> "0f0f40fd-06ce-4af1-61cb-cdc12796f42b"
#
# @param [String, Secret] wrapper
# the token to unwrap
#
# @return [String, nil]
def unwrap_token(wrapper)
# If provided a secret, grab the token. This is really just to make the
# API a bit nicer.
if wrapper.is_a?(Secret)
wrapper = wrapper.wrap_info.token
end
# Unwrap
response = unwrap(wrapper)
# If nothing was there, return nil
if response.nil? || response.auth.nil?
return nil
end
return response.auth.client_token
rescue HTTPError => e
raise
end
end
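A usage sketch, assuming a client constructed elsewhere in the gem and a KV mount at secret/; the address and token are placeholders:

vault = Vault::Client.new(address: 'http://127.0.0.1:8200', token: 'dev-token')
vault.logical.write('secret/password', value: 's3cr3t')
vault.logical.read('secret/password').data[:value]  # => "s3cr3t"
vault.logical.list('secret')                        # => ["password"]
vault.logical.delete('secret/password')             # => true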
|
koraktor/metior | lib/metior/adapter.rb | Metior::Adapter.ClassMethods.register_for | ruby | def register_for(vcs)
vcs = Metior.find_vcs vcs
vcs.register_adapter id, self
class_variable_set :@@vcs, vcs
end | Registers this adapter with a VCS
@param [Symbol] vcs The name of the VCS to register this adapter
with | train | https://github.com/koraktor/metior/blob/02da0f330774c91e1a7325a5a7edbe696f389f95/lib/metior/adapter.rb#L56-L60 | module ClassMethods
# Missing constants may indicate that the adapter is not yet initialized
#
# Trying to access either the `Actor`, `Commit` or `Repository` class
# in an adapter `Module` will trigger auto-loading first.
#
# @param [Symbol] const The symbolic name of the missing constant
# @see #init
def const_missing(const)
init if [:Actor, :Commit, :Repository].include?(const)
super unless const_defined? const
const_get const
end
# This initializes the adapter `Module`
#
# This requires the `Actor`, `Commit` and `Repository` classes for that
# adapter implementation.
def init
path = id.to_s
require "metior/adapter/#{path}/actor"
require "metior/adapter/#{path}/commit"
require "metior/adapter/#{path}/repository"
self
end
# Marks one or more features as not supported by the adapter
#
# @example Mark this adapter as not supporting file stats
# not_supporting :file_stats
# @param [Array<Symbol>] features The features that are not supported
# @see #supports?
def not_supporting(*features)
features.each do |feature|
class_variable_get(:@@features)[feature] = false
end
end
# Registers this adapter with a VCS
#
# @param [Symbol] vcs The name of the VCS to register this adapter
# with
# Checks if a specific feature is supported by the adapter
#
# @param [Symbol] feature The feature to check
# @return [true, false] `true` if the feature is supported
# @see #not_supporting
# @see VCS#supports?
def supports?(feature)
class_variable_get(:@@features)[feature] == true
end
# Returns the VCS of the adapter
#
# @return [VCS] The VCS of the adapter
def vcs
class_variable_get :@@vcs
end
end
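How an adapter might use these class methods; this sketch assumes the surrounding Metior::Adapter module extends ClassMethods (and initializes the @@features table) when included, which is not shown in this excerpt:

module Metior::Adapter::Grit
  include Metior::Adapter
  register_for :git          # wires this adapter up to the Git VCS
  not_supporting :file_stats # file-stats queries will report as unsupported
end

Metior::Adapter::Grit.supports?(:file_stats) # => false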
|
sds/haml-lint | lib/haml_lint/linter_selector.rb | HamlLint.LinterSelector.run_linter_on_file? | ruby | def run_linter_on_file?(config, linter, file)
linter_config = config.for_linter(linter)
if linter_config['include'].any? &&
!HamlLint::Utils.any_glob_matches?(linter_config['include'], file)
return false
end
if HamlLint::Utils.any_glob_matches?(linter_config['exclude'], file)
return false
end
true
end | Whether to run the given linter against the specified file.
@param config [HamlLint::Configuration]
@param linter [HamlLint::Linter]
@param file [String]
@return [Boolean] | train | https://github.com/sds/haml-lint/blob/024c773667e54cf88db938c2b368977005d70ee8/lib/haml_lint/linter_selector.rb#L64-L77 | class LinterSelector
# Creates a selector using the given configuration and additional options.
#
# @param config [HamlLint::Configuration]
# @param options [Hash]
def initialize(config, options)
@config = config
@options = options
end
# Returns the set of linters to run against the given file.
#
# @param file [String]
# @raise [HamlLint::Exceptions::NoLintersError] when no linters are enabled
# @return [Array<HamlLint::Linter>]
def linters_for_file(file)
@linters ||= extract_enabled_linters(@config, @options)
@linters.select { |linter| run_linter_on_file?(@config, linter, file) }
end
private
# Returns a list of linters that are enabled given the specified
# configuration and additional options.
#
# @param config [HamlLint::Configuration]
# @param options [Hash]
# @return [Array<HamlLint::Linter>]
def extract_enabled_linters(config, options)
included_linters =
LinterRegistry.extract_linters_from(options.fetch(:included_linters, []))
included_linters = LinterRegistry.linters if included_linters.empty?
excluded_linters =
LinterRegistry.extract_linters_from(options.fetch(:excluded_linters, []))
# After filtering out explicitly included/excluded linters, only include
# linters which are enabled in the configuration
linters = (included_linters - excluded_linters).map do |linter_class|
linter_config = config.for_linter(linter_class)
linter_class.new(linter_config) if linter_config['enabled']
end.compact
# Highlight condition where all linters were filtered out, as this was
# likely a mistake on the user's part
if linters.empty?
raise HamlLint::Exceptions::NoLintersError, 'No linters specified'
end
linters
end
# Whether to run the given linter against the specified file.
#
# @param config [HamlLint::Configuration]
# @param linter [HamlLint::Linter]
# @param file [String]
# @return [Boolean]
end
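A sketch of driving the selector directly; the configuration loader entry point is an assumption about the rest of the gem:

config = HamlLint::ConfigurationLoader.load_applicable_config
selector = HamlLint::LinterSelector.new(config, included_linters: %w[AltText])
selector.linters_for_file('app/views/users/show.html.haml')  # => [#<HamlLint::Linter::AltText ...>]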
|
kontena/kontena | cli/lib/kontena/client.rb | Kontena.Client.parse_response | ruby | def parse_response(response)
check_version_and_warn(response.headers[X_KONTENA_VERSION])
if response.headers[CONTENT_TYPE] =~ JSON_REGEX
parse_json(response)
else
response.body
end
end | Parse response. If the response is JSON, returns a Hash representation.
Otherwise returns the raw body.
@param [Excon::Response]
@return [Hash,String] | train | https://github.com/kontena/kontena/blob/5cb5b4457895985231ac88e78c8cbc5a8ffb5ec7/cli/lib/kontena/client.rb#L486-L494 | class Client
CLIENT_ID = ENV['KONTENA_CLIENT_ID'] || '15faec8a7a9b4f1e8b7daebb1307f1d8'.freeze
CLIENT_SECRET = ENV['KONTENA_CLIENT_SECRET'] || 'fb8942ae00da4c7b8d5a1898effc742f'.freeze
CONTENT_URLENCODED = 'application/x-www-form-urlencoded'.freeze
CONTENT_JSON = 'application/json'.freeze
JSON_REGEX = /application\/(.+?\+)?json/.freeze
CONTENT_TYPE = 'Content-Type'.freeze
X_KONTENA_VERSION = 'X-Kontena-Version'.freeze
ACCEPT = 'Accept'.freeze
AUTHORIZATION = 'Authorization'.freeze
ACCEPT_ENCODING = 'Accept-Encoding'.freeze
GZIP = 'gzip'.freeze
attr_accessor :default_headers
attr_accessor :path_prefix
attr_reader :http_client
attr_reader :last_response
attr_reader :options
attr_reader :token
attr_reader :logger
attr_reader :api_url
attr_reader :host
# Initialize api client
#
# @param [String] api_url
# @param [Kontena::Cli::Config::Token,Hash] access_token
# @param [Hash] options
def initialize(api_url, token = nil, options = {})
require 'json'
require 'excon'
require 'uri'
require 'base64'
require 'socket'
require 'openssl'
require 'uri'
require 'time'
require 'kontena/errors'
require 'kontena/cli/version'
require 'kontena/cli/config'
@api_url, @token, @options = api_url, token, options
uri = URI.parse(@api_url)
@host = uri.host
@logger = Kontena.logger
@options[:default_headers] ||= {}
excon_opts = {
omit_default_port: true,
connect_timeout: ENV["EXCON_CONNECT_TIMEOUT"] ? ENV["EXCON_CONNECT_TIMEOUT"].to_i : 10,
read_timeout: ENV["EXCON_READ_TIMEOUT"] ? ENV["EXCON_READ_TIMEOUT"].to_i : 30,
write_timeout: ENV["EXCON_WRITE_TIMEOUT"] ? ENV["EXCON_WRITE_TIMEOUT"].to_i : 10,
ssl_verify_peer: ignore_ssl_errors? ? false : true,
middlewares: Excon.defaults[:middlewares] + [Excon::Middleware::Decompress]
}
if Kontena.debug?
require 'kontena/debug_instrumentor'
excon_opts[:instrumentor] = Kontena::DebugInstrumentor
end
excon_opts[:ssl_ca_file] = @options[:ssl_cert_path]
excon_opts[:ssl_verify_peer_host] = @options[:ssl_subject_cn]
debug { "Excon opts: #{excon_opts.inspect}" }
@http_client = Excon.new(api_url, excon_opts)
@default_headers = {
ACCEPT => CONTENT_JSON,
CONTENT_TYPE => CONTENT_JSON,
'User-Agent' => "kontena-cli/#{Kontena::Cli::VERSION}"
}.merge(options[:default_headers])
if token
if token.kind_of?(String)
@token = { 'access_token' => token }
else
@token = token
end
end
@api_url = api_url
@path_prefix = options[:prefix] || '/v1/'
end
def debug(&block)
logger.debug("CLIENT", &block)
end
def error(&block)
logger.error("CLIENT", &block)
end
# Generates a header hash for HTTP basic authentication.
# Defaults to using client_id and client_secret as user/pass
#
# @param [String] username
# @param [String] password
# @return [Hash] auth_header_hash
def basic_auth_header(user = nil, pass = nil)
user ||= client_id
pass ||= client_secret
{
AUTHORIZATION =>
"Basic #{Base64.encode64([user, pass].join(':')).gsub(/[\r\n]/, '')}"
}
end
# Generates a bearer token authentication header hash if a token object is
# available. Otherwise returns an empty hash.
#
# @return [Hash] authentication_header
def bearer_authorization_header
if token && token['access_token']
{AUTHORIZATION => "Bearer #{token['access_token']}"}
else
{}
end
end
# Requests the path supplied as an argument and returns true if the request was a success.
# For checking if the current authentication is valid.
#
# @param [String] token_verify_path a path that requires authentication
# @return [Boolean]
def authentication_ok?(token_verify_path)
return false unless token
return false unless token['access_token']
return false unless token_verify_path
final_path = token_verify_path.gsub(/\:access\_token/, token['access_token'])
debug { "Requesting user info from #{final_path}" }
request(path: final_path)
true
rescue => ex
error { "Authentication verification exception" }
error { ex }
false
end
# Calls the code exchange endpoint in token's config to exchange an authorization_code
# to a access_token
def exchange_code(code)
return nil unless token_account
return nil unless token_account['token_endpoint']
response = request(
http_method: token_account['token_method'].downcase.to_sym,
path: token_account['token_endpoint'],
headers: { CONTENT_TYPE => token_account['token_post_content_type'] },
body: {
'grant_type' => 'authorization_code',
'code' => code,
'client_id' => Kontena::Client::CLIENT_ID,
'client_secret' => Kontena::Client::CLIENT_SECRET
},
expects: [200,201],
auth: false
)
response['expires_at'] ||= in_to_at(response['expires_in'])
response
end
# Return server version from a Kontena master by requesting '/'
#
# @return [String] version_string
def server_version
request(auth: false, expects: 200)['version']
rescue => ex
error { "Server version exception" }
error { ex }
nil
end
# OAuth2 client_id from ENV KONTENA_CLIENT_ID or client CLIENT_ID constant
#
# @return [String]
def client_id
ENV['KONTENA_CLIENT_ID'] || CLIENT_ID
end
# OAuth2 client_secret from ENV KONTENA_CLIENT_SECRET or client CLIENT_SECRET constant
#
# @return [String]
def client_secret
ENV['KONTENA_CLIENT_SECRET'] || CLIENT_SECRET
end
# Get request
#
# @param [String] path
# @param [Hash,NilClass] params
# @param [Hash] headers
# @return [Hash]
def get(path, params = nil, headers = {}, auth = true)
request(path: path, query: params, headers: headers, auth: auth)
end
# Post request
#
# @param [String] path
# @param [Object] obj
# @param [Hash] params
# @param [Hash] headers
# @return [Hash]
def post(path, obj, params = {}, headers = {}, auth = true)
request(http_method: :post, path: path, body: obj, query: params, headers: headers, auth: auth)
end
# Put request
#
# @param [String] path
# @param [Object] obj
# @param [Hash] params
# @param [Hash] headers
# @return [Hash]
def put(path, obj, params = {}, headers = {}, auth = true)
request(http_method: :put, path: path, body: obj, query: params, headers: headers, auth: auth)
end
# Patch request
#
# @param [String] path
# @param [Object] obj
# @param [Hash] params
# @param [Hash] headers
# @return [Hash]
def patch(path, obj, params = {}, headers = {}, auth = true)
request(http_method: :patch, path: path, body: obj, query: params, headers: headers, auth: auth)
end
# Delete request
#
# @param [String] path
# @param [Hash,String] body
# @param [Hash] params
# @param [Hash] headers
# @return [Hash]
def delete(path, body = nil, params = {}, headers = {}, auth = true)
request(http_method: :delete, path: path, body: body, query: params, headers: headers, auth: auth)
end
# Get stream request
#
# @param [String] path
# @param [Lambda] response_block
# @param [Hash,NilClass] params
# @param [Hash] headers
def get_stream(path, response_block, params = nil, headers = {}, auth = true)
request(path: path, query: params, headers: headers, response_block: response_block, auth: auth, gzip: false)
end
def token_expired?
return false unless token
if token.respond_to?(:expired?)
token.expired?
elsif token['expires_at'].to_i > 0
token['expires_at'].to_i < Time.now.utc.to_i
else
false
end
end
# Perform an HTTP request. Will try to refresh the access token and retry if it's
# expired or if the server responds with HTTP 401.
#
# Automatically parses a JSON response into a hash.
#
# After the request has been performed, the response can be inspected using
# client.last_response.
#
# @param http_method [Symbol] :get, :post, etc
# @param path [String] if it starts with / then prefix won't be used.
# @param body [Hash, String] will be encoded using #encode_body
# @param query [Hash] url query parameters
# @param headers [Hash] extra headers for request.
# @param response_block [Proc] for streaming requests, must respond to #call
# @param expects [Array] raises unless response status code matches this list.
# @param auth [Boolean] use token authentication default = true
# @return [Hash, String] response parsed response object
def request(http_method: :get, path:'/', body: nil, query: {}, headers: {}, response_block: nil, expects: [200, 201, 204], host: nil, port: nil, auth: true, gzip: true)
retried ||= false
if auth && token_expired?
raise Excon::Error::Unauthorized, "Token expired or not valid, you need to login again, use: kontena #{token_is_for_master? ? "master" : "cloud"} login"
end
request_headers = request_headers(headers, auth: auth, gzip: gzip)
if body.nil?
body_content = ''
request_headers.delete(CONTENT_TYPE)
else
body_content = encode_body(body, request_headers[CONTENT_TYPE])
request_headers.merge!('Content-Length' => body_content.bytesize)
end
uri = URI.parse(path)
host_options = {}
if uri.host
host_options[:host] = uri.host
host_options[:port] = uri.port
host_options[:scheme] = uri.scheme
path = uri.request_uri
else
host_options[:host] = host if host
host_options[:port] = port if port
end
request_options = {
method: http_method,
expects: Array(expects),
path: path_with_prefix(path),
headers: request_headers,
body: body_content,
query: query
}.merge(host_options)
request_options.merge!(response_block: response_block) if response_block
# Store the response into client.last_response
@last_response = http_client.request(request_options)
parse_response(@last_response)
rescue Excon::Error::Unauthorized
if token
debug { 'Server reports access token expired' }
if retried || !token || !token['refresh_token']
raise Kontena::Errors::StandardError.new(401, 'The access token has expired and needs to be refreshed')
end
retried = true
retry if refresh_token
end
raise Kontena::Errors::StandardError.new(401, 'Unauthorized')
rescue Excon::Error::HTTPStatus => error
if error.response.headers['Content-Encoding'] == 'gzip'
error.response.body = Zlib::GzipReader.new(StringIO.new(error.response.body)).read
end
debug { "Request #{error.request[:method].upcase} #{error.request[:path]}: #{error.response.status} #{error.response.reason_phrase}: #{error.response.body}" }
handle_error_response(error.response)
end
# Build a token refresh request param hash
#
# @return [Hash]
def refresh_request_params
{
refresh_token: token['refresh_token'],
grant_type: 'refresh_token',
client_id: client_id,
client_secret: client_secret
}
end
# Accessor to token's account settings
def token_account
return {} unless token
if token.respond_to?(:account)
token.account
elsif token.kind_of?(Hash) && token['account'].kind_of?(String)
config.find_account(token['account'])
else
{}
end
rescue => ex
error { "Access token refresh exception" }
error { ex }
false
end
# Perform refresh token request to auth provider.
# Updates the client's Token object and writes changes to
# configuration.
#
# @param [Boolean] use_basic_auth? When true, use basic auth authentication header
# @return [Boolean] success?
def refresh_token
debug { "Performing token refresh" }
return false if token.nil?
return false if token['refresh_token'].nil?
uri = URI.parse(token_account['token_endpoint'])
endpoint_data = { path: uri.path }
endpoint_data[:host] = uri.host if uri.host
endpoint_data[:port] = uri.port if uri.port
debug { "Token refresh endpoint: #{endpoint_data.inspect}" }
return false unless endpoint_data[:path]
response = request(
{
http_method: token_account['token_method'].downcase.to_sym,
body: refresh_request_params,
headers: {
CONTENT_TYPE => token_account['token_post_content_type']
}.merge(
token_account['code_requires_basic_auth'] ? basic_auth_header : {}
),
expects: [200, 201, 400, 401, 403],
auth: false
}.merge(endpoint_data)
)
if response && response['access_token']
debug { "Got response to refresh request" }
token['access_token'] = response['access_token']
token['refresh_token'] = response['refresh_token']
token['expires_at'] = in_to_at(response['expires_in'])
token.config.write if token.respond_to?(:config)
true
else
debug { "Got null or bad response to refresh request: #{last_response.inspect}" }
false
end
rescue => ex
error { "Access token refresh exception" }
error { ex }
false
end
private
# Returns true if the token object belongs to a master
#
# @return [Boolean]
def token_is_for_master?
token_account['name'] == 'master'
end
# Get prefixed request path unless path starts with /
#
# @param [String] path
# @return [String]
def path_with_prefix(path)
path.to_s.start_with?('/') ? path : "#{path_prefix}#{path}"
end
##
# Build request headers. Removes empty headers.
# @example
# request_headers('Authorization' => nil)
#
# @param [Hash] headers
# @return [Hash]
def request_headers(headers = {}, auth: true, gzip: true)
headers = default_headers.merge(headers)
headers.merge!(bearer_authorization_header) if auth
headers[ACCEPT_ENCODING] = GZIP if gzip
headers.reject{|_,v| v.nil? || (v.respond_to?(:empty?) && v.empty?)}
end
##
# Encode body based on content type.
#
# @param [Object] body
# @param [String] content_type
# @return [String] encoded_content
def encode_body(body, content_type)
if content_type =~ JSON_REGEX # vnd.api+json should pass as json
dump_json(body)
elsif content_type == CONTENT_URLENCODED && body.kind_of?(Hash)
URI.encode_www_form(body)
else
body
end
end
##
# Parse response. If the response is JSON, returns a Hash representation.
# Otherwise returns the raw body.
#
# @param [Excon::Response]
# @return [Hash,String]
def check_version_and_warn(server_version)
return nil if $VERSION_WARNING_ADDED
return nil unless server_version.to_s =~ /^\d+\.\d+\.\d+/
unless server_version[/^(\d+\.\d+)/, 1] == Kontena::Cli::VERSION[/^(\d+\.\d+)/, 1] # Just compare x.y
add_version_warning(server_version)
$VERSION_WARNING_ADDED = true
end
end
def add_version_warning(server_version)
at_exit do
warn Kontena.pastel.yellow("Warning: Server version is #{server_version}. You are using CLI version #{Kontena::Cli::VERSION}.")
end
end
# Parse json
#
# @param response [Excon::Response]
# @return [Hash,Object,NilClass]
def parse_json(response)
return nil if response.body.empty?
JSON.parse(response.body)
rescue => ex
raise Kontena::Errors::StandardError.new(520, "Invalid response JSON from server for #{response.path}: #{ex.class.name}: #{ex.message}")
end
# Dump json
#
# @param [Object] obj
# @return [String]
def dump_json(obj)
JSON.dump(obj)
end
# @return [Boolean]
def ignore_ssl_errors?
ENV['SSL_IGNORE_ERRORS'] == 'true' || options[:ignore_ssl_errors]
end
# @param [Excon::Response] response
def handle_error_response(response)
data = parse_response(response)
request_path = " (#{response.path})"
if data.is_a?(Hash) && data.has_key?('error') && data['error'].is_a?(Hash)
raise Kontena::Errors::StandardErrorHash.new(response.status, response.reason_phrase, data['error'])
elsif data.is_a?(Hash) && data.has_key?('errors') && data['errors'].is_a?(Array) && data['errors'].all? { |e| e.is_a?(Hash) }
error_with_status = data['errors'].find { |error| error.key?('status') }
if error_with_status
status = error_with_status['status']
else
status = response.status
end
raise Kontena::Errors::StandardErrorHash.new(status, response.reason_phrase, data)
elsif data.is_a?(Hash) && data.has_key?('error')
raise Kontena::Errors::StandardError.new(response.status, data['error'] + request_path)
elsif data.is_a?(String) && !data.empty?
raise Kontena::Errors::StandardError.new(response.status, data + request_path)
else
raise Kontena::Errors::StandardError.new(response.status, response.reason_phrase + request_path)
end
end
# Convert expires_in into expires_at
#
# @param [Fixnum] seconds_till_expiration
# @return [Fixnum] expires_at_unix_timestamp
def in_to_at(expires_in)
if expires_in.to_i < 1
0
else
Time.now.utc.to_i + expires_in.to_i
end
end
end
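A minimal sketch of the client in use; the master URL, token and endpoint names are placeholders:

client = Kontena::Client.new(
  'https://master.example.com',
  'access_token' => ENV['KONTENA_TOKEN']
)
client.get('grids')                    # parsed into a Hash by parse_response
client.post('grids', name: 'staging')  # body is JSON-encoded by encode_body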
|
radiant/radiant | app/helpers/radiant/application_helper.rb | Radiant.ApplicationHelper.pagination_for | ruby | def pagination_for(list, options={})
if list.respond_to? :total_pages
options = {
max_per_page: detail['pagination.max_per_page'] || 500,
depaginate: true
}.merge(options.symbolize_keys)
depaginate = options.delete(:depaginate) # supply depaginate: false to omit the 'show all' link
depagination_limit = options.delete(:max_per_page) # supply max_per_page: false to include the 'show all' link no matter how large the collection
html = will_paginate(list, will_paginate_options.merge(options))
if depaginate && list.total_pages > 1 && (!depagination_limit.blank? || list.total_entries <= depagination_limit.to_i)
html << content_tag(:div, link_to(t('show_all'), pp: 'all'), class: 'depaginate')
elsif depaginate && list.total_entries > depagination_limit.to_i
html = content_tag(:div, link_to("paginate", p: 1), class: 'pagination')
end
html
end
end | returns the usual set of pagination links.
options are passed through to will_paginate
and a 'show all' depagination link is added if relevant. | train | https://github.com/radiant/radiant/blob/5802d7bac2630a1959c463baa3aa7adcd0f497ee/app/helpers/radiant/application_helper.rb#L216-L232 | module ApplicationHelper
include Radiant::Admin::RegionsHelper
def detail
Radiant::Config
end
def default_page_title
title + ' - ' + subtitle
end
def title
detail['admin.title'] || 'Radiant CMS'
end
def subtitle
detail['admin.subtitle'] || 'Publishing for Small Teams'
end
def logged_in?
!current_user.nil?
end
def onsubmit_status(model)
model.new_record? ? t('creating_status', model: t(model.class.name.downcase)) : "#{I18n.t('saving_changes')}…"
end
def save_model_button(model, options = {})
model_name = model.class.name.underscore
human_model_name = model_name.humanize.titlecase
options[:label] ||= model.new_record? ?
t('buttons.create', name: t(model_name, default: human_model_name), default: 'Create ' + human_model_name) :
t('buttons.save_changes', default: 'Save Changes')
options[:class] ||= "button"
options[:accesskey] ||= 'S'
submit_tag options.delete(:label), options
end
def save_model_and_continue_editing_button(model)
submit_tag t('buttons.save_and_continue'), name: 'continue', class: 'button', accesskey: "s"
end
def current_item?(item)
if item.tab && item.tab.many? {|i| current_url?(i.relative_url) }
# Accept only stricter URL matches if more than one matches
current_page?(item.url)
else
current_url?(item.relative_url)
end
end
def current_tab?(tab)
@current_tab ||= tab if tab.any? {|item| current_url?(item.relative_url) }
@current_tab == tab
end
def current_url?(options)
url = case options
when Hash
url_for options
else
options.to_s
end
request.fullpath =~ Regexp.new('^' + Regexp.quote(clean(url)))
end
def clean(url)
uri = URI.parse(url)
uri.path.gsub(%r{/+}, '/').gsub(%r{/$}, '')
end
def nav_link_to(name, options)
if current_url?(options)
%{<strong>#{ link_to translate_with_default(name), options }</strong>}
else
link_to translate_with_default(name), options
end
end
def admin?
current_user and current_user.admin?
end
def designer?
current_user and (current_user.designer? or current_user.admin?)
end
def focus(field_name)
javascript_tag "Field.activate('#{field_name}');"
end
def updated_stamp(model)
unless model.new_record?
updated_by = (model.updated_by || model.created_by)
name = updated_by ? updated_by.name : nil
time = (model.updated_at || model.created_at)
if name or time
html = %{<p class="updated_line">#{t('timestamp.last_updated')} }
html << %{#{t('timestamp.by')} <strong>#{name}</strong> } if name
html << %{#{t('timestamp.at')} #{timestamp(time)}} if time
html << %{</p>}
html.html_safe
end
end
end
def timestamp(time)
# time.strftime("%I:%M %p on %B %e, %Y").sub("AM", 'am').sub("PM", 'pm')
I18n.localize(time, format: :timestamp)
end
def meta_visible(symbol)
v = case symbol
when :meta_more
not meta_errors?
when :meta, :meta_less
meta_errors?
end
v ? {} : {style: "display: none"}
end
def meta_errors?
false
end
def meta_label
meta_errors? ? 'Less' : 'More'
end
def toggle_javascript_for(id)
"Element.toggle('#{id}'); Element.toggle('more-#{id}'); Element.toggle('less-#{id}'); return false;"
end
def image(name, options = {})
image_tag(append_image_extension("admin/#{name}"), options)
end
def image_submit(name, options = {})
image_submit_tag(append_image_extension("admin/#{name}"), options)
end
def admin
require 'radiant/admin_ui'
Radiant::AdminUI.instance
end
def filter_options_for_select(selected=nil)
options_for_select([[t('select.none'), '']] + TextFilter.descendants_names, selected)
end
def body_classes
@body_classes ||= []
end
def nav_tabs
admin.nav
end
def translate_with_default(name)
t(name.underscore.downcase, default: name)
end
def available_locales_select
[[t('select.default'),'']] + Radiant::AvailableLocales.locales
end
def stylesheet_and_javascript_overrides
overrides = ''
if File.exist?("#{Rails.root}/public/stylesheets/admin/overrides.css") || File.exist?("#{Rails.root}/public/stylesheets/sass/admin/overrides.sass")
overrides << stylesheet_link_tag('admin/overrides')
end
if File.exist?("#{Rails.root}/public/javascripts/admin/overrides.js")
overrides << javascript_include_tag('admin/overrides')
end
overrides
end
# Returns a Gravatar URL associated with the email parameter.
# See: http://douglasfshearer.com/blog/gravatar-for-ruby-and-ruby-on-rails
def gravatar_url(email, options={})
# Default to highest rating. Rating can be one of G, PG, R X.
options[:rating] ||= "G"
# Default size of the image.
options[:size] ||= "32px"
# Default image url to be used when no gravatar is found
# or when an image exceeds the rating parameter.
local_avatar_url = "/assets/admin/avatar_#{([options[:size].to_i] * 2).join('x')}.png"
default_avatar_url = "#{request.protocol}#{request.host_with_port}#{ActionController::Base.relative_url_root}#{local_avatar_url}"
options[:default] ||= default_avatar_url
unless email.blank?
# Build the Gravatar url.
url = '//gravatar.com/avatar/'
url << "#{Digest::MD5.new.update(email)}?"
url << "rating=#{options[:rating]}" if options[:rating]
url << "&size=#{options[:size]}" if options[:size]
url << "&default=#{options[:default]}" if options[:default]
# Test the Gravatar url
require 'open-uri'
begin; open "http:#{url}", proxy: true
rescue; local_avatar_url
else; url
end
else
local_avatar_url
end
end
# returns the usual set of pagination links.
# options are passed through to will_paginate
# and a 'show all' depagination link is added if relevant.
private
def append_image_extension(name)
unless name =~ /\.(.*?)$/
name + '.png'
else
name
end
end
end
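In a view, the pagination helper above might be used like this (ERB, with illustrative collections):

<%= pagination_for @pages %>                     <%# adds a 'show all' link when the collection is small enough %>
<%= pagination_for @users, depaginate: false %>  <%# plain will_paginate links only %>
<%= pagination_for @assets, max_per_page: 100 %> <%# 'show all' offered only for <= 100 records %>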
|
mojombo/chronic | lib/chronic/handlers.rb | Chronic.Handlers.handle_r | ruby | def handle_r(tokens, options)
dd_tokens = dealias_and_disambiguate_times(tokens, options)
get_anchor(dd_tokens, options)
end | anchors
Handle repeaters | train | https://github.com/mojombo/chronic/blob/2b1eae7ec440d767c09e0b1a7f0e9bcf30ce1d6c/lib/chronic/handlers.rb#L432-L435 | module Handlers
module_function
# Handle month/day
def handle_m_d(month, day, time_tokens, options)
month.start = self.now
span = month.this(options[:context])
year, month = span.begin.year, span.begin.month
day_start = Chronic.time_class.local(year, month, day)
day_start = Chronic.time_class.local(year + 1, month, day) if options[:context] == :future && day_start < now
day_or_time(day_start, time_tokens, options)
end
# Handle repeater-month-name/scalar-day
def handle_rmn_sd(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName)
day = tokens[1].get_tag(ScalarDay).type
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[2..tokens.size], options)
end
# Handle repeater-month-name/scalar-day with separator-on
def handle_rmn_sd_on(tokens, options)
if tokens.size > 3
month = tokens[2].get_tag(RepeaterMonthName)
day = tokens[3].get_tag(ScalarDay).type
token_range = 0..1
else
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(ScalarDay).type
token_range = 0..0
end
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[token_range], options)
end
# Handle repeater-month-name/ordinal-day
def handle_rmn_od(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName)
day = tokens[1].get_tag(OrdinalDay).type
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[2..tokens.size], options)
end
# Handle ordinal this month
def handle_od_rm(tokens, options)
day = tokens[0].get_tag(OrdinalDay).type
month = tokens[2].get_tag(RepeaterMonth)
handle_m_d(month, day, tokens[3..tokens.size], options)
end
# Handle ordinal-day/repeater-month-name
def handle_od_rmn(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[0].get_tag(OrdinalDay).type
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[2..tokens.size], options)
end
def handle_sy_rmn_od(tokens, options)
year = tokens[0].get_tag(ScalarYear).type
month = tokens[1].get_tag(RepeaterMonthName).index
day = tokens[2].get_tag(OrdinalDay).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle scalar-day/repeater-month-name
def handle_sd_rmn(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[0].get_tag(ScalarDay).type
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[2..tokens.size], options)
end
# Handle repeater-month-name/ordinal-day with separator-on
def handle_rmn_od_on(tokens, options)
if tokens.size > 3
month = tokens[2].get_tag(RepeaterMonthName)
day = tokens[3].get_tag(OrdinalDay).type
token_range = 0..1
else
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(OrdinalDay).type
token_range = 0..0
end
return if month_overflow?(self.now.year, month.index, day)
handle_m_d(month, day, tokens[token_range], options)
end
# Handle scalar-year/repeater-quarter-name
def handle_sy_rqn(tokens, options)
handle_rqn_sy(tokens[0..1].reverse, options)
end
# Handle repeater-quarter-name/scalar-year
def handle_rqn_sy(tokens, options)
year = tokens[1].get_tag(ScalarYear).type
quarter_tag = tokens[0].get_tag(RepeaterQuarterName)
quarter_tag.start = Chronic.construct(year)
quarter_tag.this(:none)
end
# Handle repeater-month-name/scalar-year
def handle_rmn_sy(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName).index
year = tokens[1].get_tag(ScalarYear).type
if month == 12
next_month_year = year + 1
next_month_month = 1
else
next_month_year = year
next_month_month = month + 1
end
begin
end_time = Chronic.time_class.local(next_month_year, next_month_month)
Span.new(Chronic.time_class.local(year, month), end_time)
rescue ArgumentError
nil
end
end
# Handle generic timestamp (ruby 1.8)
def handle_generic(tokens, options)
t = Chronic.time_class.parse(options[:text])
Span.new(t, t + 1)
rescue ArgumentError => e
raise e unless e.message =~ /out of range/
end
# Handle repeater-month-name/scalar-day/scalar-year
def handle_rmn_sd_sy(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName).index
day = tokens[1].get_tag(ScalarDay).type
year = tokens[2].get_tag(ScalarYear).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle repeater-month-name/ordinal-day/scalar-year
def handle_rmn_od_sy(tokens, options)
month = tokens[0].get_tag(RepeaterMonthName).index
day = tokens[1].get_tag(OrdinalDay).type
year = tokens[2].get_tag(ScalarYear).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle ordinal-day/repeater-month-name/scalar-year
def handle_od_rmn_sy(tokens, options)
day = tokens[0].get_tag(OrdinalDay).type
month = tokens[1].get_tag(RepeaterMonthName).index
year = tokens[2].get_tag(ScalarYear).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle scalar-day/repeater-month-name/scalar-year
def handle_sd_rmn_sy(tokens, options)
new_tokens = [tokens[1], tokens[0], tokens[2]]
time_tokens = tokens.last(tokens.size - 3)
handle_rmn_sd_sy(new_tokens + time_tokens, options)
end
# Handle scalar-month/scalar-day/scalar-year (endian middle)
def handle_sm_sd_sy(tokens, options)
month = tokens[0].get_tag(ScalarMonth).type
day = tokens[1].get_tag(ScalarDay).type
year = tokens[2].get_tag(ScalarYear).type
time_tokens = tokens.last(tokens.size - 3)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle scalar-day/scalar-month/scalar-year (endian little)
def handle_sd_sm_sy(tokens, options)
new_tokens = [tokens[1], tokens[0], tokens[2]]
time_tokens = tokens.last(tokens.size - 3)
handle_sm_sd_sy(new_tokens + time_tokens, options)
end
# Handle scalar-year/scalar-month/scalar-day
def handle_sy_sm_sd(tokens, options)
new_tokens = [tokens[1], tokens[2], tokens[0]]
time_tokens = tokens.last(tokens.size - 3)
handle_sm_sd_sy(new_tokens + time_tokens, options)
end
# Handle scalar-month/scalar-day
def handle_sm_sd(tokens, options)
month = tokens[0].get_tag(ScalarMonth).type
day = tokens[1].get_tag(ScalarDay).type
year = self.now.year
time_tokens = tokens.last(tokens.size - 2)
return if month_overflow?(year, month, day)
begin
day_start = Chronic.time_class.local(year, month, day)
if options[:context] == :future && day_start < now
day_start = Chronic.time_class.local(year + 1, month, day)
elsif options[:context] == :past && day_start > now
day_start = Chronic.time_class.local(year - 1, month, day)
end
day_or_time(day_start, time_tokens, options)
rescue ArgumentError
nil
end
end
# Handle scalar-day/scalar-month
def handle_sd_sm(tokens, options)
new_tokens = [tokens[1], tokens[0]]
time_tokens = tokens.last(tokens.size - 2)
handle_sm_sd(new_tokens + time_tokens, options)
end
def handle_year_and_month(year, month)
if month == 12
next_month_year = year + 1
next_month_month = 1
else
next_month_year = year
next_month_month = month + 1
end
begin
end_time = Chronic.time_class.local(next_month_year, next_month_month)
Span.new(Chronic.time_class.local(year, month), end_time)
rescue ArgumentError
nil
end
end
# Handle scalar-month/scalar-year
def handle_sm_sy(tokens, options)
month = tokens[0].get_tag(ScalarMonth).type
year = tokens[1].get_tag(ScalarYear).type
handle_year_and_month(year, month)
end
# Handle scalar-year/scalar-month
def handle_sy_sm(tokens, options)
year = tokens[0].get_tag(ScalarYear).type
month = tokens[1].get_tag(ScalarMonth).type
handle_year_and_month(year, month)
end
# Handle RepeaterDayName RepeaterMonthName OrdinalDay
def handle_rdn_rmn_od(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(OrdinalDay).type
time_tokens = tokens.last(tokens.size - 3)
year = self.now.year
return if month_overflow?(year, month.index, day)
begin
if time_tokens.empty?
start_time = Chronic.time_class.local(year, month.index, day)
end_time = time_with_rollover(year, month.index, day + 1)
Span.new(start_time, end_time)
else
day_start = Chronic.time_class.local(year, month.index, day)
day_or_time(day_start, time_tokens, options)
end
rescue ArgumentError
nil
end
end
# Handle RepeaterDayName RepeaterMonthName OrdinalDay ScalarYear
def handle_rdn_rmn_od_sy(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(OrdinalDay).type
year = tokens[3].get_tag(ScalarYear).type
return if month_overflow?(year, month.index, day)
begin
start_time = Chronic.time_class.local(year, month.index, day)
end_time = time_with_rollover(year, month.index, day + 1)
Span.new(start_time, end_time)
rescue ArgumentError
nil
end
end
# Handle RepeaterDayName OrdinalDay
def handle_rdn_od(tokens, options)
day = tokens[1].get_tag(OrdinalDay).type
time_tokens = tokens.last(tokens.size - 2)
year = self.now.year
month = self.now.month
if options[:context] == :future
self.now.day > day ? month += 1 : month
end
return if month_overflow?(year, month, day)
begin
if time_tokens.empty?
start_time = Chronic.time_class.local(year, month, day)
end_time = time_with_rollover(year, month, day + 1)
Span.new(start_time, end_time)
else
day_start = Chronic.time_class.local(year, month, day)
day_or_time(day_start, time_tokens, options)
end
rescue ArgumentError
nil
end
end
# Handle RepeaterDayName RepeaterMonthName ScalarDay
def handle_rdn_rmn_sd(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(ScalarDay).type
time_tokens = tokens.last(tokens.size - 3)
year = self.now.year
return if month_overflow?(year, month.index, day)
begin
if time_tokens.empty?
start_time = Chronic.time_class.local(year, month.index, day)
end_time = time_with_rollover(year, month.index, day + 1)
Span.new(start_time, end_time)
else
day_start = Chronic.time_class.local(year, month.index, day)
day_or_time(day_start, time_tokens, options)
end
rescue ArgumentError
nil
end
end
# Handle RepeaterDayName RepeaterMonthName ScalarDay ScalarYear
def handle_rdn_rmn_sd_sy(tokens, options)
month = tokens[1].get_tag(RepeaterMonthName)
day = tokens[2].get_tag(ScalarDay).type
year = tokens[3].get_tag(ScalarYear).type
return if month_overflow?(year, month.index, day)
begin
start_time = Chronic.time_class.local(year, month.index, day)
end_time = time_with_rollover(year, month.index, day + 1)
Span.new(start_time, end_time)
rescue ArgumentError
nil
end
end
def handle_sm_rmn_sy(tokens, options)
day = tokens[0].get_tag(ScalarDay).type
month = tokens[1].get_tag(RepeaterMonthName).index
year = tokens[2].get_tag(ScalarYear).type
if tokens.size > 3
time = get_anchor([tokens.last], options).begin
h, m, s = time.hour, time.min, time.sec
time = Chronic.time_class.local(year, month, day, h, m, s)
end_time = Chronic.time_class.local(year, month, day + 1, h, m, s)
else
time = Chronic.time_class.local(year, month, day)
day += 1 unless day >= 31
end_time = Chronic.time_class.local(year, month, day)
end
Span.new(time, end_time)
end
# anchors
# Handle repeaters
# Handle repeater/grabber/repeater
def handle_r_g_r(tokens, options)
new_tokens = [tokens[1], tokens[0], tokens[2]]
handle_r(new_tokens, options)
end
# arrows
# Handle scalar/repeater/pointer helper
def handle_srp(tokens, span, options)
distance = tokens[0].get_tag(Scalar).type
repeater = tokens[1].get_tag(Repeater)
pointer = tokens[2].get_tag(Pointer).type
repeater.offset(span, distance, pointer) if repeater.respond_to?(:offset)
end
# Handle scalar/repeater/pointer
def handle_s_r_p(tokens, options)
span = Span.new(self.now, self.now + 1)
handle_srp(tokens, span, options)
end
# Handle pointer/scalar/repeater
def handle_p_s_r(tokens, options)
new_tokens = [tokens[1], tokens[2], tokens[0]]
handle_s_r_p(new_tokens, options)
end
# Handle scalar/repeater/pointer/anchor
def handle_s_r_p_a(tokens, options)
anchor_span = get_anchor(tokens[3..tokens.size - 1], options)
handle_srp(tokens, anchor_span, options)
end
# Handle repeater/scalar/repeater/pointer
def handle_rmn_s_r_p(tokens, options)
handle_s_r_p_a(tokens[1..3] + tokens[0..0], options)
end
def handle_s_r_a_s_r_p_a(tokens, options)
anchor_span = get_anchor(tokens[4..tokens.size - 1], options)
span = handle_srp(tokens[0..1]+tokens[4..6], anchor_span, options)
handle_srp(tokens[2..3]+tokens[4..6], span, options)
end
# narrows
# Handle ordinal repeaters
def handle_orr(tokens, outer_span, options)
repeater = tokens[1].get_tag(Repeater)
repeater.start = outer_span.begin - 1
ordinal = tokens[0].get_tag(Ordinal).type
span = nil
ordinal.times do
span = repeater.next(:future)
if span.begin >= outer_span.end
span = nil
break
end
end
span
end
# Handle ordinal/repeater/separator/repeater
def handle_o_r_s_r(tokens, options)
outer_span = get_anchor([tokens[3]], options)
handle_orr(tokens[0..1], outer_span, options)
end
# Handle ordinal/repeater/grabber/repeater
def handle_o_r_g_r(tokens, options)
outer_span = get_anchor(tokens[2..3], options)
handle_orr(tokens[0..1], outer_span, options)
end
# support methods
def day_or_time(day_start, time_tokens, options)
outer_span = Span.new(day_start, day_start + (24 * 60 * 60))
unless time_tokens.empty?
self.now = outer_span.begin
get_anchor(dealias_and_disambiguate_times(time_tokens, options), options.merge(:context => :future))
else
outer_span
end
end
def get_anchor(tokens, options)
grabber = Grabber.new(:this)
pointer = :future
repeaters = get_repeaters(tokens)
repeaters.size.times { tokens.pop }
if tokens.first && tokens.first.get_tag(Grabber)
grabber = tokens.shift.get_tag(Grabber)
end
head = repeaters.shift
head.start = self.now
case grabber.type
when :last
outer_span = head.next(:past)
when :this
if options[:context] != :past and repeaters.size > 0
outer_span = head.this(:none)
else
outer_span = head.this(options[:context])
end
when :next
outer_span = head.next(:future)
else
raise 'Invalid grabber'
end
if Chronic.debug
puts "Handler-class: #{head.class}"
puts "--#{outer_span}"
end
find_within(repeaters, outer_span, pointer)
end
def get_repeaters(tokens)
tokens.map { |token| token.get_tag(Repeater) }.compact.sort.reverse
end
def month_overflow?(year, month, day)
if ::Date.leap?(year)
day > RepeaterMonth::MONTH_DAYS_LEAP[month - 1]
else
day > RepeaterMonth::MONTH_DAYS[month - 1]
end
rescue ArgumentError
false
end
# Recursively finds repeaters within other repeaters.
# Returns a Span representing the innermost time span
# or nil if no repeater union could be found
def find_within(tags, span, pointer)
puts "--#{span}" if Chronic.debug
return span if tags.empty?
head = tags.shift
head.start = (pointer == :future ? span.begin : span.end)
h = head.this(:none)
if span.cover?(h.begin) || span.cover?(h.end)
find_within(tags, h, pointer)
end
end
def time_with_rollover(year, month, day)
date_parts =
if month_overflow?(year, month, day)
if month == 12
[year + 1, 1, 1]
else
[year, month + 1, 1]
end
else
[year, month, day]
end
Chronic.time_class.local(*date_parts)
end
def dealias_and_disambiguate_times(tokens, options)
# handle aliases of am/pm
# 5:00 in the morning -> 5:00 am
# 7:00 in the evening -> 7:00 pm
day_portion_index = nil
tokens.each_with_index do |t, i|
if t.get_tag(RepeaterDayPortion)
day_portion_index = i
break
end
end
time_index = nil
tokens.each_with_index do |t, i|
if t.get_tag(RepeaterTime)
time_index = i
break
end
end
if day_portion_index && time_index
t1 = tokens[day_portion_index]
t1tag = t1.get_tag(RepeaterDayPortion)
case t1tag.type
when :morning
puts '--morning->am' if Chronic.debug
t1.untag(RepeaterDayPortion)
t1.tag(RepeaterDayPortion.new(:am))
when :afternoon, :evening, :night
puts "--#{t1tag.type}->pm" if Chronic.debug
t1.untag(RepeaterDayPortion)
t1.tag(RepeaterDayPortion.new(:pm))
end
end
# handle ambiguous times if :ambiguous_time_range is specified
if options[:ambiguous_time_range] != :none
ambiguous_tokens = []
tokens.each_with_index do |token, i|
ambiguous_tokens << token
next_token = tokens[i + 1]
if token.get_tag(RepeaterTime) && token.get_tag(RepeaterTime).type.ambiguous? && (!next_token || !next_token.get_tag(RepeaterDayPortion))
distoken = Token.new('disambiguator')
distoken.tag(RepeaterDayPortion.new(options[:ambiguous_time_range]))
ambiguous_tokens << distoken
end
end
tokens = ambiguous_tokens
end
tokens
end
end
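These handlers sit behind Chronic.parse; a few illustrative inputs and the handler families they roughly map to (the routing notes are informal, not traced):

require 'chronic'
Chronic.parse('may 27')                     # repeater-month-name/scalar-day (handle_rmn_sd)
Chronic.parse('3rd wednesday in november')  # ordinal repeater narrowing (handle_o_r_s_r)
Chronic.parse('3 years ago')                # scalar/repeater/pointer (handle_s_r_p)
Chronic.parse('tomorrow at 6pm')            # grabber plus repeaters (handle_r)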
|
ideonetwork/lato-blog | app/controllers/lato_blog/back/categories_controller.rb | LatoBlog.Back::CategoriesController.index | ruby | def index
core__set_header_active_page_title(LANGUAGES[:lato_blog][:pages][:categories])
# find categories to show
@categories = LatoBlog::Category.where(meta_language: cookies[:lato_blog__current_language]).order('title ASC')
@widget_index_categories = core__widgets_index(@categories, search: 'title', pagination: 10)
end | This function shows the list of possible categories. | train | https://github.com/ideonetwork/lato-blog/blob/a0d92de299a0e285851743b9d4a902f611187cba/app/controllers/lato_blog/back/categories_controller.rb#L9-L14 | class Back::CategoriesController < Back::BackController
before_action do
core__set_menu_active_item('blog_articles')
end
# This function shows the list of possible categories.
# This function shows a single category. It creates a redirect to the edit path.
def show
# use edit as default post show page
redirect_to lato_blog.edit_category_path(params[:id])
end
# This function shows the view to create a new category.
def new
core__set_header_active_page_title(LANGUAGES[:lato_blog][:pages][:categories_new])
@category = LatoBlog::Category.new
if params[:language]
set_current_language params[:language]
end
if params[:parent]
@category_parent = LatoBlog::CategoryParent.find_by(id: params[:parent])
end
fetch_external_objects
end
# This function creates a new category.
def create
@category = LatoBlog::Category.new(new_category_params)
if [email protected]
flash[:danger] = @category.errors.full_messages.to_sentence
redirect_to lato_blog.new_category_path
return
end
flash[:success] = LANGUAGES[:lato_blog][:flashes][:category_create_success]
redirect_to lato_blog.category_path(@category.id)
end
# This function shows the view to edit a category.
def edit
core__set_header_active_page_title(LANGUAGES[:lato_blog][:pages][:categories_edit])
@category = LatoBlog::Category.find_by(id: params[:id])
return unless check_category_presence
if @category.meta_language != cookies[:lato_blog__current_language]
set_current_language @category.meta_language
end
fetch_external_objects
end
# This function updates a category.
def update
@category = LatoBlog::Category.find_by(id: params[:id])
return unless check_category_presence
if [email protected](edit_category_params)
flash[:danger] = @category.errors.full_messages.to_sentence
redirect_to lato_blog.edit_category_path(@category.id)
return
end
flash[:success] = LANGUAGES[:lato_blog][:flashes][:category_update_success]
redirect_to lato_blog.category_path(@category.id)
end
  # This function destroys a category.
def destroy
@category = LatoBlog::Category.find_by(id: params[:id])
return unless check_category_presence
    if !@category.destroy
flash[:danger] = @category.category_parent.errors.full_messages.to_sentence
redirect_to lato_blog.edit_category_path(@category.id)
return
end
flash[:success] = LANGUAGES[:lato_blog][:flashes][:category_destroy_success]
redirect_to lato_blog.categories_path(status: 'deleted')
end
private
def fetch_external_objects
@categories_list = LatoBlog::Category.where(meta_language: cookies[:lato_blog__current_language]).where.not(
id: @category.id).map { |cat| { title: cat.title, value: cat.id } }
end
  # This function checks that the @category variable is present and redirects to the index if it does not exist.
def check_category_presence
if !@category
flash[:warning] = LANGUAGES[:lato_blog][:flashes][:category_not_found]
redirect_to lato_blog.categories_path
return false
end
return true
end
# Params helpers:
  # This function generates params for a new category.
def new_category_params
# take params from front-end request
category_params = params.require(:category).permit(:title, :lato_blog_category_id).to_h
# add current superuser id
category_params[:lato_core_superuser_creator_id] = @core__current_superuser.id
    # add category parent id
category_params[:lato_blog_category_parent_id] = (params[:parent] && !params[:parent].blank? ? params[:parent] : generate_category_parent)
# add metadata
category_params[:meta_language] = cookies[:lato_blog__current_language]
    # return final category params
return category_params
end
  # This function generates params for editing a category.
def edit_category_params
params.require(:category).permit(:title, :lato_blog_category_id, :meta_permalink)
end
  # This function generates and saves a new category parent and returns its id.
def generate_category_parent
category_parent = LatoBlog::CategoryParent.create
return category_parent.id
end
end
|
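A hypothetical controller spec sketch for the index action above; the engine routes call and cookie key follow the code shown, while the authentication set up by Back::BackController is assumed to be satisfied elsewhere.

require 'rails_helper'

RSpec.describe LatoBlog::Back::CategoriesController, type: :controller do
  routes { LatoBlog::Engine.routes }

  it 'lists categories for the language stored in the cookie' do
    cookies[:lato_blog__current_language] = 'en'
    get :index
    expect(response).to have_http_status(:ok)
  end
end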
mongodb/mongo-ruby-driver | lib/mongo/address.rb | Mongo.Address.create_resolver | ruby | def create_resolver(ssl_options)
return Unix.new(seed.downcase) if seed.downcase =~ Unix::MATCH
family = (host == LOCALHOST) ? ::Socket::AF_INET : ::Socket::AF_UNSPEC
error = nil
::Socket.getaddrinfo(host, nil, family, ::Socket::SOCK_STREAM).each do |info|
begin
specific_address = FAMILY_MAP[info[4]].new(info[3], port, host)
socket = specific_address.socket(
connect_timeout, ssl_options, connect_timeout: connect_timeout)
socket.close
return specific_address
rescue IOError, SystemCallError, Error::SocketTimeoutError, Error::SocketError => e
error = e
end
end
raise error
end | To determine which address the socket will connect to, the driver will
attempt to connect to each IP address returned by Socket::getaddrinfo in
sequence. Once a successful connection is made, a resolver with that
IP address specified is returned. If no successful connection is
made, the error made by the last connection attempt is raised. | train | https://github.com/mongodb/mongo-ruby-driver/blob/dca26d0870cb3386fad9ccc1d17228097c1fe1c8/lib/mongo/address.rb#L193-L210 | class Address
extend Forwardable
# Mapping from socket family to resolver class.
#
# @since 2.0.0
FAMILY_MAP = {
::Socket::PF_UNIX => Unix,
::Socket::AF_INET6 => IPv6,
::Socket::AF_INET => IPv4
}.freeze
# The localhost constant.
#
# @since 2.1.0
LOCALHOST = 'localhost'.freeze
# Initialize the address.
#
# @example Initialize the address with a DNS entry and port.
# Mongo::Address.new("app.example.com:27017")
#
# @example Initialize the address with a DNS entry and no port.
# Mongo::Address.new("app.example.com")
#
# @example Initialize the address with an IPV4 address and port.
# Mongo::Address.new("127.0.0.1:27017")
#
# @example Initialize the address with an IPV4 address and no port.
# Mongo::Address.new("127.0.0.1")
#
# @example Initialize the address with an IPV6 address and port.
# Mongo::Address.new("[::1]:27017")
#
# @example Initialize the address with an IPV6 address and no port.
# Mongo::Address.new("[::1]")
#
# @example Initialize the address with a unix socket.
# Mongo::Address.new("/path/to/socket.sock")
#
# @param [ String ] seed The provided address.
# @param [ Hash ] options The address options.
#
# @since 2.0.0
def initialize(seed, options = {})
@seed = seed
@host, @port = parse_host_port
@options = options
end
# @return [ String ] seed The seed address.
attr_reader :seed
# @return [ String ] host The original host name.
attr_reader :host
# @return [ Integer ] port The port.
attr_reader :port
# Check equality of the address to another.
#
# @example Check address equality.
# address == other
#
# @param [ Object ] other The other object.
#
# @return [ true, false ] If the objects are equal.
#
# @since 2.0.0
def ==(other)
return false unless other.is_a?(Address)
host == other.host && port == other.port
end
# Check equality for hashing.
#
# @example Check hashing equality.
# address.eql?(other)
#
# @param [ Object ] other The other object.
#
# @return [ true, false ] If the objects are equal.
#
# @since 2.2.0
def eql?(other)
self == other
end
# Calculate the hash value for the address.
#
# @example Calculate the hash value.
# address.hash
#
# @return [ Integer ] The hash value.
#
# @since 2.0.0
def hash
[ host, port ].hash
end
# Get a pretty printed address inspection.
#
# @example Get the address inspection.
# address.inspect
#
# @return [ String ] The nice inspection string.
#
# @since 2.0.0
def inspect
"#<Mongo::Address:0x#{object_id} address=#{to_s}>"
end
# Get a socket for the provided address, given the options.
#
# The address the socket connects to is determined by the algorithm described in the
  # #create_resolver documentation. Each time this method is called, #create_resolver
# will be called, meaning that a new hostname lookup will occur. This is done so that any
# changes to which addresses the hostname resolves to will be picked up even if a socket has
# been connected to it before.
#
# @example Get a socket.
# address.socket(5, :ssl => true)
#
# @param [ Float ] socket_timeout The socket timeout.
# @param [ Hash ] ssl_options SSL options.
# @param [ Hash ] options The options.
#
# @option options [ Float ] :connect_timeout Connect timeout.
#
# @return [ Mongo::Socket::SSL, Mongo::Socket::TCP, Mongo::Socket::Unix ] The socket.
#
# @since 2.0.0
def socket(socket_timeout, ssl_options = {}, options = {})
create_resolver(ssl_options).socket(socket_timeout, ssl_options, options)
end
# Get the address as a string.
#
# @example Get the address as a string.
# address.to_s
#
# @return [ String ] The nice string.
#
# @since 2.0.0
def to_s
if port
if host.include?(':')
"[#{host}]:#{port}"
else
"#{host}:#{port}"
end
else
host
end
end
# @api private
def connect_timeout
@connect_timeout ||= @options[:connect_timeout] || Server::CONNECT_TIMEOUT
end
private
  # Splits the seed string into host and port, dispatching on the address
  # format (Unix socket path, IPv6 literal, or IPv4/hostname).
def parse_host_port
address = seed.downcase
case address
when Unix::MATCH then Unix.parse(address)
when IPv6::MATCH then IPv6.parse(address)
else IPv4.parse(address)
end
end
end
|
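A minimal usage sketch for Mongo::Address, following the @example comments above; the socket call assumes a reachable mongod on localhost:27017.

require 'mongo'

address = Mongo::Address.new('127.0.0.1:27017')
address.host # => "127.0.0.1"
address.port # => 27017
address.to_s # => "127.0.0.1:27017"

# Resolves via Socket.getaddrinfo and connects, as described above.
socket = address.socket(5)
socket.close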
wvanbergen/request-log-analyzer | lib/request_log_analyzer/controller.rb | RequestLogAnalyzer.Controller.handle_progress | ruby | def handle_progress(message, value = nil)
case message
when :started
@progress_bar = CommandLine::ProgressBar.new(File.basename(value), File.size(value), STDERR)
when :finished
@progress_bar.finish
@progress_bar = nil
when :interrupted
if @progress_bar
@progress_bar.halt
@progress_bar = nil
end
when :progress
@progress_bar.set(value)
end
end | Progress function.
Expects :started with file, :progress with current line and :finished or :interrupted when done.
<tt>message</tt> Current state (:started, :finished, :interrupted or :progress).
<tt>value</tt> File or current line. | train | https://github.com/wvanbergen/request-log-analyzer/blob/b83865d440278583ac8e4901bb33878244fd7c75/lib/request_log_analyzer/controller.rb#L264-L279 | class Controller
attr_reader :source, :filters, :aggregators, :output, :options
# Builds a RequestLogAnalyzer::Controller given parsed command line arguments
  # <tt>arguments</tt> A CommandLine::Arguments hash containing parsed command line parameters.
def self.build_from_arguments(arguments)
options = {}
# Copy fields
options[:database] = arguments[:database]
options[:reset_database] = arguments[:reset_database]
options[:debug] = arguments[:debug]
options[:yaml] = arguments[:yaml] || arguments[:dump]
options[:mail] = arguments[:mail]
options[:no_progress] = arguments[:no_progress]
options[:format] = arguments[:format]
options[:output] = arguments[:output]
options[:file] = arguments[:file]
options[:after] = arguments[:after]
options[:before] = arguments[:before]
options[:reject] = arguments[:reject]
options[:select] = arguments[:select]
options[:boring] = arguments[:boring]
options[:aggregator] = arguments[:aggregator]
options[:report_width] = arguments[:report_width]
options[:report_sort] = arguments[:report_sort]
options[:report_amount] = arguments[:report_amount]
options[:mailhost] = arguments[:mailhost]
options[:mailfrom] = arguments[:mailfrom]
options[:mailfrom_name] = arguments[:mailfrom_name]
options[:mailsubject] = arguments[:mailsubject]
options[:silent] = arguments[:silent]
options[:parse_strategy] = arguments[:parse_strategy]
# Apache format workaround
if arguments[:rails_format]
options[:format] = { rails: arguments[:rails_format] }
elsif arguments[:apache_format]
options[:format] = { apache: arguments[:apache_format] }
end
# Handle output format casing
if options[:output].class == String
options[:output] = 'HTML' if options[:output] =~ /^html$/i
options[:output] = 'FixedWidth' if options[:output] =~ /^fixed_?width$/i
end
# Register sources
if arguments.parameters.length == 1
file = arguments.parameters[0]
if file == '-' || file == 'STDIN'
options.store(:source_files, $stdin)
elsif File.exist?(file)
options.store(:source_files, file)
else
puts "File not found: #{file}"
exit(0)
end
else
options.store(:source_files, arguments.parameters)
end
# Guess file format
if !options[:format] && options[:source_files]
options[:format] = :rails3 # Default
if options[:source_files] != $stdin
if options[:source_files].class == String
options[:format] = RequestLogAnalyzer::FileFormat.autodetect(options[:source_files])
elsif options[:source_files].class == Array && options[:source_files].first != $stdin
options[:format] = RequestLogAnalyzer::FileFormat.autodetect(options[:source_files].first)
end
end
end
build(options)
end
# Build a new controller.
# Returns a new RequestLogAnalyzer::Controller object.
#
# Options
# * <tt>:after</tt> Drop all requests after this date (Date, DateTime, Time, or a String in "YYYY-MM-DD hh:mm:ss" format)
# * <tt>:aggregator</tt> Array of aggregators (Strings or Symbols for the builtin aggregators or a RequestLogAnalyzer::Aggregator class - Defaults to [:summarizer]).
# * <tt>:boring</tt> Do not show color on STDOUT (Defaults to false).
# * <tt>:before</tt> Drop all requests before this date (Date, DateTime, Time or a String in "YYYY-MM-DD hh:mm:ss" format)
# * <tt>:database</tt> Database file to insert encountered requests to.
# * <tt>:debug</tt> Enables echo aggregator which will echo each request analyzed.
# * <tt>:file</tt> Filestring, File or StringIO.
# * <tt>:format</tt> :rails, {:apache => 'FORMATSTRING'}, :merb, :amazon_s3, :mysql or RequestLogAnalyzer::FileFormat class. (Defaults to :rails).
# * <tt>:mail</tt> Email the results to this email address.
# * <tt>:mailhost</tt> Email the results to this mail server.
# * <tt>:mailfrom</tt> Set the Email sender address.
# * <tt>:mailfrom_alias</tt> Set the Email sender name.
# * <tt>:mailsubject</tt> Email subject.
# * <tt>:no_progress</tt> Do not display the progress bar (increases parsing speed).
# * <tt>:output</tt> 'FixedWidth', 'HTML' or RequestLogAnalyzer::Output class. Defaults to 'FixedWidth'.
# * <tt>:reject</tt> Reject specific {:field => :value} combination (expects a single hash).
# * <tt>:report_width</tt> Width of reports in characters for FixedWidth reports. (Defaults to 80)
# * <tt>:reset_database</tt> Reset the database before starting.
# * <tt>:select</tt> Select specific {:field => :value} combination (expects a single hash).
# * <tt>:source_files</tt> Source files to analyze. Provide either File, array of files or STDIN.
# * <tt>:yaml</tt> Output to YAML file.
# * <tt>:silent</tt> Minimal output automatically implies :no_progress
  # * <tt>:source</tt> The class to instantiate to grab the requests, must be a RequestLogAnalyzer::Source::Base descendant. (Defaults to RequestLogAnalyzer::Source::LogParser)
#
# === Example
# RequestLogAnalyzer::Controller.build(
# :output => :HTML,
# :mail => 'root@localhost',
# :after => Time.now - 24*60*60,
# :source_files => '/var/log/passenger.log'
# ).run!
#
# === Todo
# * Check if defaults work (Aggregator defaults seem wrong).
  # * Refactor :database => options[:database], :dump => options[:dump] away from controller initialization.
def self.build(options)
# Defaults
options[:output] ||= 'FixedWidth'
options[:format] ||= :rails
options[:aggregator] ||= [:summarizer]
options[:report_width] ||= 80
options[:report_amount] ||= 20
options[:report_sort] ||= 'sum,mean'
options[:boring] ||= false
options[:silent] ||= false
options[:source] ||= RequestLogAnalyzer::Source::LogParser
options[:no_progress] = true if options[:silent]
# Deprecation warnings
if options[:dump]
warn '[DEPRECATION] `:dump` is deprecated. Please use `:yaml` instead.'
options[:yaml] = options[:dump]
end
# Set the output class
output_args = {}
output_object = nil
if options[:output].is_a?(Class)
output_class = options[:output]
else
output_class = RequestLogAnalyzer::Output.const_get(options[:output])
end
output_sort = options[:report_sort].split(',').map { |s| s.to_sym }
output_amount = options[:report_amount] == 'all' ? :all : options[:report_amount].to_i
if options[:file]
output_object = %w( File StringIO ).include?(options[:file].class.name) ? options[:file] : File.new(options[:file], 'w+')
output_args = { width: 80, color: false, characters: :ascii, sort: output_sort, amount: output_amount }
elsif options[:mail]
output_object = RequestLogAnalyzer::Mailer.new(options[:mail], options[:mailhost], subject: options[:mailsubject], from: options[:mailfrom], from_alias: options[:mailfrom_name])
output_args = { width: 80, color: false, characters: :ascii, sort: output_sort, amount: output_amount }
else
output_object = STDOUT
output_args = { width: options[:report_width].to_i, color: !options[:boring],
characters: (options[:boring] ? :ascii : :utf), sort: output_sort, amount: output_amount }
end
output_instance = output_class.new(output_object, output_args)
# Create the controller with the correct file format
if options[:format].is_a?(Hash)
file_format = RequestLogAnalyzer::FileFormat.load(options[:format].keys[0], options[:format].values[0])
else
file_format = RequestLogAnalyzer::FileFormat.load(options[:format])
end
# Kickstart the controller
controller =
Controller.new(options[:source].new(file_format,
source_files: options[:source_files],
parse_strategy: options[:parse_strategy]),
output: output_instance,
database: options[:database], # FUGLY!
yaml: options[:yaml],
reset_database: options[:reset_database],
no_progress: options[:no_progress],
silent: options[:silent]
)
# register filters
if options[:after] || options[:before]
filter_options = {}
[:after, :before].each do |filter|
case options[filter]
when Date, DateTime, Time
filter_options[filter] = options[filter]
when String
filter_options[filter] = DateTime.parse(options[filter])
end
end
controller.add_filter(:timespan, filter_options)
end
if options[:reject]
options[:reject].each do |(field, value)|
controller.add_filter(:field, mode: :reject, field: field, value: value)
end
end
if options[:select]
options[:select].each do |(field, value)|
controller.add_filter(:field, mode: :select, field: field, value: value)
end
end
# register aggregators
options[:aggregator].each { |agg| controller.add_aggregator(agg) }
controller.add_aggregator(:summarizer) if options[:aggregator].empty?
controller.add_aggregator(:echo) if options[:debug]
controller.add_aggregator(:database_inserter) if options[:database] && !options[:aggregator].include?('database')
file_format.setup_environment(controller)
controller
end
# Builds a new Controller for the given log file format.
# <tt>format</tt> Logfile format. Defaults to :rails
  # Options are passed on to the LogParser.
# * <tt>:database</tt> Database the controller should use.
  # * <tt>:yaml</tt> Yaml Dump the controller should use.
# * <tt>:output</tt> All report outputs get << through this output.
# * <tt>:no_progress</tt> No progress bar
# * <tt>:silent</tt> Minimal output, only error
def initialize(source, options = {})
@source = source
@options = options
@aggregators = []
@filters = []
@output = options[:output]
@interrupted = false
# Register the request format for this session after checking its validity
fail 'Invalid file format!' unless @source.file_format.valid?
    # Install event handlers for warnings, progress updates and source changes
@source.warning = lambda { |type, message, lineno| @aggregators.each { |agg| agg.warning(type, message, lineno) } }
@source.progress = lambda { |message, value| handle_progress(message, value) } unless options[:no_progress]
@source.source_changes = lambda { |change, filename| handle_source_change(change, filename) }
end
# Source change handler
def handle_source_change(change, filename)
@aggregators.each { |agg| agg.source_change(change, File.expand_path(filename, Dir.pwd)) }
end
# Adds an aggregator to the controller. The aggregator will be called for every request
# that is parsed from the provided sources (see add_source)
def add_aggregator(agg)
agg = RequestLogAnalyzer::Aggregator.const_get(RequestLogAnalyzer.to_camelcase(agg)) if agg.is_a?(String) || agg.is_a?(Symbol)
@aggregators << agg.new(@source, @options)
end
alias_method :>>, :add_aggregator
# Adds a request filter to the controller.
def add_filter(filter, filter_options = {})
filter = RequestLogAnalyzer::Filter.const_get(RequestLogAnalyzer.to_camelcase(filter)) if filter.is_a?(Symbol)
@filters << filter.new(source.file_format, @options.merge(filter_options))
end
# Push a request through the entire filterchain (@filters).
# <tt>request</tt> The request to filter.
# Returns the filtered request or nil.
def filter_request(request)
@filters.each do |filter|
request = filter.filter(request)
return nil if request.nil?
end
request
end
# Push a request to all the aggregators (@aggregators).
# <tt>request</tt> The request to push to the aggregators.
def aggregate_request(request)
return false unless request
@aggregators.each { |agg| agg.aggregate(request) }
true
end
# Runs RequestLogAnalyzer
# 1. Call prepare on every aggregator
# 2. Generate requests from source object
# 3. Filter out unwanted requests
# 4. Call aggregate for remaning requests on every aggregator
# 4. Call finalize on every aggregator
# 5. Call report on every aggregator
# 6. Finalize Source
def run!
# @aggregators.each{|agg| p agg}
@aggregators.each { |agg| agg.prepare }
install_signal_handlers
@source.each_request do |request|
break if @interrupted
aggregate_request(filter_request(request))
end
@aggregators.each { |agg| agg.finalize }
@output.header
@aggregators.each { |agg| agg.report(@output) }
@output.footer
@source.finalize
if @output.io.is_a?(File)
unless @options[:silent]
puts
puts 'Report written to: ' + File.expand_path(@output.io.path)
puts 'Need an expert to analyze your application?'
puts 'Mail to [email protected] or visit us at http://railsdoctors.com'
puts 'Thanks for using request-log-analyzer!'
end
@output.io.close
elsif @output.io.is_a?(RequestLogAnalyzer::Mailer)
@output.io.mail
end
end
def install_signal_handlers
Signal.trap('INT') do
handle_progress(:interrupted)
puts 'Caught interrupt! Stopping parsing...'
@interrupted = true
end
end
end
|
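A minimal end-to-end sketch based on the Controller.build example in the comments above; the log path is a placeholder.

require 'request_log_analyzer'

controller = RequestLogAnalyzer::Controller.build(
  :format       => :rails,
  :output       => 'FixedWidth',
  :source_files => '/var/log/production.log'
)
controller.run! # parses, aggregates, and prints the report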
klacointe/has_media | lib/has_media.rb | HasMedia.ClassMethods.create_one_accessors | ruby | def create_one_accessors(context, options)
define_method(context) do
media.with_context(context.to_sym).first
end
module_eval <<-"end;", __FILE__, __LINE__
def #{context}=(value)
return if value.blank?
medium = Medium.new_from_value(self, value, "#{context}", "#{options[:encode]}", "#{options[:only]}")
if medium
@old_media ||= []
@old_media += media.with_context("#{context}")
media << medium
end
end
end;
end | create_one_accessors
Create needed accessors on master object for unique relation
@param [String] context
@param [Hash] options | train | https://github.com/klacointe/has_media/blob/a886d36a914d8244f3761455458b9d0226fa22d5/lib/has_media.rb#L262-L278 | module ClassMethods
##
# has_one_medium
# Define a class method to link to a medium
#
# @param [String] context, the context (or accessor) to link medium
# @param [Hash] options, can be one of : encode, only
#
def has_one_medium(context, options = {})
set_relations(context, :has_one)
set_general_methods
create_one_accessors(context, options)
end
##
# has_many_media
# Define a class method to link to several media
#
# @param [String] context, the context (or accessor) to link media
# @param [Hash] options, can be one of : encode, only
#
def has_many_media(context, options = {})
set_relations(context, :has_many)
set_general_methods
create_many_accessors(context, options)
end
##
# set_general_methods
# Add generic methods for has_one_medium and has_many_media
# Including media_links relation, accessors, callbacks, validation ...
#
def set_general_methods
@methods_present ||= false
unless @methods_present
set_media_links_relation
set_attributes
set_validate_methods
set_callbacks
end
@methods_present = true
end
##
# set_relations
  # add the relation on the medium if it does not exist
# Also check if a class has a duplicate context
#
# @param [String] context
# @param [String] relation type, one of :has_many, :has_one
#
def set_relations(context, relation)
@contexts ||= {}
@contexts[relation] ||= []
@media_relation_set ||= []
if @contexts[relation].include?(context)
raise Exception.new("You should NOT use same context identifier for several has_one or has_many relation to media")
end
@contexts[relation] << context
return if @media_relation_set.include? self
has_many :media, :through => :media_links, :dependent => :destroy
@media_relation_set << self
end
##
# set_callbacks
# Add callbacks to :
# - merge medium errors to class related errors
# - destroy medium
#
def set_callbacks
validate :merge_media_errors
before_save :remove_old_media
end
##
# set_attributes
# Add media_errors attributes to store medium errors
#
def set_attributes
attr_accessor :media_errors
end
##
# set_validate_methods
# Define merge_media_errors to merge medium errors with errors given
# on master object.
#
def set_validate_methods
module_eval <<-"end;", __FILE__, __LINE__
def merge_media_errors
self.media_errors ||= []
self.media_errors.each do |error|
self.errors.add(:base, error)
end
end
end;
end
##
# set_media_links_relation
# Declare media_links relation
def set_media_links_relation
has_many :media_links, :as => :mediated, :dependent => :destroy
end
##
# create_many_accessors
# Create needed accessors on master object for multiple relation
#
# @param [String] context
# @param [Hash] options
#
def create_many_accessors(context, options)
define_method(context.to_s.pluralize) do
media.with_context(context.to_sym).uniq
end
module_eval <<-"end;", __FILE__, __LINE__
def #{context}=(values)
return if values.blank?
Array(values).each do |value|
next if value.nil?
medium = Medium.new_from_value(self, value, "#{context}", "#{options[:encode]}", "#{options[:only]}")
media << medium if medium
end
end
end;
end
end
|
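An illustrative model using the macros above; the include mechanism and option values are assumptions (the gem's README would be authoritative), and the Medium model plus ActiveRecord wiring are omitted.

class User < ActiveRecord::Base
  include HasMedia # assumed; the gem may instead patch ActiveRecord::Base itself

  has_one_medium :avatar, :only => :image
  has_many_media :photos
end

user = User.new
user.avatar = File.open('face.png') # goes through Medium.new_from_value
user.avatar                         # => first medium tagged with the :avatar context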
wvanbergen/request-log-analyzer | lib/request_log_analyzer/source/log_parser.rb | RequestLogAnalyzer::Source.LogParser.parse_string | ruby | def parse_string(string, options = {}, &block)
parse_io(StringIO.new(string), options, &block)
end | Parses a string. It will simply call parse_io. This function does not support progress updates.
<tt>string</tt>:: The string that should be parsed.
<tt>options</tt>:: A Hash of options that will be passed to parse_io. | train | https://github.com/wvanbergen/request-log-analyzer/blob/b83865d440278583ac8e4901bb33878244fd7c75/lib/request_log_analyzer/source/log_parser.rb#L157-L159 | class LogParser < Base
include Enumerable
# The maximum number of bytes to read from a line.
DEFAULT_MAX_LINE_LENGTH = 8096
DEFAULT_LINE_DIVIDER = "\n"
# The default parse strategy that will be used to parse the input.
DEFAULT_PARSE_STRATEGY = 'assume-correct'
# All available parse strategies.
PARSE_STRATEGIES = ['cautious', 'assume-correct']
attr_reader :source_files, :current_file, :current_lineno, :processed_files
attr_reader :warnings, :parsed_lines, :parsed_requests, :skipped_lines, :skipped_requests
# Initializes the log file parser instance.
# It will apply the language specific FileFormat module to this instance. It will use the line
# definitions in this module to parse any input that it is given (see parse_io).
#
# <tt>format</tt>:: The current file format instance
# <tt>options</tt>:: A hash of options that are used by the parser
def initialize(format, options = {})
super(format, options)
@warnings = 0
@parsed_lines = 0
@parsed_requests = 0
@skipped_lines = 0
@skipped_requests = 0
@current_request = nil
@current_source = nil
@current_file = nil
@current_lineno = nil
@processed_files = []
@source_files = options[:source_files]
@progress_handler = nil
@warning_handler = nil
@options[:parse_strategy] ||= DEFAULT_PARSE_STRATEGY
unless PARSE_STRATEGIES.include?(@options[:parse_strategy])
fail "Unknown parse strategy: #{@options[@parse_strategy]}"
end
end
def max_line_length
file_format.max_line_length || DEFAULT_MAX_LINE_LENGTH
end
def line_divider
file_format.line_divider || DEFAULT_LINE_DIVIDER
end
# Reads the input, which can either be a file, sequence of files or STDIN to parse
# lines specified in the FileFormat. This lines will be combined into Request instances,
# that will be yielded. The actual parsing occurs in the parse_io method.
  # <tt>options</tt>:: A Hash of options that will be passed to parse_io.
def each_request(options = {}, &block) # :yields: :request, request
case @source_files
when IO
if @source_files == $stdin
puts 'Parsing from the standard input. Press CTRL+C to finish.' # FIXME: not here
end
parse_stream(@source_files, options, &block)
when String
parse_file(@source_files, options, &block)
when Array
parse_files(@source_files, options, &block)
else
fail 'Unknown source provided'
end
end
# Make sure the Enumerable methods work as expected
alias_method :each, :each_request
# Parses a list of subsequent files of the same format, by calling parse_file for every
# file in the array.
# <tt>files</tt>:: The Array of files that should be parsed
# <tt>options</tt>:: A Hash of options that will be pased to parse_io.
def parse_files(files, options = {}, &block) # :yields: request
files.each { |file| parse_file(file, options, &block) }
end
# Check if a file has a compressed extention in the filename.
# If recognized, return the command string used to decompress the file
def decompress_file?(filename)
nice_command = 'nice -n 5'
return "#{nice_command} gunzip -c -d #{filename}" if filename.match(/\.tar.gz$/) || filename.match(/\.tgz$/) || filename.match(/\.gz$/)
return "#{nice_command} bunzip2 -c -d #{filename}" if filename.match(/\.bz2$/)
return "#{nice_command} unzip -p #{filename}" if filename.match(/\.zip$/)
''
end
# Parses a log file. Creates an IO stream for the provided file, and sends it to parse_io for
# further handling. This method supports progress updates that can be used to display a progressbar
#
# If the logfile is compressed, it is uncompressed to stdout and read.
# TODO: Check if IO.popen encounters problems with the given command line.
# TODO: Fix progress bar that is broken for IO.popen, as it returns a single string.
#
# <tt>file</tt>:: The file that should be parsed.
  # <tt>options</tt>:: A Hash of options that will be passed to parse_io.
def parse_file(file, options = {}, &block)
if File.directory?(file)
parse_files(Dir["#{ file }/*"], options, &block)
return
end
@current_source = File.expand_path(file)
@source_changes_handler.call(:started, @current_source) if @source_changes_handler
if decompress_file?(file).empty?
@progress_handler = @dormant_progress_handler
@progress_handler.call(:started, file) if @progress_handler
File.open(file, 'rb') { |f| parse_io(f, options, &block) }
@progress_handler.call(:finished, file) if @progress_handler
@progress_handler = nil
@processed_files.push(@current_source.dup)
else
IO.popen(decompress_file?(file), 'rb') { |f| parse_io(f, options, &block) }
end
@source_changes_handler.call(:finished, @current_source) if @source_changes_handler
@current_source = nil
end
# Parses an IO stream. It will simply call parse_io. This function does not support progress updates
# because the length of a stream is not known.
# <tt>stream</tt>:: The IO stream that should be parsed.
  # <tt>options</tt>:: A Hash of options that will be passed to parse_io.
def parse_stream(stream, options = {}, &block)
parse_io(stream, options, &block)
end
# This method loops over each line of the input stream. It will try to parse this line as any of
# the lines that are defined by the current file format (see RequestLogAnalyazer::FileFormat).
# It will then combine these parsed line into requests using heuristics. These requests (see
# RequestLogAnalyzer::Request) will then be yielded for further processing in the pipeline.
#
# - RequestLogAnalyzer::LineDefinition#matches is called to test if a line matches a line definition of the file format.
# - update_current_request is used to combine parsed lines into requests using heuristics.
# - The method will yield progress updates if a progress handler is installed using progress=
# - The method will yield parse warnings if a warning handler is installed using warning=
#
# This is a Ruby 1.9 specific version that offers memory protection.
#
# <tt>io</tt>:: The IO instance to use as source
# <tt>options</tt>:: A hash of options that can be used by the parser.
def parse_io_19(io, options = {}, &block) # :yields: request
@max_line_length = options[:max_line_length] || max_line_length
@line_divider = options[:line_divider] || line_divider
@current_lineno = 0
while line = io.gets(@line_divider, @max_line_length)
@current_lineno += 1
@progress_handler.call(:progress, io.pos) if @progress_handler && @current_lineno % 255 == 0
parse_line(line, &block)
end
warn(:unfinished_request_on_eof, 'End of file reached, but last request was not completed!') unless @current_request.nil?
@current_lineno = nil
end
# This method loops over each line of the input stream. It will try to parse this line as any of
# the lines that are defined by the current file format (see RequestLogAnalyazer::FileFormat).
# It will then combine these parsed line into requests using heuristics. These requests (see
# RequestLogAnalyzer::Request) will then be yielded for further processing in the pipeline.
#
# - RequestLogAnalyzer::LineDefinition#matches is called to test if a line matches a line definition of the file format.
# - update_current_request is used to combine parsed lines into requests using heuristics.
# - The method will yield progress updates if a progress handler is installed using progress=
# - The method will yield parse warnings if a warning handler is installed using warning=
#
# This is a Ruby 1.8 specific version that doesn't offer memory protection.
#
# <tt>io</tt>:: The IO instance to use as source
# <tt>options</tt>:: A hash of options that can be used by the parser.
def parse_io_18(io, options = {}, &block) # :yields: request
@line_divider = options[:line_divider] || line_divider
@current_lineno = 0
while line = io.gets(@line_divider)
@current_lineno += 1
@progress_handler.call(:progress, io.pos) if @progress_handler && @current_lineno % 255 == 0
parse_line(line, &block)
end
warn(:unfinished_request_on_eof, 'End of file reached, but last request was not completed!') unless @current_request.nil?
@current_lineno = nil
end
alias_method :parse_io, RUBY_VERSION.to_f < 1.9 ? :parse_io_18 : :parse_io_19
# Parses a single line using the current file format. If successful, use the parsed
# information to build a request
# <tt>line</tt>:: The line to parse
# <tt>block</tt>:: The block to send fully parsed requests to.
def parse_line(line, &block) # :yields: request
if request_data = file_format.parse_line(line) { |wt, message| warn(wt, message) }
@parsed_lines += 1
update_current_request(request_data.merge(source: @current_source, lineno: @current_lineno), &block)
end
end
# Add a block to this method to install a progress handler while parsing.
# <tt>proc</tt>:: The proc that will be called to handle progress update messages
def progress=(proc)
@dormant_progress_handler = proc
end
# Add a block to this method to install a warning handler while parsing,
# <tt>proc</tt>:: The proc that will be called to handle parse warning messages
def warning=(proc)
@warning_handler = proc
end
# Add a block to this method to install a source change handler while parsing,
# <tt>proc</tt>:: The proc that will be called to handle source changes
def source_changes=(proc)
@source_changes_handler = proc
end
  # This method is called by the parser if it encounters any parsing problems.
# It will call the installed warning handler if any.
#
# By default, RequestLogAnalyzer::Controller will install a warning handler
# that will pass the warnings to each aggregator so they can do something useful
# with it.
#
# <tt>type</tt>:: The warning type (a Symbol)
# <tt>message</tt>:: A message explaining the warning
def warn(type, message)
@warnings += 1
@warning_handler.call(type, message, @current_lineno) if @warning_handler
end
protected
# Combines the different lines of a request into a single Request object. It will start a
  # new request when a header line is encountered and will emit the request when a footer line
# is encountered.
#
# Combining the lines is done using heuristics. Problems can occur in this process. The
# current parse strategy defines how these cases are handled.
#
# When using the 'assume-correct' parse strategy (default):
# - Every line that is parsed before a header line is ignored as it cannot be included in
# any request. It will emit a :no_current_request warning.
  # - If a header line is found before the previous request was closed, the previous request
# will be yielded and a new request will be started.
#
# When using the 'cautious' parse strategy:
# - Every line that is parsed before a header line is ignored as it cannot be included in
# any request. It will emit a :no_current_request warning.
# - A header line that is parsed before a request is closed by a footer line, is a sign of
  #   an improperly ordered file. All data that is gathered for the request until then is
# discarded and the next request is ignored as well. An :unclosed_request warning is
# emitted.
#
# <tt>request_data</tt>:: A hash of data that was parsed from the last line.
def update_current_request(request_data, &block) # :yields: request
if alternative_header_line?(request_data)
if @current_request
@current_request << request_data
else
@current_request = @file_format.request(request_data)
end
elsif header_line?(request_data)
if @current_request
case options[:parse_strategy]
when 'assume-correct'
handle_request(@current_request, &block)
@current_request = @file_format.request(request_data)
when 'cautious'
@skipped_lines += 1
warn(:unclosed_request, "Encountered header line (#{request_data[:line_definition].name.inspect}), but previous request was not closed!")
@current_request = nil # remove all data that was parsed, skip next request as well.
end
elsif footer_line?(request_data)
handle_request(@file_format.request(request_data), &block)
else
@current_request = @file_format.request(request_data)
end
else
if @current_request
@current_request << request_data
if footer_line?(request_data)
handle_request(@current_request, &block) # yield @current_request
@current_request = nil
end
else
@skipped_lines += 1
warn(:no_current_request, "Parseable line (#{request_data[:line_definition].name.inspect}) found outside of a request!")
end
end
end
# Handles the parsed request by sending it into the pipeline.
#
# - It will call RequestLogAnalyzer::Request#validate on the request instance
# - It will send the request into the pipeline, checking whether it was accepted by all the filters.
# - It will update the parsed_requests and skipped_requests variables accordingly
#
# <tt>request</tt>:: The parsed request instance (RequestLogAnalyzer::Request)
def handle_request(request, &_block) # :yields: :request, request
@parsed_requests += 1
request.validate
accepted = block_given? ? yield(request) : true
@skipped_requests += 1 unless accepted
end
# Checks whether a given line hash is an alternative header line according to the current file format.
# <tt>hash</tt>:: A hash of data that was parsed from the line.
def alternative_header_line?(hash)
hash[:line_definition].header == :alternative
end
# Checks whether a given line hash is a header line according to the current file format.
# <tt>hash</tt>:: A hash of data that was parsed from the line.
def header_line?(hash)
hash[:line_definition].header == true
end
# Checks whether a given line hash is a footer line according to the current file format.
# <tt>hash</tt>:: A hash of data that was parsed from the line.
def footer_line?(hash)
hash[:line_definition].footer
end
end
|
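A short sketch of parse_string, which wraps the input in a StringIO and delegates to parse_io; the log fragment path is a placeholder, and the block runs once per fully combined request (i.e. after a footer line has been seen).

require 'request_log_analyzer'

format = RequestLogAnalyzer::FileFormat.load(:rails)
parser = RequestLogAnalyzer::Source::LogParser.new(format)

parser.parse_string(File.read('fragment.log')) do |request|
  p request
end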
PierreRambaud/gemirro | lib/gemirro/configuration.rb | Gemirro.Configuration.define_source | ruby | def define_source(name, url, &block)
source = Source.new(name, url)
source.instance_eval(&block)
@source = source
end | Define the source to mirror.
@param [String] name
@param [String] url
@param [Proc] block | train | https://github.com/PierreRambaud/gemirro/blob/5c6b5abb5334ed3beb256f6764bc336e2cf2dc21/lib/gemirro/configuration.rb#L202-L207 | class Configuration < Confstruct::Configuration
attr_accessor :source
attr_writer :logger
LOGGER_LEVEL = {
'debug' => Logger::DEBUG,
'warning' => Logger::WARN,
'info' => Logger::INFO,
'unknown' => Logger::UNKNOWN,
'error' => Logger::ERROR,
'fatal' => Logger::FATAL
}.freeze
##
# Returns the logger
#
# @return [Logger]
#
def logger
@logger ||= Logger.new(STDOUT)
end
##
# Set log level
#
# @param [string]
#
# @return [Logger]
#
def logger_level=(level)
logger.level = LOGGER_LEVEL[level] if LOGGER_LEVEL.key?(level)
logger
end
##
# Returns the template path to init directory
#
# @return [String]
#
def self.template_directory
File.expand_path('../../../template', __FILE__)
end
##
# Returns the views path to render templates
#
# @return [String]
#
def self.views_directory
File.expand_path('../../../views', __FILE__)
end
##
# Returns default configuration file path
#
# @return [String]
#
def self.default_configuration_file
File.expand_path('config.rb', Dir.pwd)
end
##
# Returns the name of the directory that contains the quick
# specification files.
#
# @return [String]
#
def self.marshal_identifier
"Marshal.#{marshal_version}"
end
##
# Returns the name of the file that contains an index of all the versions.
#
# @return [String]
#
def self.versions_file
"specs.#{marshal_version}.gz"
end
##
# Returns the name of the file that contains an index
# of all the prerelease versions.
#
# @return [String]
#
def self.prerelease_versions_file
"prerelease_specs.#{marshal_version}.gz"
end
##
# Returns a String containing the Marshal version.
#
# @return [String]
#
def self.marshal_version
"#{Marshal::MAJOR_VERSION}.#{Marshal::MINOR_VERSION}"
end
##
# Return mirror directory
#
# @return [Gemirro::MirrorDirectory]
#
def mirror_gems_directory
@mirror_gems_directory ||= MirrorDirectory.new(gems_directory)
end
##
# Returns gems directory
#
# @return [String]
#
def gems_directory
File.join(destination.to_s, 'gems')
end
##
# Return mirror directory
#
# @return [Gemirro::MirrorDirectory]
#
def mirror_gemspecs_directory
@mirror_gemspecs_directory ||= MirrorDirectory.new(gemspecs_directory)
end
##
# Returns gems directory
#
# @return [String]
#
def gemspecs_directory
File.join(destination.to_s, 'quick', self.class.marshal_identifier)
end
##
# Returns a Hash containing various Gems to ignore and their versions.
#
# @return [Hash]
#
def ignored_gems
@ignored_gems ||= Hash.new { |hash, key| hash[key] = {} }
end
##
# Adds a Gem to the list of Gems to ignore.
#
# @param [String] name
# @param [String] version
#
def ignore_gem(name, version, platform)
ignored_gems[platform] ||= {}
ignored_gems[platform][name] ||= []
ignored_gems[platform][name] << version
end
##
# Checks if a Gem should be ignored.
#
# @param [String] name
# @param [String] version
# @return [TrueClass|FalseClass]
#
def ignore_gem?(name, version, platform)
if ignored_gems[platform][name]
ignored_gems[platform][name].include?(version)
else
false
end
end
end
|
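An illustrative config.rb snippet for define_source; the block is instance_eval'd against the new Source, and the `gem` calls inside it are modeled on Gemirro's config template rather than verified API.

define_source 'rubygems', 'https://rubygems.org' do
  gem 'rack'
  gem 'rails'
end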
shadowbq/snort-thresholds | lib/threshold/thresholds.rb | Threshold.Thresholds.valid? | ruby | def valid?
begin
self.each do |threshold|
if threshold.respond_to?(:valid?)
return false unless threshold.valid?
else
raise InvalidThresholdsObject, "Container object has unknown objects"
end
end
return true
rescue InvalidThresholdsObject
return false
end
end | Check if all objects in the Threshold Instance report .valid? | train | https://github.com/shadowbq/snort-thresholds/blob/e3e9d1b10c2460846e1779fda67e8bec0422f53e/lib/threshold/thresholds.rb#L64-L77 | class Thresholds
extend Forwardable
attr_accessor :file, :readonly
def_delegators :@thresholds, :<<, :length, :push, :pop, :first, :last, :<=>, :==, :clear, :[], :[]=, :shift, :unshift, :each, :sort!, :shuffle!, :collect!, :map!, :reject!, :delete_if, :select!, :keep_if, :index, :include?
def initialize(thresholds = [])
@thresholds = thresholds
end
# Write changes to the file
def flush
begin
valid_existing_file?(@file)
raise ReadOnlyThresholdsFile if @readonly
hash = current_hash
file = File.open(@file, 'w+')
raise ThresholdAtomicLockFailure, 'The @file state/hash changed before we could flush the file' unless stored_hash == hash
file.write self.sort.to_s
file.close
rescue NonExistantThresholdFile
raise ReadOnlyThresholdsFile if @readonly
file = File.open(@file, 'w')
file.write self.sort.to_s
file.close
end
    self.stored_hash = current_hash
return true
end
# Clears current collection and Read in the thresholds.conf file
def loadfile!
@thresholds.clear
loadfile
end
# Append in the thresholds.conf file to current collection
def loadfile
valid_existing_file?(@file)
results = Threshold::Parser.new(@file)
@stored_hash= results.filehash
#puts stored_hash
results.caps.each do |result|
builder = Threshold::Builder.new(result)
self << builder.build
end
end
  # Printer
  # Pass true to to_s to skip the printing of InternalObjects.comment
def to_s(skip = false)
output = ""
raise InvalidThresholdsObject, "Container object has unknown objects" unless valid?
self.each do |threshold|
output << threshold.to_s(skip) + "\n"
end
return output
end
# The calculated hash of the threshold.conf file at load time.
def stored_hash
@stored_hash
end
def to_a
@thresholds
end
## Forwardable Corrections:
## Corrected for forwardable due to Core Array returning new Arrays on the methods.
# Array(@thresholds) Creates a new Array on @threshold.sort so.. direct forwardable delegation fails.
# Returns a new Threshold Object
def sort
Thresholds.new(@thresholds.sort)
end
# Returns a new Threshold Object
def reverse
Thresholds.new(@thresholds.reverse)
end
# Returns a new Threshold Object
def shuffle
Thresholds.new(@thresholds.shuffle)
end
# Returns a new Threshold Object
def reject(&blk)
if block_given?
Thresholds.new(@thresholds.reject(&blk))
else
Thresholds.new(@thresholds.reject)
end
end
# Returns a new Threshold Object
def select(&blk)
if block_given?
Thresholds.new(@thresholds.select(&blk))
else
Thresholds.new(@thresholds.select)
end
end
#Uniques by default to printable output
# Returns a new Threshold Object
def uniq(&blk)
if block_given?
Thresholds.new(@thresholds.uniq(&blk))
else
Thresholds.new(@thresholds.uniq{ |lineitem| lineitem.to_s(true) })
end
end
## Complex SET Methods
## &(union), | (intersect), + (concat), - (Difference)
# + (concat)
# Returns a new Threshold Object
def +(an0ther)
Thresholds.new(@thresholds + an0ther.to_a)
end
# | (intersect)
# Returns a new Threshold Object
def |(an0ther)
Thresholds.new(@thresholds | an0ther.to_a)
end
# & (union)
# Returns a new Threshold Object
def &(an0ther)
Thresholds.new(@thresholds & an0ther.to_a)
end
# - (Difference)
# Returns a new Threshold Object
def -(an0ther)
Thresholds.new(@thresholds - an0ther.to_a)
end
# Returns a new Threshold Object with just suppressions
def suppressions(&blk)
if block_given?
self.suppressions.select(&blk)
else
Thresholds.new(@thresholds.select{|t| t.class.to_s == "Threshold::Suppression"})
end
end
# Returns a new Threshold Object with just event_filters
def event_filters(&blk)
if block_given?
self.event_filters.select(&blk)
else
Thresholds.new(@thresholds.select{|t| t.class.to_s == "Threshold::EventFilter"})
end
end
# Returns a new Threshold Object with just rate_filters
def rate_filters(&blk)
if block_given?
self.rate_filters.select(&blk)
else
Thresholds.new(@thresholds.select{|t| t.class.to_s == "Threshold::RateFilter"})
end
end
private
def stored_hash=(foo)
@stored_hash=foo
end
def current_hash
file = File.open(@file, 'rb+')
file.flock(File::LOCK_EX)
hash = Digest::MD5.file @file
file.close
return hash
end
def valid_existing_file?(file)
if file !=nil
raise NonExistantThresholdFile, "Missing threshold.conf" unless (File.file?(file) and File.exists?(file))
else
raise MissingThresholdFileConfiguration, "Missing threshold.conf path. See README for Usage."
end
return true
end
end
|
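A usage sketch for the Thresholds container above; the file path is a placeholder, and loadfile/flush require a readable threshold.conf on disk.

require 'threshold'

thresholds = Threshold::Thresholds.new
thresholds.file = '/etc/snort/threshold.conf'
thresholds.loadfile # parses entries and records the file hash

puts thresholds.valid?              # true only if every item reports valid?
puts thresholds.suppressions.length # filtered view, itself a Thresholds
thresholds.flush unless thresholds.readonly # write back, sorted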
motion-kit/motion-kit | lib/motion-kit-osx/helpers/nswindow_frame_helpers.rb | MotionKit.NSWindowHelpers.above | ruby | def above(from_window, f={})
_calculate_frame(f, from: from_window, relative_to: { x: :reset, y: :above })
end | The first arg can be a window or a frame
@example | train | https://github.com/motion-kit/motion-kit/blob/fa01dd08497b0dd01090156e58552be9d3b25ef1/lib/motion-kit-osx/helpers/nswindow_frame_helpers.rb#L330-L332 | class NSWindowHelpers
def _fix_frame_value(value)
if value.is_a?(Hash) && value[:relative]
return value.merge(flipped: true)
end
return value
end
def frame(value, autosave_name=nil)
value = _fix_frame_value(value)
screen = target.screen || NSScreen.mainScreen
value = MotionKit.calculate(target, :frame, value, screen)
target.setFrame(value, display: true)
if autosave_name
target.setFrameAutosaveName(autosave_name)
end
return target.frame
end
def x(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.origin.x = MotionKit.calculate(target, :width, value, screen)
target.setFrame(f, display: true)
return CGRectGetMinX(f)
end
alias left x
def right(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.origin.x = MotionKit.calculate(target, :width, value, screen) - f.size.width
target.setFrame(f, display: true)
return CGRectGetMaxX(f)
end
def center_x(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.origin.x = MotionKit.calculate(target, :width, value, screen)
f.origin.x -= f.size.width / 2
target.setFrame(f, display: true)
return CGRectGetMidX(target.frame)
end
alias middle_x center_x
def y(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.origin.y = MotionKit.calculate(target, :height, value, screen)
target.setFrame(f, display: true)
return CGRectGetMinY(f)
end
def bottom(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.origin.y = MotionKit.calculate(target, :height, value, screen)
target.setFrame(f, display: true)
return CGRectGetMinY(target.frame)
end
def top(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.origin.y = MotionKit.calculate(target, :height, value, screen)
f.origin.y -= f.size.height
target.setFrame(f, display: true)
return CGRectGetMaxY(f)
end
def center_y(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.origin.y = MotionKit.calculate(target, :height, value, screen)
f.origin.y -= f.size.height / 2
target.setFrame(f, display: true)
return CGRectGetMidY(target.frame)
end
alias middle_y center_y
def width(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.size.width = MotionKit.calculate(target, :width, value, screen)
target.setFrame(f, display: true)
return CGRectGetWidth(f)
end
alias w width
def height(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.size.height = MotionKit.calculate(target, :height, value, screen)
target.setFrame(f, display: true)
return CGRectGetHeight(f)
end
alias h height
def origin(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.origin = MotionKit.calculate(target, :origin, value, screen)
target.setFrame(f, display: true)
return target.frame.origin
end
def center(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
center = MotionKit.calculate(target, :center, value, screen)
origin = CGPoint.new(center.x, center.y)
origin.x -= f.size.width / 2
origin.y -= f.size.height / 2
f.origin = origin
target.setFrame(f, display: true)
return center
end
alias middle center
def size(value)
value = _fix_frame_value(value)
f = target.frame
screen = target.screen || NSScreen.mainScreen
f.size = MotionKit.calculate(target, :size, value, screen)
target.setFrame(f, display: true)
return target.frame.size
end
def _calculate_frame(f, from: from_window, relative_to: point)
if from_window.is_a?(Symbol)
from_window = self.get_view(from_window)
end
from_window_size = from_window.frame.size
calculate_window = target
if point[:x] == :reset || point[:y] == :reset
calculate_window = NSWindow.alloc.init
calculate_window.setFrame([[0, 0], target.frame.size], display: false)
end
if f.is_a?(Hash)
f = f.merge(relative: true, flipped: true)
end
f = MotionKit.calculate(calculate_window, :frame, f, from_window)
if from_window.is_a?(NSWindow)
f.origin.x += from_window.frame.origin.x
f.origin.y += from_window.frame.origin.y
end
case point[:x]
when :min, :reset
# pass
when :mid
f.origin.x += (from_window_size.width - f.size.width) / 2.0
when :max
f.origin.x += from_window_size.width - f.size.width
when :before
f.origin.x -= f.size.width
when :after
f.origin.x += from_window_size.width
else
f.origin.x += point[:x]
end
case point[:y]
when :reset, :min
# pass
when :mid
f.origin.y += (from_window_size.height - f.size.height) / 2.0
when :max
f.origin.y += from_window_size.height - f.size.height
when :above
f.origin.y += from_window_size.height
when :below
f.origin.y -= f.size.height
else
f.origin.y += point[:y]
end
return f
end
# The first arg can be a window or a frame
# @example
# frame from_top_left(width: 80, height: 22)
# frame from_top_left(another_view, width: 80, height: 22)
def from_top_left(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :min, y: :max })
end
# The first arg can be a window or a frame
# @example
# frame from_top(width: 80, height: 22)
# frame from_top(another_view, width: 80, height: 22)
def from_top(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :mid, y: :max })
end
# The first arg can be a window or a frame
# @example
# frame from_top_right(width: 80, height: 22)
# frame from_top_right(another_view, width: 80, height: 22)
def from_top_right(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :max, y: :max })
end
# The first arg can be a window or a frame
# @example
# frame from_left(width: 80, height: 22)
# frame from_left(another_view, width: 80, height: 22)
def from_left(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :min, y: :mid })
end
# The first arg can be a window or a frame
# @example
# frame from_center(width: 80, height: 22)
# frame from_center(another_view, width: 80, height: 22)
def from_center(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :mid, y: :mid })
end
# The first arg can be a window or a frame
# @example
# frame from_right(width: 80, height: 22)
# frame from_right(another_view, width: 80, height: 22)
def from_right(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :max, y: :mid })
end
# The first arg can be a window or a frame
# @example
# frame from_bottom_left(width: 80, height: 22)
# frame from_bottom_left(another_view, width: 80, height: 22)
def from_bottom_left(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :min, y: :min })
end
# The first arg can be a window or a frame
# @example
# frame from_bottom(width: 80, height: 22)
# frame from_bottom(another_view, width: 80, height: 22)
def from_bottom(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :mid, y: :min })
end
# The first arg can be a window or a frame
# @example
# frame from_bottom_right(width: 80, height: 22)
# frame from_bottom_right(another_view, width: 80, height: 22)
def from_bottom_right(from_window=nil, f=nil)
if from_window.is_a?(Hash)
f = from_window
from_window = nil
end
f ||= {}
from_window ||= target.screen || NSScreen.mainScreen
_calculate_frame(f, from: from_window, relative_to: { x: :max, y: :min })
end
# The first arg can be a window or a frame
# @example
def below(from_window, f={})
_calculate_frame(f, from: from_window, relative_to: { x: :reset, y: :below })
end
# The first arg can be a window or a frame
# @example
def before(from_window, f={})
_calculate_frame(f, from: from_window, relative_to: { x: :before, y: :reset })
end
alias left_of before
# The first arg can be a window or a frame
# @example
def after(from_window, f={})
_calculate_frame(f, from: from_window, relative_to: { x: :after, y: :reset })
end
alias right_of after
# The first arg must be a view
# @example
# frame relative_to(another_view, width: 80, height: 22)
def relative_to(from_window, f)
_calculate_frame(f, from: from_window, relative_to: { x: :reset, y: :reset })
end
end
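# Usage sketch (illustrative, not from the source). Assuming these helpers
# are mixed into a layout context that supplies `target` and `frame`, and
# that `another_view` is an existing view:
#
# frame from_top_right(width: 80, height: 22) # pin to the screen's top right
# frame from_center(another_view, width: 100, height: 40) # center on another view
# frame below(another_view, height: 22) # stack beneath another view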
|
giraffi/zcloudjp | lib/zcloudjp/utils.rb | Zcloudjp.Utils.parse_params | ruby | def parse_params(params, key_word)
body = params.has_key?(:path) ? load_file(params[:path], key_word) : params
body = { key_word => body } unless body.has_key?(key_word.to_sym)
body
end | Parses given params or file and returns Hash including the given key. | train | https://github.com/giraffi/zcloudjp/blob/0ee8dd49cf469fd182a48856fae63f606a959de5/lib/zcloudjp/utils.rb#L7-L11 | module Utils
# Parses given params or file and returns Hash including the given key.
# Loads a specified file and returns Hash including the given key.
def load_file(path, key_word)
begin
data = MultiJson.load(IO.read(File.expand_path(path)), symbolize_keys: true)
rescue RuntimeError, Errno::ENOENT => e
raise e.message
rescue MultiJson::LoadError => e
raise e.message
end
if data.has_key?(key_word)
data[key_word].map { |k,v| data[key_word][k] = v } if data[key_word].is_a? Hash
end
data
end
end
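# Usage sketch (illustrative, not from the source); the :server key and
# the file path are assumptions:
#
# include Zcloudjp::Utils
# parse_params({ name: 'web01' }, :server)
# # => { server: { name: 'web01' } }
# parse_params({ path: '~/server.json' }, :server) # loads and wraps the JSON file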
|
HewlettPackard/hpe3par_ruby_sdk | lib/Hpe3parSdk/client.rb | Hpe3parSdk.Client.logout | ruby | def logout
unless @log_file_path.nil?
if Hpe3parSdk.logger != nil
Hpe3parSdk.logger.close
Hpe3parSdk.logger = nil
end
end
begin
@http.unauthenticate
rescue Hpe3parSdk::HPE3PARException => ex
#Do nothing
end
end | Logout from the 3PAR Array | train | https://github.com/HewlettPackard/hpe3par_ruby_sdk/blob/f8cfc6e597741be593cf7fe013accadf982ee68b/lib/Hpe3parSdk/client.rb#L2633-L2645 | class Client
def initialize(api_url,debug:false, secure: false, timeout: nil, suppress_ssl_warnings: false, app_type: 'ruby_SDK_3par', log_file_path: nil)
unless api_url.is_a?(String)
raise Hpe3parSdk::HPE3PARException.new(nil,
"'api_url' parameter is mandatory and should be of type String")
end
@api_url = api_url
@debug = debug
@secure = secure
@timeout = timeout
@suppress_ssl_warnings = suppress_ssl_warnings
@log_level = Logger::INFO
@log_file_path = log_file_path
init_log
@http = HTTPJSONRestClient.new(
@api_url, @secure, @debug,
@suppress_ssl_warnings, @timeout = nil
)
check_WSAPI_version
@vlun_query_supported = false
@cpg = CPGManager.new(@http)
@qos = QOSManager.new(@http)
@flash_cache = FlashCacheManager.new(@http)
@port = PortManager.new(@http)
@task = TaskManager.new(@http)
@host_and_vv_set_filter_supported = false
@ssh = nil
@vlun = VlunManager.new(@http, @vlun_query_supported)
@host = HostManager.new(@http, @vlun_query_supported)
@volume_set = VolumeSetManager.new(@http, @host_and_vv_set_filter_supported)
@host_set = HostSetManager.new(@http, @host_and_vv_set_filter_supported)
@app_type = app_type
end
private def init_log
unless @log_file_path.nil?
client_logger = Logger.new(@log_file_path, 'daily', formatter: CustomFormatter.new)
else
client_logger = Logger.new(STDOUT)
end
if @debug
@log_level = Logger::DEBUG
end
Hpe3parSdk.logger = MultiLog.new(:level => @log_level, :loggers => client_logger)
end
private def check_WSAPI_version
begin
@api_version = get_ws_api_version
rescue HPE3PARException => ex
ex_message = ex.message
if ex_message && ex_message.include?('SSL Certificate Verification Failed')
raise Hpe3parSdk::SSLCertFailed
else
msg = "Error: #{ex_message} - Error communicating with 3PAR WSAPI. '
'Check proxy settings. If error persists, either the '
'3PAR WSAPI is not running OR the version of the WSAPI is '
'not supported."
raise Hpe3parSdk::HPE3PARException(message: msg)
end
end
compare_version(@api_version)
end
private def set_ssh_options(username, password, port=22, conn_timeout=nil)
@ssh=Hpe3parSdk::SSH.new(@api_url.split("//")[1].split(":")[0], username, password)
end
private def compare_version(api_version)
@min_version = WSAPIVersion
.parse(WSAPIVersionSupport::WSAPI_MIN_SUPPORTED_VERSION)
@min_version_with_compression = WSAPIVersion
.parse(WSAPIVersionSupport::WSAPI_MIN_VERSION_COMPRESSION_SUPPORT)
@current_version = WSAPIVersion.new(api_version['major'], api_version['minor'],
api_version['revision'])
if @current_version < @min_version
err_msg = "Unsupported 3PAR WS API version #{@current_version}, min supported version is, #{WSAPIVersionSupport::WSAPI_MIN_SUPPORTED_VERSION}"
raise Hpe3parSdk::UnsupportedVersion.new(nil, err_msg)
end
# Check for VLUN query support.
min_vlun_query_support_version = WSAPIVersion
.parse(WSAPIVersionSupport::WSAPI_MIN_VERSION_VLUN_QUERY_SUPPORT)
if @current_version >= min_vlun_query_support_version
@vlun_query_supported = true
end
# Check for Host and VV Set query support
if @current_version >= @min_version_with_compression
@host_and_vv_set_filter_supported = true
end
end
# Get the 3PAR WS API version.
#
# ==== Returns
#
# WSAPI version hash
def get_ws_api_version
# remove everything down to host:port
host_url = @api_url.split('/api')
@http.set_url(host_url[0])
begin
# get the api version
response = @http.get('/api')
response[1]
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
ensure
# reset the url
@http.set_url(@api_url)
end
# Gets the WSAPI Configuration.
#
# ==== Returns
#
# WSAPI configuration hash
def get_ws_api_configuration_info
begin
response = @http.get('/wsapiconfiguration')
response[1]
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new FlashCache
#
# ==== Attributes
#
# * size_in_gib - Specifies the node pair size of the Flash Cache on the system
# type size_in_gib: Integer
# * mode - Values supported Simulator: 1, Real: 2 (default)
# type mode: Integer
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Not enough space is available for the operation.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_RANGE - A JSON input object contains a name-value pair with a numeric value that exceeds the expected range. Flash Cache exceeds the expected range. The HTTP ref member contains the name.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_FLASH_CACHE - The Flash Cache already exists.
# * Hpe3parSdk::HTTPForbidden
# - FLASH_CACHE_NOT_SUPPORTED - Flash Cache is not supported.
# * Hpe3parSdk::HTTPBadRequest
# - INV_FLASH_CACHE_SIZE - Invalid Flash Cache size. The size must be a multiple of 16 G.
def create_flash_cache(size_in_gib, mode = nil)
begin
@flash_cache.create_flash_cache(size_in_gib, mode)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Get Flash Cache information
#
# ==== Returns
#
# FlashCache - Details of the specified flash cache
def get_flash_cache
begin
@flash_cache.get_flash_cache
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes an existing Flash Cache
#
# ==== Raises
#
# * Hpe3parSdk::HTTPForbidden
# - FLASH_CACHE_IS_BEING_REMOVED - Unable to delete the Flash Cache, the Flash Cache is being removed.
# * Hpe3parSdk::HTTPForbidden
# - FLASH_CACHE_NOT_SUPPORTED - Flash Cache is not supported on this system.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_FLASH_CACHE - The Flash Cache does not exist.
def delete_flash_cache
begin
@flash_cache.delete_flash_cache
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the Storage System Information
#
# ==== Returns
#
# Hash of Storage System Info
def get_storage_system_info
begin
response = @http.get('/system')
response[1]
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the overall system capacity for the 3PAR server.
#
# ==== Returns
#
# Hash of system capacity information
#
#
# capacity = {
# "allCapacity"=> { # Overall system capacity
# # includes FC, NL, SSD
# # device types
# "totalMiB"=>20054016, # Total system capacity
# # in MiB
# "allocated"=>{ # Allocated space info
# "totalAllocatedMiB"=>12535808, # Total allocated
# # capacity
# "volumes"=> { # Volume capacity info
# "totalVolumesMiB"=>10919936, # Total capacity
# # allocated to volumes
# "nonCPGsMiB"=> 0, # Total non-CPG capacity
# "nonCPGUserMiB"=> 0, # The capacity allocated
# # to non-CPG user space
# "nonCPGSnapshotMiB"=>0, # The capacity allocated
# # to non-CPG snapshot
# # volumes
# "nonCPGAdminMiB"=> 0, # The capacity allocated
# # to non-CPG
# # administrative volumes
# "CPGsMiB"=>10919936, # Total capacity
# # allocated to CPGs
# "CPGUserMiB"=>7205538, # User CPG space
# "CPGUserUsedMiB"=>7092550, # The CPG allocated to
# # user space that is
# # in use
# "CPGUserUnusedMiB"=>112988, # The CPG allocated to
# # user space that is not
# # in use
# "CPGSnapshotMiB"=>2411870, # Snapshot CPG space
# "CPGSnapshotUsedMiB"=>210256, # CPG allocated to
# # snapshot that is in use
# "CPGSnapshotUnusedMiB"=>2201614, # CPG allocated to
# # snapshot space that is
# # not in use
# "CPGAdminMiB"=>1302528, # Administrative volume
# # CPG space
# "CPGAdminUsedMiB"=> 115200, # The CPG allocated to
# # administrative space
# # that is in use
# "CPGAdminUnusedMiB"=>1187328, # The CPG allocated to
# # administrative space
# # that is not in use
# "unmappedMiB"=>0 # Allocated volume space
# # that is unmapped
# },
# "system"=> { # System capacity info
# "totalSystemMiB"=> 1615872, # System space capacity
# "internalMiB"=>780288, # The system capacity
# # allocated to internal
# # resources
# "spareMiB"=> 835584, # Total spare capacity
# "spareUsedMiB"=> 0, # The system capacity
# # allocated to spare resources
# # in use
# "spareUnusedMiB"=> 835584 # The system capacity
# # allocated to spare resources
# # that are unused
# }
# },
# "freeMiB"=> 7518208, # Free capacity
# "freeInitializedMiB"=> 7518208, # Free initialized capacity
# "freeUninitializedMiB"=> 0, # Free uninitialized capacity
# "unavailableCapacityMiB"=> 0, # Unavailable capacity in MiB
# "failedCapacityMiB"=> 0 # Failed capacity in MiB
# },
# "FCCapacity"=> { # System capacity from FC devices only
# ... # Same structure as above
# },
# "NLCapacity"=> { # System capacity from NL devices only
# ... # Same structure as above
# },
# "SSDCapacity"=> { # System capacity from SSD devices only
# ... # Same structure as above
# }
# }
def get_overall_system_capacity
begin
response = @http.get('/capacity')
response[1]
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# This authenticates against the 3PAR WSAPI server and creates a session.
# ==== Attributes
#
# * username - The username
# type username: String
# * password - The Password
# type password: String
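#
# ==== Example
#
# A minimal usage sketch (illustrative, not from the source); the WSAPI
# URL and the credentials below are placeholder assumptions:
#
# client = Hpe3parSdk::Client.new('https://10.0.0.1:8080/api/v1')
# client.login('3paradm', '3pardata')
# # ... issue WSAPI calls ...
# client.logout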
def login(username, password, optional = nil)
set_ssh_options(username, password)
@volume = VolumeManager.new(@http, @ssh, @app_type)
@http.authenticate(username, password, optional)
end
# Get the list of all 3PAR Tasks
#
# ==== Returns
#
# Array of Task
def get_all_tasks
begin
@task.get_all_tasks
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Get the status of a 3PAR Task
#
# ==== Attributes
#
# * task_id - the task id
# type task_id: Integer
#
# ==== Returns
#
# Task
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BELOW_RANGE - Bad Request Task ID must be a positive value.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_RANGE - Bad Request Task ID is too large.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_TASK - Task with the specified Task ID does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_WRONG_TYPE - Task ID is not an integer.
def get_task(task_id)
begin
@task.get_task(task_id)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Returns true if a VLUN exists for the given volume name and LUN id,
# optionally scoped to a host and/or port.
def vlun_exists?(volname, lunid, host = nil, port = nil)
begin
@vlun.vlun_exists?(volname,lunid,host,port)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new VLUN.
#
# When creating a VLUN, the volumeName is required. The lun member is
# not required if auto is set to True.
# Either hostname or portPos (or both in the case of matched sets) is
# also required. The noVcn and overrideLowerPriority members are
# optional.
#
# ==== Attributes
#
# * volume_name: Name of the volume to be exported
# type volume_name: String
# * lun: LUN id
# type lun: Integer
# * host_name: Name of the host to which the volume is to be exported.
# type host_name: String
# * port_pos: System port of VLUN exported to. It includes node number, slot number, and card port number
# type port_pos: Hash
# port_pos = {'node'=> 1, # System node (0-7)
# 'slot'=> 2, # PCI bus slot in the node (0-5)
# 'port'=> 1} # Port number on the FC card (0-4)
# * no_vcn: A VLUN change notification (VCN) not be issued after export (-novcn).
# type no_vcn: Boolean
# * override_lower_priority: Existing lower priority VLUNs will be overridden (-ovrd). Use only if hostname member exists.
# type override_lower_priority: Boolean
#
# ==== Returns
#
# VLUN id
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MISSING_REQUIRED - Missing volume or hostname or lunid.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - Specified volume does not exist.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Specified hostname not found.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_PORT - Specified port does not exist.
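#
# ==== Example
#
# An illustrative sketch (not from the source); 'my_vol' and 'my_host'
# are assumed to already exist on the array:
#
# # Export 'my_vol' to 'my_host' as LUN 1
# client.create_vlun('my_vol', 1, 'my_host')
# # Or let the system auto-assign the LUN id
# client.create_vlun('my_vol', nil, 'my_host', nil, false, false, true)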
def create_vlun(volume_name, lun = nil, host_name = nil, port_pos = nil, no_vcn = false, override_lower_priority = false, auto = false)
begin
@vlun.create_vlun(volume_name, host_name, lun, port_pos, no_vcn, override_lower_priority, auto)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets VLUNs.
#
# ==== Returns
#
# Array of VLUN objects
def get_vluns
begin
@vlun.get_vluns
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets information about a VLUN.
#
# ==== Attributes
#
# * volume_name: The volume name of the VLUN to find
# type volume_name: String
#
# ==== Returns
#
# VLUN object
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VLUN - VLUN doesn't exist
def get_vlun(volume_name)
begin
@vlun.get_vlun(volume_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a VLUN.
#
# ==== Attributes
#
# * volume_name: Volume name of the VLUN
# type volume_name: String
# * lun_id: LUN ID
# type lun_id: Integer
# * host_name: Name of the host to which the volume is exported. For a VLUN of port type, the value is empty
# type host_name: String
# * port: Specifies the system port of the VLUN export. It includes the system node number, PCI bus slot number, and card port number on the FC card in the format <node>:<slot>:<cardPort>
# type port: Hash
#
# port = {'node'=> 1, # System node (0-7)
# 'slot'=> 2, # PCI bus slot in the node (0-5)
# 'port'=>1} # Port number on the FC card (0-4)
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MISSING_REQUIRED - Incomplete VLUN info. Missing
# volumeName or lun, or both hostname and port.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PORT_SELECTION - Specified port is invalid.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_RANGE - The LUN specified exceeds expected
# range.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - The host does not exist
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VLUN - The VLUN does not exist
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_PORT - The port does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
def delete_vlun(volume_name, lun_id, host_name = nil, port = nil)
begin
@vlun.delete_vlun(volume_name, lun_id, host_name, port)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets QoS Rules.
#
# ==== Returns
#
# Array of QoSRule objects
#
def query_qos_rules
begin
@qos.query_qos_rules
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Queries a QoS rule
#
# ==== Attributes
#
# * target_name : Name of the target. When targetType is sys, target name must be sys:all_others.
# type target_name: String
# * target_type : Target type is vvset or sys
# type target_type: String
# ==== Returns
#
# QoSRule object
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_QOS_RULE - QoS rule does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Illegal character in the input.
def query_qos_rule(target_name, target_type = 'vvset')
begin
@qos.query_qos_rule(target_name, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Returns true if a QoS rule exists for the given target.
def qos_rule_exists?(target_name, target_type = 'vvset')
begin
@qos.qos_rule_exists?(target_name, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates QOS rules
# The QoS rule can be applied to VV sets. By using sys:all_others,
# you can apply the rule to all volumes in the system for which no
# QoS rule has been defined.
# ioMinGoal and ioMaxLimit must be used together to set I/O limits.
# Similarly, bwMinGoalKB and bwMaxLimitKB must be used together.
# If ioMaxLimitOP is set to 2 (no limit), ioMinGoalOP must also be
# set to 2 (no limit), and vice versa; they cannot be set to
# 'none' individually. Similarly, if bwMaxLimitOP is set to 2 (no
# limit), then bwMinGoalOP must also be set to 2.
# If ioMaxLimitOP is set to 1 (zero), ioMinGoalOP must also be
# set to 1 (zero), and vice versa. Similarly, if bwMaxLimitOP is
# set to 1 (zero), then bwMinGoalOP must also be set to 1.
# The ioMinGoalOP and ioMaxLimitOP fields take precedence over
# the ioMinGoal and ioMaxLimit fields.
# The bwMinGoalOP and bwMaxLimitOP fields take precedence over
# the bwMinGoalKB and bwMaxLimitKB fields
#
# ==== Attributes
#
# * target_type: Type of QoS target, either enum TARGET_TYPE_VVS or TARGET_TYPE_SYS.
# type target_type: VVSET or SYS. Refer Hpe3parSdk::QoStargetType for the complete enumeration
# * target_name: Name of the target object on which the QoS rule will be created.
# type target_name: String
# * qos_rules: QoS options
# type qos_rules: Hash
# qos_rules = {
# 'priority'=> 2, # Refer Hpe3parSdk::QoSpriorityEnumeration for complete enumeration
# 'bwMinGoalKB'=> 1024, # bandwidth rate minimum goal in
# # kilobytes per second
# 'bwMaxLimitKB'=> 1024, # bandwidth rate maximum limit in
# # kilobytes per second
# 'ioMinGoal'=> 10000, # I/O-per-second minimum goal
# 'ioMaxLimit'=> 2000000, # I/0-per-second maximum limit
# 'enable'=> false, # QoS rule for target enabled?
# 'bwMinGoalOP'=> 1, # zero none operation enum, when set to
# # 1, bandwidth minimum goal is 0
# # when set to 2, the bandwidth mimumum
# # goal is none (NoLimit)
# 'bwMaxLimitOP'=> 1, # zero none operation enum, when set to
# # 1, bandwidth maximum limit is 0
# # when set to 2, the bandwidth maximum
# # limit is none (NoLimit)
# 'ioMinGoalOP'=>1, # zero none operation enum, when set to
# # 1, I/O minimum goal is 0
# # when set to 2, the I/O minimum goal is
# # none (NoLimit)
# 'ioMaxLimitOP'=> 1, # zero none operation enum, when set to
# # 1, I/O maximum limit is 0
# # when set to 2, the I/O maximum limit
# # is none (NoLimit)
# 'latencyGoal'=>5000, # Latency goal in milliseconds
# 'defaultLatency'=> false# Use latencyGoal or defaultLatency?
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_RANGE - Invalid input: number exceeds expected range.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_QOS_RULE - QoS rule does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Illegal character in the input.
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_QOS_RULE - QoS rule already exists.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MIN_GOAL_GRT_MAX_LIMIT - I/O-per-second maximum limit should be greater than the minimum goal.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BW_MIN_GOAL_GRT_MAX_LIMIT - Bandwidth maximum limit should be greater than the minimum goal.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BELOW_RANGE - I/O-per-second limit is below range.Bandwidth limit is below range.
# * Hpe3parSdk::HTTPBadRequest
# - UNLICENSED_FEATURE - The system is not licensed for QoS.
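#
# ==== Example
#
# A hedged sketch (not from the source); 'my_vvset' is an assumed
# existing VV set and the limits are placeholder values:
#
# qos_rules = {
#   'priority' => 2,
#   'ioMinGoal' => 300,
#   'ioMaxLimit' => 1000
# }
# client.create_qos_rules('my_vvset', qos_rules, Hpe3parSdk::QoStargetType::VVSET)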
def create_qos_rules(target_name, qos_rules, target_type = QoStargetType::VVSET)
if @current_version < @min_version && !qos_rules.nil?
qos_rules.delete_if { |key, _value| key == :latencyGoaluSecs }
end
begin
@qos.create_qos_rules(target_name, qos_rules, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies an existing QoS rule.
#
# The QoS rule can be applied to VV sets. By using sys:all_others,
# you can apply the rule to all volumes in the system for which no
# QoS rule has been defined.
# ioMinGoal and ioMaxLimit must be used together to set I/O limits.
# Similarly, bwMinGoalKB and bwMaxLimitKB must be used together.
# If ioMaxLimitOP is set to 2 (no limit), ioMinGoalOP must also be
# set to 2 (no limit), and vice versa; they cannot be set to
# 'none' individually. Similarly, if bwMaxLimitOP is set to 2 (no
# limit), then bwMinGoalOP must also be set to 2.
# If ioMaxLimitOP is set to 1 (zero), ioMinGoalOP must also be
# set to 1 (zero), and vice versa. Similarly, if bwMaxLimitOP is
# set to 1 (zero), then bwMinGoalOP must also be set to 1.
# The ioMinGoalOP and ioMaxLimitOP fields take precedence over
# the ioMinGoal and ioMaxLimit fields.
# The bwMinGoalOP and bwMaxLimitOP fields take precedence over
# the bwMinGoalKB and bwMaxLimitKB fields
#
# ==== Attributes
#
# * target_name: Name of the target object on which the QoS rule will be created.
# type target_name: String
# * target_type: Type of QoS target, either vvset or sys.Refer Hpe3parSdk::QoStargetTypeConstants for complete enumeration
# type target_type: String
# * qos_rules: QoS options
# type qos_rules: Hash
# qos_rules = {
# 'priority'=> 2, # Refer Hpe3parSdk::QoSpriorityEnumeration for complete enumeration
# 'bwMinGoalKB'=> 1024, # bandwidth rate minimum goal in
# # kilobytes per second
# 'bwMaxLimitKB'=> 1024, # bandwidth rate maximum limit in
# # kilobytes per second
# 'ioMinGoal'=> 10000, # I/O-per-second minimum goal.
# 'ioMaxLimit'=> 2000000, # I/0-per-second maximum limit
# 'enable'=> True, # QoS rule for target enabled?
# 'bwMinGoalOP'=> 1, # zero none operation enum, when set to
# # 1, bandwidth minimum goal is 0
# # when set to 2, the bandwidth minimum
# # goal is none (NoLimit)
# 'bwMaxLimitOP'=> 1, # zero none operation enum, when set to
# # 1, bandwidth maximum limit is 0
# # when set to 2, the bandwidth maximum
# # limit is none (NoLimit)
# 'ioMinGoalOP'=> 1, # zero none operation enum, when set to
# # 1, I/O minimum goal is 0
# # when set to 2, the I/O minimum goal is
# # none (NoLimit)
# 'ioMaxLimitOP'=> 1, # zero none operation enum, when set to
# # 1, I/O maximum limit is 0
# # when set to 2, the I/O maximum limit
# # is none (NoLimit)
# 'latencyGoal'=> 5000, # Latency goal in milliseconds
# 'defaultLatency'=> false# Use latencyGoal or defaultLatency?
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_EXCEEDS_RANGE - Invalid input: number exceeds expected
# range.
# * Hpe3parSdk::HTTPNotFound
# NON_EXISTENT_QOS_RULE - QoS rule does not exist.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_ILLEGAL_CHAR - Illegal character in the input.
# * Hpe3parSdk::HTTPBadRequest
# EXISTENT_QOS_RULE - QoS rule already exists.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_IO_MIN_GOAL_GRT_MAX_LIMIT - I/O-per-second maximum limit
# should be greater than the minimum goal.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_BW_MIN_GOAL_GRT_MAX_LIMIT - Bandwidth maximum limit
# should be greater than the minimum goal.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_BELOW_RANGE - I/O-per-second limit is below
# range. Bandwidth limit is below range.
# * Hpe3parSdk::HTTPBadRequest
# UNLICENSED_FEATURE - The system is not licensed for QoS.
def modify_qos_rules(target_name, qos_rules, target_type = QoStargetTypeConstants::VVSET)
if @current_version < @min_version && !qos_rules.nil?
qos_rules.delete_if { |key, _value| key == :latencyGoaluSecs }
end
begin
@qos.modify_qos_rules(target_name, qos_rules, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes QoS rules.
#
# ==== Attributes
#
# * target_name: Name of the target. When target_type is sys, target_name must be sys:all_others.
# type target_name: String
# * target_type: target type is vvset or sys
# type target_type: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# NON_EXISTENT_QOS_RULE - QoS rule does not exist.
# * Hpe3parSdk::HTTPBadRequest
# INV_INPUT_ILLEGAL_CHAR - Illegal character in the input
def delete_qos_rules(target_name, target_type = QoStargetTypeConstants::VVSET)
begin
@qos.delete_qos_rules(target_name, target_type)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets all hosts.
#
# ==== Returns
#
# Array of Host.
def get_hosts
begin
@host.get_hosts
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets host information by name.
#
# ==== Attributes
#
# * name - The name of the host to find.
# type name: String
#
# ==== Returns
#
# Host.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid URI syntax.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Host not found.
# * Hpe3parSdk::HTTPInternalServerError
# - INT_SERV_ERR - Internal server error.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Host name contains invalid character.
def get_host(name)
begin
@host.get_host(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new Host.
#
# ==== Attributes
#
# * name - The name of the host.
# type name: String
# * iscsi_names - Array of iSCSI iqns.
# type iscsi_names: Array
# * fcwwns - Array of Fibre Channel World Wide Names.
# type fcwwns: Array
# * optional - Hash of optional host attributes and descriptors.
# type optional: Hash
# optional = {
# 'persona'=> 1, # Refer Hpe3parSdk::HostPersona for complete enumeration.
# # 3.1.3 default: Generic-ALUA
# # 3.1.2 default: General
# 'domain'=> 'myDomain', # Create the host in the
# # specified domain, or default
# # domain if unspecified.
# 'forceTearDown'=> false, # If True, force to tear down
# # low-priority VLUN exports.
# 'descriptors'=>
# {'location'=> 'earth', # The host's location
# 'IPAddr'=> '10.10.10.10', # The host's IP address
# 'os'=> 'linux', # The operating system running on the host.
# 'model'=> 'ex', # The host's model
# 'contact'=> 'Smith', # The host's owner and contact
# 'comment'=> "Joe's box"} # Additional host information
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MISSING_REQUIRED - Name not specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PARAM_CONFLICT - FCWWNs and iSCSINames are both specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_LENGTH - Host name, domain name, or iSCSI name is too long.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EMPTY_STR - Input string (for domain name, iSCSI name, etc.) is empty.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Any error from host-name or domain-name parsing.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_TOO_MANY_WWN_OR_iSCSI - More than 1024 WWNs or iSCSI names are specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_WRONG_TYPE - The length of WWN is not 16. WWN specification contains non-hexadecimal digit.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_PATH - host WWN/iSCSI name already used by another host.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_HOST - host name is already used.
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - No space to create host.
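#
# ==== Example
#
# An illustrative sketch (not from the source); the host name, IQN,
# and domain are assumptions:
#
# client.create_host('my_host',
#                    ['iqn.1993-08.org.debian:01:abcdef'],
#                    nil,
#                    {'domain' => 'myDomain', 'persona' => 2})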
def create_host(name, iscsi_names = nil, fcwwns = nil, optional = nil)
begin
@host.create_host(name, iscsi_names, fcwwns, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies an existing Host.
#
# ==== Attributes
#
# * name - Name of the host.
# type name: String
# * mod_request - Objects for host modification request.
# type mod_request: Hash
# mod_request = {
# 'newName'=> 'myNewName', # New name of the host
# 'pathOperation'=> 1, # Refer Hpe3parSdk::HostEditOperation for complete enumeration
# 'FCWWNs'=> [], # One or more WWN to set for the host.
# 'iSCSINames'=> [], # One or more iSCSI names to set for the host.
# 'forcePathRemoval'=> false, # If True, remove WWN(s) or
# # iSCSI(s) even if there are
# # VLUNs exported to host
# 'persona'=> 1, # Refer Hpe3parSdk::HostPersona for complete enumeration.
# 'descriptors'=>
# {'location'=> 'earth', # The host's location
# 'IPAddr'=> '10.10.10.10', # The host's IP address
# 'os'=> 'linux', # The operating system running on the host.
# 'model'=> 'ex', # The host's model
# 'contact'=> 'Smith', # The host's owner and contact
# 'comment'=> 'Joes box'} # Additional host information
# 'chapOperation'=> 1, # Refer Hpe3parSdk::HostEditOperation for complete enumeration
# 'chapOperationMode'=> TARGET, # Refer Hpe3parSdk::ChapOperationMode for complete enumeration
# 'chapName'=> 'MyChapName', # The chap name
# 'chapSecret'=> 'xyz', # The chap secret for the host or the target
# 'chapSecretHex'=> false, # If True, the chapSecret is treated as Hex.
# 'chapRemoveTargetOnly'=> true # If True, then remove target chap only
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Missing host name.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PARAM_CONFLICT - Both iSCSINames & FCWWNs are specified (among other possible conflicts).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ONE_REQUIRED - iSCSINames or FCWWNs missing.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ONE_REQUIRED - No path operation specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BAD_ENUM_VALUE - Invalid enum value.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_MISSING_REQUIRED - Required fields missing.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_LENGTH - Host descriptor argument length, new host name, or iSCSI name is too long.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Error parsing host or iSCSI name.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_HOST - New host name is already used.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Host to be modified does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_TOO_MANY_WWN_OR_iSCSI - More than 1024 WWNs or iSCSI names are specified.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_WRONG_TYPE - Input value is of the wrong type.
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_PATH - WWN or iSCSI name is already claimed by other host.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_BAD_LENGTH - CHAP hex secret length is not 16 bytes, or chap ASCII secret length is not 12 to 16 characters.
# * Hpe3parSdk::HTTPNotFound
# - NO_INITIATOR_CHAP - Setting target CHAP without initiator CHAP.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_CHAP - Remove non-existing CHAP.
# * Hpe3parSdk::HTTPConflict
# - NON_UNIQUE_CHAP_SECRET - CHAP secret is not unique.
# * Hpe3parSdk::HTTPConflict
# - EXPORTED_VLUN - Setting persona with active export; remove a host path on an active export.
# * Hpe3parSdk::HTTPBadRequest
# - NON_EXISTENT_PATH - Remove a non-existing path.
# * Hpe3parSdk::HTTPConflict
# - LUN_HOSTPERSONA_CONFLICT - LUN number and persona capability conflict.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_PATH - Duplicate path specified.
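#
# ==== Example
#
# A sketch only (not from the source); 'my_host' and the WWN are
# assumptions:
#
# mod_request = {'newName' => 'my_host_renamed',
#                'pathOperation' => 1, # see Hpe3parSdk::HostEditOperation
#                'FCWWNs' => ['0123456789ABCDEF']}
# client.modify_host('my_host', mod_request)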
def modify_host(name, mod_request)
begin
@host.modify_host(name, mod_request)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a host.
#
# ==== Attributes
#
# * name - The name of host to be deleted.
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Host not found
# * Hpe3parSdk::HTTPConflict
# - HOST_IN_SET - Host is a member of a set
def delete_host(name)
begin
@host.delete_host(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Finds the host with the specified FC WWN path.
#
# ==== Attributes
#
# * wwn - Lookup based on WWN.
# type wwn: String
#
# ==== Returns
#
# Host with specified FC WWN.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid URI syntax.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - Host not found.
# * Hpe3parSdk::HTTPInternalServerError
# - INTERNAL_SERVER_ERR - Internal server error.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Host name contains invalid character.
def query_host_by_fc_path(wwn = nil)
begin
@host.query_host_by_fc_path(wwn)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Finds the host with the specified iSCSI initiator.
#
# ==== Attributes
#
# * iqn - Lookup based on iSCSI initiator.
# type iqn: String
#
# ==== Returns
#
# Host with specified IQN.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid URI syntax.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - The specified host was not found.
# * Hpe3parSdk::HTTPInternalServerError
# - INTERNAL_SERVER_ERR - Internal server error.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - The host name contains invalid character.
def query_host_by_iscsi_path(iqn = nil)
begin
@host.query_host_by_iscsi_path(iqn)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets all host sets.
#
# ==== Returns
#
# Array of HostSet.
def get_host_sets
begin
@host_set.get_host_sets
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new HostSet.
#
# ==== Attributes
#
# * name - Name of the host set to be created.
# type name: String
# * domain - The domain in which the host set will be created.
# type domain: String
# * comment - Comment for the host set.
# type comment: String
# * setmembers - The hosts to be added to the set. The existence of the host will not be checked.
# type setmembers: Array of String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_SET - The set already exists.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_DOMAIN - The domain does not exist.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_DOMAINSET - The host is in a domain set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_SET - The object is already part of the set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_NOT_IN_SAME_DOMAIN - Objects must be in the same domain to perform this operation.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - The host does not exist.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_NAME - Invalid input (duplicate name).
def create_host_set(name, domain = nil, comment = nil, setmembers = nil)
begin
@host_set.create_host_set(name, domain, comment, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a HostSet.
#
# ==== Attributes
#
# * name - The hostset to delete.
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPConflict
# - EXPORTED_VLUN - The host set has exported VLUNs.
def delete_host_set(name)
begin
@host_set.delete_host_set(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies a HostSet.
#
# ==== Attributes
#
# * name - Hostset name
# type name: String
# * action - Add or Remove host(s) from the set
# type action: Refer values of Hpe3parSdk::SetCustomAction::MEM_ADD and Hpe3parSdk::SetCustomAction::MEM_REMOVE
# * setmembers - Host(s) to add to the set, the existence of the host(s) will not be checked
# type setmembers: Array of String
# * new_name - New name of set
# type new_name: String
# * comment - New comment for the set
# type comment: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_SET - The set already exists.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_DOMAINSET - The host is in a domain set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_SET - The object is already part of the set.
# * Hpe3parSdk::HTTPNotFound
# - MEMBER_NOT_IN_SET - The object is not part of the set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_NOT_IN_SAME_DOMAIN - Objects must be in the same domain to perform this operation.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_NAME - Invalid input (duplicate name).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PARAM_CONFLICT - Invalid input (parameters cannot be present at the same time).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Input contains one or more illegal characters.
def modify_host_set(name, action = nil, setmembers = nil, new_name = nil, comment = nil)
begin
@host_set.modify_host_set(name, action, setmembers, new_name, comment)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Adds host(s) to a host set.
#
# ==== Attributes
#
# * set_name - Hostset name.
# type set_name: String
# * setmembers - Array of host names to add to the set.
# type setmembers: Array of String
def add_hosts_to_host_set(set_name, setmembers)
begin
@host_set.add_hosts_to_host_set(set_name, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Removes host(s) from a host set.
#
# ==== Attributes
#
# * set_name - The host set name.
# type set_name: String
# * setmembers - Array of host names to remove from the set.
# type setmembers: Array of String
def remove_hosts_from_host_set(set_name, setmembers)
begin
@host_set.remove_hosts_from_host_set(set_name, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Returns an array of every Hostset the given host is a part of. The array can contain zero, one, or multiple items.
#
# ==== Attributes
#
# * host_name - The name of the host whose host sets are to be found.
# type host_name: String
#
# ==== Returns
#
# Array of HostSet.
def find_host_sets(host_name)
begin
@host_set.find_host_sets(host_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets hostset information by name.
#
# ==== Attributes
#
# * name - The name of the hostset to find.
# type name: String
#
# ==== Returns
#
# HostSet.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
def get_host_set(name)
begin
@host_set.get_host_set(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets all of the VLUNs on a specific host.
#
# ==== Attributes
#
# * host_name - Name of the host.
# type host_name: String
#
# ==== Returns
#
# Array of VLUN.
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_HOST - The specified host was not found.
def get_host_vluns(host_name)
begin
@host.get_host_vluns(host_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets all Volumes in the array
#
# ==== Returns
#
# Array of VirtualVolume
def get_volumes
begin
@volume.get_volumes(VolumeCopyType::BASE_VOLUME)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the list of snapshots in the array
#
# ==== Returns
#
# Array of VirtualVolume
def get_snapshots
begin
@volume.get_volumes(VolumeCopyType::VIRTUAL_COPY)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets information about a volume by name
#
# ==== Attributes
#
# * name - The name of the volume to find
# type name: String
#
# ==== Returns
#
# VirtualVolume
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 23 message: volume does not exist
def get_volume(name)
begin
@volume.get_volume(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets information about a volume by wwn
#
# ==== Attributes
#
# * wwn - The wwn of the volume to find
# type wwn: String
#
# ==== Returns
#
# * VirtualVolume
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 23 message: volume does not exist
def get_volume_by_wwn(wwn)
begin
@volume.get_volume_by_wwn(wwn)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new volume.
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
# * cpg_name - the name of the destination CPG
# type cpg_name: String
# * size_MiB - size in MiB for the volume
# type size_MiB: Integer
# * optional - hash of other optional items
# type optional: hash
#
# optional = {
# 'id' => 12, # Volume ID. If not specified, next
# # available is chosen
# 'comment' => 'some comment', # Additional information up to 511
# # characters
# 'policies' => { # Specifies VV policies
# 'staleSS' => false, # True allows stale snapshots.
# 'oneHost' => true, # True constrains volume export to
# # single host or host cluster
# 'zeroDetect' => true, # True requests Storage System to
# # scan for zeros in incoming write
# # data
# 'system' => false, # True special volume used by system
# # False is normal user volume
# 'caching' => true}, # Read-only. True indicates write &
# # read caching & read ahead enabled
# 'snapCPG' => 'CPG name', # CPG Used for snapshots
# 'ssSpcAllocWarningPct' => 12, # Snapshot space allocation warning
# 'ssSpcAllocLimitPct' => 22, # Snapshot space allocation limit
# 'tpvv' => true, # True: Create TPVV
# # False (default) Create FPVV
# 'usrSpcAllocWarningPct' => 22, # Enable user space allocation
# # warning
# 'usrSpcAllocLimitPct' => 22, # User space allocation limit
# 'expirationHours' => 256, # Relative time from now to expire
# # volume (max 43,800 hours)
# 'retentionHours' => 256 # Relative time from now to retain
# # volume (max 43,800 hours)
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid Parameter
# * Hpe3parSdk::HTTPBadRequest
# - TOO_LARGE - Volume size above limit
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Not Enough space is available
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_SV - Volume Exists already
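#
# ==== Example
#
# A minimal sketch (not from the source); the volume name, CPG name,
# and size are assumptions:
#
# # Create a 10 GiB (10240 MiB) thin-provisioned volume in 'my_cpg'
# client.create_volume('my_vol', 'my_cpg', 10240, {'tpvv' => true})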
def create_volume(name, cpg_name, size_MiB, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
optional.delete_if { |key, _value| key == :compression }
end
begin
@volume.create_volume(name, cpg_name, size_MiB, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a volume
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPForbidden
# - RETAINED - Volume retention time has not expired
# * Hpe3parSdk::HTTPForbidden
# - HAS_RO_CHILD - Volume has read-only child
# * Hpe3parSdk::HTTPConflict
# - HAS_CHILD - The volume has a child volume
# * Hpe3parSdk::HTTPConflict
# - IN_USE - The volume is in use by VV set, VLUN, etc
def delete_volume(name)
begin
@volume.delete_volume(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies a volume
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
# * volumeMods - Hash of volume attributes to change
# type volumeMods: Hash
# volumeMods = {
# 'newName' => 'newName', # New volume name
# 'comment' => 'some comment', # New volume comment
# 'snapCPG' => 'CPG name', # Snapshot CPG name
# 'policies' => { # Specifies VV policies
# 'staleSS' => false, # True allows stale snapshots.
# 'oneHost' => true, # True constrains volume export to
# # single host or host cluster
# 'zeroDetect' => true, # True requests Storage System to
# # scan for zeros in incoming write
# # data
# 'system' => false, # True special volume used by system
# # False is normal user volume
# 'caching' => true}, # Read-only. True indicates write &
# # read caching & read ahead enabled
# 'ssSpcAllocWarningPct' => 12, # Snapshot space allocation warning
# 'ssSpcAllocLimitPct' => 22, # Snapshot space allocation limit
# 'tpvv' => true, # True: Create TPVV
# # False: (default) Create FPVV
# 'usrSpcAllocWarningPct' => 22, # Enable user space allocation
# # warning
# 'usrSpcAllocLimitPct' => 22, # User space allocation limit
# 'userCPG' => 'User CPG name', # User CPG name
# 'expirationHours' => 256, # Relative time from now to expire
# # volume (max 43,800 hours)
# 'retentionHours' => 256, # Relative time from now to retain
# # volume (max 43,800 hours)
# 'rmSsSpcAllocWarning' => false, # True removes snapshot space
# # allocation warning.
# # False sets it when value > 0
# 'rmUsrSpcAllocWarning' => false, # True removes user space
# # allocation warning.
# # False sets it when value > 0
# 'rmExpTime' => false, # True resets expiration time to 0.
# # False sets it when value > 0
# 'rmSsSpcAllocLimit' => false, # True removes snapshot space
# # allocation limit.
# # False sets it when value > 0
# 'rmUsrSpcAllocLimit' => false # True removes user space
# # allocation limit.
# # False sets it when value > 0
# }
#
# ==== Raises:
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_WARN_GT_LIMIT - Allocation warning level is higher than
# the limit.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_USR_ALRT_NON_TPVV - User space allocation alerts are
# valid only with a TPVV.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_RETAIN_GT_EXPIRE - Retention time is greater than
# expiration time.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_VV_POLICY - Invalid policy specification (for example,
# caching or system is set to true).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_LENGTH - Invalid input: string length exceeds
# limit.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_TIME - Invalid time specified.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_MODIFY_USR_CPG_TPVV - usr_cpg cannot be modified
# on a TPVV.
# * Hpe3parSdk::HTTPBadRequest
# - UNLICENSED_FEATURE - Retention time cannot be modified on a
# system without the Virtual Lock license.
# * Hpe3parSdk::HTTPForbidden
# - CPG_NOT_IN_SAME_DOMAIN - Snap CPG is not in the same domain as
# the user CPG.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_PEER_VOLUME - Cannot modify a peer volume.
# * Hpe3parSdk::HTTPInternalServerError
# - INT_SERV_ERR - Metadata of the VV is corrupted.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_SYS_VOLUME - Cannot modify retention time on a
# system volume.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - Cannot modify an internal
# volume
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_NOT_DEFINED_ALL_NODES - Cannot modify a
# volume until the volume is defined on all nodes.
# * Hpe3parSdk::HTTPConflict
# - INVALID_OPERATION_VV_ONLINE_COPY_IN_PROGRESS - Cannot modify a
# volume when an online copy for that volume is in progress.
# * Hpe3parSdk::HTTPConflict
# - INVALID_OPERATION_VV_VOLUME_CONV_IN_PROGRESS - Cannot modify a
# volume in the middle of a conversion operation.
# * Hpe3parSdk::HTTPConflict
# - INVALID_OPERATION_VV_SNAPSPACE_NOT_MOVED_TO_CPG - Snapshot space
# of a volume needs to be moved to a CPG before the user space.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_ACCOUNTING_IN_PROGRESS - The volume
# cannot be renamed until snapshot accounting has finished.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_ZERO_DETECT_TPVV - The zero_detect policy can be
# used only on TPVVs.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_CPG_ON_SNAPSHOT - CPG cannot be assigned to a
# snapshot.
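#
# ==== Example
#
# An illustrative sketch (not from the source); 'my_vol' is assumed to
# exist:
#
# client.modify_volume('my_vol', {'newName' => 'my_vol_renamed',
#                                 'comment' => 'renamed by SDK'})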
def modify_volume(name, volume_mods)
begin
@volume.modify_volume(name, volume_mods)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Grows an existing volume by 'amount' Mebibytes.
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
# * amount: the additional size in MiB to add, rounded up to the next chunklet size (e.g. 256 or 1000 MiB)
# type amount: Integer
#
# ==== Raises:
#
# * Hpe3parSdk::HTTPForbidden
# - VV_NOT_IN_SAME_DOMAIN - The volume is not in the same domain.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_UNSUPPORTED_VV_TYPE - Invalid operation: Cannot
# grow this type of volume.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_TUNE_IN_PROGRESS - Invalid operation: Volume
# tuning is in progress.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_EXCEEDS_LENGTH - Invalid input: String length exceeds
# limit.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_VV_GROW_SIZE - Invalid grow size.
# * Hpe3parSdk::HTTPForbidden
# - VV_NEW_SIZE_EXCEEDS_CPG_LIMIT - New volume size exceeds CPG limit
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - This operation is not allowed
# on an internal volume.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_CONV_IN_PROGRESS - Invalid operation: VV
# conversion is in progress.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_COPY_IN_PROGRESS - Invalid operation:
# online copy is in progress.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_CLEANUP_IN_PROGRESS - Internal volume cleanup is
# in progress.
# * Hpe3parSdk::HTTPForbidden
# - VV_IS_BEING_REMOVED - The volume is being removed.
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_INCONSISTENT_STATE - The volume has an internal consistency
# error.
# * Hpe3parSdk::HTTPForbidden
# - VV_SIZE_CANNOT_REDUCE - New volume size is smaller than the
# current size.
# * Hpe3parSdk::HTTPForbidden
# - VV_NEW_SIZE_EXCEEDS_LIMITS - New volume size exceeds the limit.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_SA_SD_SPACE_REMOVED - Invalid operation: Volume
# SA/SD space is being removed.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_IS_BUSY - Invalid operation: Volume is currently
# busy.
# * Hpe3parSdk::HTTPForbidden
# - VV_NOT_STARTED - Volume is not started.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_IS_PCOPY - Invalid operation: Volume is a
# physical copy.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_NOT_IN_NORMAL_STATE - Volume state is not normal
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_PROMOTE_IN_PROGRESS - Invalid operation: Volume
# promotion is in progress.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_PARENT_OF_PCOPY - Invalid operation: Volume is
# the parent of physical copy.
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Insufficient space for requested operation.
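#
# ==== Example
#
# A sketch only (not from the source); 'my_vol' is an assumption:
#
# # Grow 'my_vol' by 1 GiB (1024 MiB)
# client.grow_volume('my_vol', 1024)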
def grow_volume(name, amount)
begin
@volume.grow_volume(name, amount)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a physical copy of a VirtualVolume
#
# ==== Attributes
#
# * src_name - the source volume name
# type src_name: String
# * dest_name - the destination volume name
# type dest_name: String
# * dest_cpg - the destination CPG
# type dest_cpg: String
# * optional - Hash of optional parameters
# type optional: Hash
#
# optional = {
# 'online' => false, # should physical copy be
# # performed online?
# 'tpvv' => false, # use thin provisioned space
# # for destination
# # (online copy only)
# 'snapCPG' => 'OpenStack_SnapCPG', # snapshot CPG for the
# # destination
# # (online copy only)
# 'saveSnapshot' => false, # save the snapshot of the
# # source volume
# 'priority' => 1 # taskPriorityEnum (does not
# # apply to online copy - Hpe3parSdk::TaskPriority)
# }
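#
# ==== Example
#
# An illustrative sketch (not from the source); the volume and CPG
# names are assumptions:
#
# client.create_physical_copy('src_vol', 'dst_vol', 'my_cpg',
#                             {'online' => true, 'tpvv' => true})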
def create_physical_copy(src_name, dest_name, dest_cpg, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
[:compression, :allowRemoteCopyParent, :skipZero].each { |key| optional.delete key }
end
begin
@volume.create_physical_copy(src_name, dest_name, dest_cpg, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a physical copy
#
# ==== Attributes
#
# * name - the name of the clone volume
# type name: String
#
# ==== Raises:
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPForbidden
# - RETAINED - Volume retention time has not expired
# * Hpe3parSdk::HTTPForbidden
# - HAS_RO_CHILD - Volume has read-only child
# * Hpe3parSdk::HTTPConflict
# - HAS_CHILD - The volume has a child volume
# * Hpe3parSdk::HTTPConflict
# - IN_USE - The volume is in use by VV set, VLUN, etc
def delete_physical_copy(name)
begin
@volume.delete_volume(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Tunes a volume
#
# ==== Attributes
#
# * name - the volume name
# type name: String
# * tune_operation - Enum of tune operation - 1: Change User CPG, 2: Change snap CPG
# type tune_operation: Integer
# * optional - hash of optional parameters
# type optional: hash
#
# optional = {
# 'userCPG' => 'user_cpg', # Specifies the new user
# # CPG to which the volume
# # will be tuned.
# 'snapCPG' => 'snap_cpg', # Specifies the snap CPG to
# # which the volume will be
# # tuned.
# 'conversionOperation' => 1, # conversion operation enum. Refer Hpe3parSdk::VolumeConversionOperation
# 'keepVV' => 'new_volume', # Name of the new volume
# # where the original logical disks are saved.
# 'compression' => true # Enables (true) or disables (false) compression.
# # You cannot compress a fully provisioned volume.
# }
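#
# ==== Example
#
# A sketch only (not from the source); 'my_vol' and 'new_cpg' are
# assumptions. Operation 1 changes the user CPG, per the
# tune_operation description above:
#
# task = client.tune_volume('my_vol', 1, {'userCPG' => 'new_cpg'})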
def tune_volume(name, tune_operation, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
optional.delete_if { |key, _value| key == :compression }
end
begin
object_hash = @volume.tune_volume(name, tune_operation, optional)
get_task(object_hash['taskid'])
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Returns an array of every VolumeSet the given volume is a part of.
# The array can contain zero, one, or multiple items.
#
# ==== Attributes
#
# * name - the volume name
# type name: String
#
# ==== Returns
#
# Array of VolumeSet
#
# ==== Raises
#
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_INCONSISTENT_STATE - Internal inconsistency error in vol
# * Hpe3parSdk::HTTPForbidden
# - VV_IS_BEING_REMOVED - The volume is being removed
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOLUME - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_SYS_VOLUME - Illegal op on system vol
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - Illegal op on internal vol
def find_all_volume_sets(name)
begin
@volume_set.find_all_volume_sets(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the Volume Sets
#
# ==== Returns
#
# Array of VolumeSet
def get_volume_sets
begin
@volume_set.get_volume_sets
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the information about a Volume Set.
#
# ==== Attributes
#
# * name - The name of the volume set to find
# type name: String
#
# ==== Returns
#
# VolumeSet
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 102 message: Set does not exist
def get_volume_set(name)
begin
@volume_set.get_volume_set(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new volume set
#
# ==== Attributes
#
# * name - the volume set to create
# type name: String
# * domain: the domain where the set lives
# type domain: String
# * comment: the comment for the vv set
# type comment: String
# * setmembers: the vv(s) to add to the set, the existence of the vv(s) will not be checked
# type setmembers: Array of String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT - Invalid URI syntax.
# * Hpe3parSdk::HTTPBadRequest
# - NON_EXISTENT_DOMAIN - Domain doesn't exist.
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Not Enough space is available.
# * Hpe3parSdk::HTTPBadRequest
# - BAD_CPG_PATTERN - A pattern in a CPG specifies illegal values.
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_SET - The set already exists.
def create_volume_set(name, domain = nil, comment = nil, setmembers = nil)
begin
@volume_set.create_volume_set(name, domain, comment, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes the volume set. You must clear all QOS rules before a volume set can be deleted.
#
# ==== Attributes
#
# * name - The name of the VolumeSet
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPConflict
# - EXPORTED_VLUN - The VV set has exported VLUNs.
# * Hpe3parSdk::HTTPConflict
# - VVSET_QOS_TARGET - The VV set is the target of a QoS rule.
def delete_volume_set(name)
begin
@volume_set.delete_volume_set(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Modifies a volume set by adding or removing a volume from the volume
# set. Its action is based on the enums MEM_ADD and MEM_REMOVE.
#
# ==== Attributes
#
# * action: add or remove volume from the set
# type action: Hpe3parSdk::SetCustomAction
# * name: the volume set name
# type name: String
# * newName: new name of set
# type newName: String
# * comment: the comment for the vv set
# type comment: String
# * flash_cache_policy: the flash-cache policy for the vv set
# type flash_cache_policy: enum
# * setmembers: the vv(s) to add to the set; the existence of the vv(s) will not be checked
# type setmembers: Array of String
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - EXISTENT_SET - The set already exists.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_DOMAINSET - The host is in a domain set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_IN_SET - The object is already part of the set.
# * Hpe3parSdk::HTTPNotFound
# - MEMBER_NOT_IN_SET - The object is not part of the set.
# * Hpe3parSdk::HTTPConflict
# - MEMBER_NOT_IN_SAME_DOMAIN - Objects must be in the same domain to
# perform this operation.
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_INCONSISTENT_STATE - The volume has an internal
# inconsistency error.
# * Hpe3parSdk::HTTPForbidden
# - VV_IS_BEING_REMOVED - The volume is being removed.
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOLUME - The volume does not exists.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_SYS_VOLUME - The operation is not allowed on a
# system volume.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - The operation is not allowed
# on an internal volume.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_NAME - Invalid input (duplicate name).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_PARAM_CONFLICT - Invalid input (parameters cannot be
# present at the same time).
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_ILLEGAL_CHAR - Invalid input contains one or more illegal
# characters.
def modify_volume_set(name, action = nil, newName = nil, comment = nil, flash_cache_policy = nil, setmembers = nil)
begin
@volume_set.modify_volume_set(name, action, newName, comment, flash_cache_policy, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
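# Example (illustrative sketch): adds one volume to an existing set;
# assumes `client` is an authenticated Hpe3parSdk::Client and that
# Hpe3parSdk::SetCustomAction exposes the MEM_ADD enum referenced above.
#
#   client.modify_volume_set('vvset1', Hpe3parSdk::SetCustomAction::MEM_ADD,
#                            nil, nil, nil, ['vol3'])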
# Adds volume(s) to a volume set.
#
# ==== Attributes
#
# * set_name - the volume set name
# type set_name: String
# * setmembers - the volume(s) name to add
# type setmembers: Array of String
def add_volumes_to_volume_set(set_name, setmembers)
begin
@volume_set.add_volumes_to_volume_set(set_name, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Removes a volume from a volume set
#
# ==== Attributes
#
# * set_name - the volume set name
# type set_name: String
# * setmembers - the volume(s) name to remove
# type setmembers: Array of String
def remove_volumes_from_volume_set(set_name, setmembers)
begin
@volume_set.remove_volumes_from_volume_set(set_name, setmembers)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a snapshot of an existing VolumeSet
#
# ==== Attributes
#
# * name: Name of the Snapshot. The vvname pattern is described in "VV Name Patterns" in the HPE 3PAR Command Line Interface Reference, which is available at the following website: http://www.hp.com/go/storage/docs
# type name: String
# * copy_of_name: the name of the parent volume
# type copy_of_name: String
# * comment: the comment on the vv set
# type comment: String
# * optional: Hash of optional params
# type optional: Hash
# optional = {
# 'id' => 12, # Specifies the ID of the volume
# # set, next by default
# 'comment' => "some comment",
# 'readOnly' => true, # Read Only
# 'expirationHours' => 36, # time from now to expire
# 'retentionHours' => 12 # time from now to retain
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - INVALID_INPUT_VV_PATTERN - Invalid volume pattern specified
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_SET - The set does not exist.
# * Hpe3parSdk::HTTPNotFound
# - EMPTY_SET - The set is empty
# * Hpe3parSdk::HTTPServiceUnavailable
# - VV_LIMIT_REACHED - Maximum number of volumes reached
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The storage volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - VV_IS_BEING_REMOVED - The volume is being removed
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_READONLY_TO_READONLY_SNAP - Creating a read-only copy from a read-only volume is not permitted
# * Hpe3parSdk::HTTPConflict
# - NO_SNAP_CPG - No snapshot CPG has been configured for the volume
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_DUP_NAME - Invalid input (duplicate name).
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_SNAP_PARENT_SAME_BASE - Two parent snapshots share the same base volume
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_ONLINE_COPY_IN_PROGRESS - Invalid operation. Online copy is in progress
# * Hpe3parSdk::HTTPServiceUnavailable
# - VV_ID_LIMIT_REACHED - Max number of volumeIDs has been reached
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOLUME - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_STALE_STATE - The volume is in a stale state.
# * Hpe3parSdk::HTTPForbidden
# - VV_NOT_STARTED - Volume is not started
# * Hpe3parSdk::HTTPForbidden
# - VV_UNAVAILABLE - The volume is not accessible
# * Hpe3parSdk::HTTPServiceUnavailable
# - SNAPSHOT_LIMIT_REACHED - Max number of snapshots has been reached
# * Hpe3parSdk::HTTPServiceUnavailable
# - CPG_ALLOCATION_WARNING_REACHED - The CPG has reached the allocation warning
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_VOLUME_CONV_IN_PROGRESS - Invalid operation: VV conversion is in progress.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_CLEANUP_IN_PROGRESS - Internal volume cleanup is in progress.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_PEER_VOLUME - Cannot modify a peer volume.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_VV_ONLINE_COPY_IN_PROGRESS - The volume is the target of an online copy.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_INTERNAL_VOLUME - Illegal op on internal vol
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_ID - An ID exists
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_VV_NOT_IN_NORMAL_STATE - Volume state is not normal
# * Hpe3parSdk::HTTPForbidden
# - VV_IN_INCONSISTENT_STATE - Internal inconsistency error in vol
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_RETAIN_GT_EXPIRE - Retention time is greater than expiration time.
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT_TIME - Invalid time specified.
# * Hpe3parSdk::HTTPForbidden
# - INV_OPERATION_SNAPSHOT_NOT_SAME_TYPE - Some snapshots in the volume set are read-only, some are read-write
def create_snapshot_of_volume_set(name, copy_of_name, optional = nil)
begin
@volume_set.create_snapshot_of_volume_set(name, copy_of_name, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a snapshot of an existing Volume.
#
# ==== Attributes
#
# * name - the name of the Snapshot
# type name: String
# * copy_of_name - the name of the parent volume
# type copy_of_name: String
# * optional - Hash of other optional items
# type optional: Hash
#
# optional = {
# 'id' => 12, # Specifies the ID of the volume,
# # next by default
# 'comment' => "some comment",
# 'readOnly' => true, # Read Only
# 'expirationHours' => 36, # time from now to expire
# 'retentionHours' => 12 # time from now to retain
# }
#
# ==== Raises
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
def create_snapshot(name, copy_of_name, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
optional.delete_if { |key, _value| key == :allowRemoteCopyParent }
end
begin
@volume.create_snapshot(name, copy_of_name, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
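# Example (illustrative sketch): creates a read-only snapshot that
# expires after a day; assumes `client` is an authenticated
# Hpe3parSdk::Client.
#
#   client.create_snapshot('vol1_snap', 'vol1',
#                          { 'readOnly' => true, 'expirationHours' => 24 })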
# Restores from a snapshot to a volume
#
# ==== Attributes
#
# * name - the name of the Snapshot
# type name: String
# * optional - hash of other optional items
# type optional: Hash
#
# optional = {
# 'online' => false, # Enables (true) or disables
# #(false) executing the promote
# #operation on an online volume.
# #The default setting is false
#
# 'priority' => 2 #Does not apply to online promote
# #operation or to stop promote
# #operation.
#
# 'allowRemoteCopyParent' => false #Allows the promote operation to
# #proceed even if the RW parent
# #volume is currently in a Remote
# #Copy volume group, if that group
# #has not been started. If the
# #Remote Copy group has been
# #started, this command fails.
# #(WSAPI 1.6 and later.)
# }
#
def restore_snapshot(name, optional = nil)
if @current_version < @min_version_with_compression && !optional.nil?
optional.delete_if { |key, _value| key == :allowRemoteCopyParent }
end
begin
@volume.restore_snapshot(name, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
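# Example (illustrative sketch): promotes a snapshot back onto its base
# volume while the volume stays online; assumes `client` is an
# authenticated Hpe3parSdk::Client.
#
#   client.restore_snapshot('vol1_snap', { 'online' => true })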
# Deletes a snapshot
#
# ==== Attributes
#
# * name - the name of the snapshot volume
# type name: String
#
# ==== Raises:
#
# * Hpe3parSdk::HTTPNotFound
# - NON_EXISTENT_VOL - The volume does not exist
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPForbidden
# - RETAINED - Volume retention time has not expired
# * Hpe3parSdk::HTTPForbidden
# - HAS_RO_CHILD - Volume has read-only child
# * Hpe3parSdk::HTTPConflict
# - HAS_CHILD - The volume has a child volume
# * Hpe3parSdk::HTTPConflict
# - IN_USE - The volume is in use by VV set, VLUN, etc
def delete_snapshot(name)
begin
@volume.delete_volume(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the snapshots of a particular volume
#
# ==== Attributes
#
# * name - the name of the volume
# type name: String
#
# ==== Returns
#
# Array of VirtualVolume
def get_volume_snapshots(name)
begin
@volume.get_volume_snapshots(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets an array of all ports on the 3PAR.
#
# ==== Returns
#
# Array of Port.
def get_ports
begin
@port.get_ports
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets an array of Fibre Channel Ports.
#
# ==== Attributes
#
# * state - Port link state.
# type state: Integer. Refer Hpe3parSdk::PortLinkState for complete enumeration.
#
# ==== Returns
#
# Array of Fibre Channel Port.
def get_fc_ports(state = nil)
begin
@port.get_fc_ports(state)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets an array of iSCSI Ports.
#
# ==== Attributes
#
# * state - Port link state.
# type state: Integer. Refer Hpe3parSdk::PortLinkState for complete enumeration.
#
# ==== Returns
#
# Array of iSCSI Port.
def get_iscsi_ports(state = nil)
begin
@port.get_iscsi_ports(state)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets an array of IP Ports.
#
# ==== Attributes
#
# * state - Port link state.
# type state: Integer. Refer Hpe3parSdk::PortLinkState for complete enumeration.
#
# ==== Returns
#
# Array of IP Port.
def get_ip_ports(state = nil)
begin
@port.get_ip_ports(state)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets entire list of CPGs.
#
# ==== Returns
#
# CPG array
def get_cpgs
begin
@cpg.get_cpgs
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets information about a Cpg.
#
# ==== Attributes
#
# * name - The name of the cpg to find
# type name: String
#
# ==== Returns
#
# CPG
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 15 message: cpg does not exist
def get_cpg(name)
begin
@cpg.get_cpg(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Creates a new CPG.
#
# ==== Attributes
#
# * name - Name of the cpg
# type name: String
# * optional - Hash of other optional items
# type optional: Hash
#
# optional = {
# 'growthIncrementMiB' => 100, # Growth increment in MiB for
# # each auto-grown operation
# 'growthLimitMiB': 1024, # Auto-grow operation is limited
# # to specified storage amount
# 'usedLDWarningAlertMiB': 200, # Threshold to trigger warning
# # of used logical disk space
# 'domain': 'MyDomain', # Name of the domain object
# 'LDLayout': {
# 'RAIDType': 1, # Disk Raid Type
# 'setSize': 100, # Size in number of chunklets
# 'HA': 0, # Layout supports failure of
# # one port pair (1),
# # one cage (2),
# # or one magazine (3)
# 'chunkletPosPref': 2, # Chunklet location preference
# # characteristics.
# # Lowest Number/Fastest transfer
# # = 1
# # Higher Number/Slower transfer
# # = 2
# 'diskPatterns': []} # Patterns for candidate disks
# }
#
# ==== Raises
# * Hpe3parSdk::HTTPBadRequest
# - INV_INPUT Invalid URI Syntax.
# * Hpe3parSdk::HTTPBadRequest
# - NON_EXISTENT_DOMAIN - Domain doesn't exist.
# * Hpe3parSdk::HTTPBadRequest
# - NO_SPACE - Not Enough space is available.
# * Hpe3parSdk::HTTPBadRequest
# - BAD_CPG_PATTERN A Pattern in a CPG specifies illegal values.
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
# * Hpe3parSdk::HTTPConflict
# - EXISTENT_CPG - CPG exists already
def create_cpg(name, optional = nil)
begin
@cpg.create_cpg(name, optional)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
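# Example (illustrative sketch): creates a CPG in a domain with a
# 100 MiB growth increment; assumes `client` is an authenticated
# Hpe3parSdk::Client.
#
#   client.create_cpg('cpg1', { 'domain' => 'MyDomain',
#                               'growthIncrementMiB' => 100 })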
# Modifies a CPG.
#
# ==== Attributes
#
# * name - Name of the CPG
# type name: String
# * optional - hash of other optional items
# type optional: Hash
#
# optional = {
# 'newName'=> "newCPG:, # Specifies the name of the
# # CPG to update.
# 'disableAutoGrow'=>false, # Enables (false) or
# # disables (true) CPG auto
# # grow. Defaults to false.
# 'rmGrowthLimit'=> false, # Enables (false) or
# # disables (true) auto grow
# # limit enforcement. Defaults
# # to false.
# 'rmWarningAlert'=> false, # Enables (false) or
# # disables (true) warning
# # limit enforcement. Defaults
# # to false.
# }
#
def modify_cpg(name, cpg_mods)
begin
@cpg.modify_cpg(name, cpg_mods)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets available space information about a cpg.
#
# ==== Attributes
#
# * name - The name of the cpg to find
# type name: String
#
# ==== Returns
#
# Available space details in form of LDLayoutCapacity object
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 15 message: cpg does not exist
def get_cpg_available_space(name)
begin
@cpg.get_cpg_available_space(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Deletes a CPG.
#
# ==== Attributes
#
# * name - The name of the CPG
# type name: String
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error with code: 15 message: CPG does not exist
# * Hpe3parSdk::HTTPForbidden
# - IN_USE - The CPG cannot be removed because it is in use.
# * Hpe3parSdk::HTTPForbidden
# - PERM_DENIED - Permission denied
def delete_cpg(name)
begin
@cpg.delete_cpg(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Gets the status of an online physical copy
#
# ==== Attributes
#
# * name - The name of the volume
# type name: String
#
# ==== Returns
#
# Status of online copy (String)
#
# ==== Raises
#
# * Hpe3parSdk::HPE3PARException
# Error: message: Volume not an online physical copy
def get_online_physical_copy_status(name)
begin
@volume.get_online_physical_copy_status(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Stops an offline physical copy operation
#
# ==== Attributes
#
# * name - The name of the volume
# type name: String
def stop_offline_physical_copy(name)
begin
@volume.stop_offline_physical_copy(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Stops an online physical copy operation
#
# ==== Attributes
#
# * name - The name of the volume
# type name: String
def stop_online_physical_copy(name)
begin
@volume.stop_online_physical_copy(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Resynchronizes a physical copy.
#
# ==== Attributes
#
# * name - The name of the volume
# type name: String
def resync_physical_copy(name)
begin
@volume.resync_physical_copy(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Waits for a 3PAR task to end.
#
# ==== Attributes
#
# * task_id - The Id of the task to be waited upon.
# type task_id: Integer
# * poll_rate_secs - The polling interval in seconds.
# type poll_rate_secs: Integer
def wait_for_task_to_end(task_id, poll_rate_secs = 15)
begin
@task.wait_for_task_to_end(task_id, poll_rate_secs)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
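# Example (illustrative sketch): blocks until an asynchronous task
# completes, polling every 10 seconds; 1234 stands in for a task ID
# returned by an earlier call such as tune_volume.
#
#   client.wait_for_task_to_end(1234, 10)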
# Cancel a 3PAR task
#
# ==== Attributes
#
# * task_id - The Id of the task to be cancelled.
# type task_id: Integer
# ==== Raises
#
# * Hpe3parSdk::HTTPBadRequest
# - NON_ACTIVE_TASK - The task is not active at this time.
# * Hpe3parSdk::HTTPConflict
# - INV_OPERATION_CANNOT_CANCEL_TASK - Invalid operation: Task cannot be cancelled.
def cancel_task(task_id)
begin
@task.cancel_task(task_id)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def flash_cache_exists?
begin
@flash_cache.flash_cache_exists?
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def volume_exists?(name)
begin
@volume.volume_exists?(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def volume_set_exists?(name)
begin
@volume_set.volume_set_exists?(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def host_exists?(host_name)
begin
@host.host_exists?(host_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def host_set_exists?(host_name)
begin
@host_set.host_set_exists?(host_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def cpg_exists?(name)
begin
@cpg.cpg_exists?(name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def online_physical_copy_exists?(src_name, phy_copy_name)
begin
@volume.online_physical_copy_exists?(src_name, phy_copy_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
def offline_physical_copy_exists?(src_name, phy_copy_name)
begin
@volume.offline_physical_copy_exists?(src_name, phy_copy_name)
rescue => ex
Util.log_exception(ex, caller_locations(1, 1)[0].label)
raise ex
end
end
# Logout from the 3PAR Array
end
|
moneta-rb/moneta | lib/moneta/synchronize.rb | Moneta.SynchronizePrimitive.enter | ruby | def enter(timeout = nil, wait = 0.01)
time_at_timeout = Time.now + timeout if timeout
while !timeout || Time.now < time_at_timeout
return true if try_enter
sleep(wait)
end
false
end | Enter critical section (blocking)
@param [Number] timeout Maximum time to wait
@param [Number] wait Sleep time between tries to acquire lock
@return [Boolean] true if the lock was acquired | train | https://github.com/moneta-rb/moneta/blob/26a118c8b2c93d11257f4a5fe9334a8157f4db47/lib/moneta/synchronize.rb#L31-L38 | class SynchronizePrimitive
# Synchronize block
#
# @api public
# @yieldparam Synchronized block
# @return [Object] result of block
def synchronize
enter
yield
ensure
leave
end
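# Example (illustrative, assumed API): a concrete subclass such as
# Moneta::Mutex supplies #enter_primitive / #leave_primitive, so usage
# looks like:
#
#   mutex = Moneta::Mutex.new(store, 'my_lock') # assumed constructor
#   mutex.synchronize do
#     # critical section
#   end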
# Try to enter critical section (nonblocking)
#
# @return [Boolean] true if the lock was acquired
def try_enter
raise 'Already locked' if @locked
enter_primitive ? @locked = true : false
end
alias_method :try_lock, :try_enter
# Enter critical section (blocking)
#
# @param [Number] timeout Maximum time to wait
# @param [Number] wait Sleep time between tries to acquire lock
# @return [Boolean] true if the lock was acquired
alias_method :lock, :enter
# Leave critical section
def leave
raise 'Not locked' unless @locked
leave_primitive
@locked = false
nil
end
alias_method :unlock, :leave
# Is the lock acquired?
def locked?
@locked
end
end
|
rjurado01/rapidoc | lib/rapidoc/resources_extractor.rb | Rapidoc.ResourcesExtractor.get_routes_doc | ruby | def get_routes_doc
puts "Executing 'rake routes'..." if trace?
routes_doc = RoutesDoc.new
routes = Dir.chdir( ::Rails.root.to_s ) { `rake routes` }
routes.split("\n").each do |entry|
routes_doc.add_route( entry ) unless entry.match(/URI/)
end
routes_doc
end | Reads 'rake routes' output and gets the routes info
@return [RoutesDoc] class with routes info | train | https://github.com/rjurado01/rapidoc/blob/03b7a8f29a37dd03f4ed5036697b48551d3b4ae6/lib/rapidoc/resources_extractor.rb#L18-L29 | module ResourcesExtractor
##
# Reads 'rake routes' output and gets the routes info
# @return [RoutesDoc] class with routes info
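#
# A typical `rake routes` line consumed by RoutesDoc#add_route looks
# like this (illustrative example):
#
#   users GET /users(.:format) users#index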
#
##
# Create new ResourceDoc for each resource extracted from RoutesDoc
# @return [Array] ResourceDoc array
#
def get_resources
routes_doc = get_routes_doc
resources_names = routes_doc.get_resources_names - resources_black_list
resources_names.map do |resource|
puts "Generating #{resource} documentation..." if trace?
ResourceDoc.new( resource, routes_doc.get_actions_route_info( resource ) )
end
end
end
|
algolia/algoliasearch-client-ruby | lib/algolia/index.rb | Algolia.Index.add_objects! | ruby | def add_objects!(objects, request_options = {})
res = add_objects(objects, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end | Add several objects in this index and wait for the end of indexing
@param objects the array of objects to add inside the index.
Each object is represented by an associative array
@param request_options contains extra parameters to send with your query | train | https://github.com/algolia/algoliasearch-client-ruby/blob/5292cd9b1029f879e4e0257a3e89d0dc9ad0df3b/lib/algolia/index.rb#L91-L95 | class Index
attr_accessor :name, :client
def initialize(name, client = nil)
self.name = name
self.client = client || Algolia.client
end
#
# Delete an index
#
# @param request_options contains extra parameters to send with your query
#
# return a hash of the form { "deletedAt" => "2013-01-18T15:33:13.556Z", "taskID" => "42" }
#
def delete(request_options = {})
client.delete(Protocol.index_uri(name), :write, request_options)
end
alias_method :delete_index, :delete
#
# Delete an index and wait until the deletion has been processed
#
# @param request_options contains extra parameters to send with your query
#
# return a hash of the form { "deletedAt" => "2013-01-18T15:33:13.556Z", "taskID" => "42" }
#
def delete!(request_options = {})
res = delete(request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
alias_method :delete_index!, :delete!
#
# Add an object in this index
#
# @param object the object to add to the index.
# The object is represented by an associative array
# @param objectID (optional) an objectID you want to attribute to this object
# (if the attribute already exists the old object will be overridden)
# @param request_options contains extra parameters to send with your query
#
def add_object(object, objectID = nil, request_options = {})
check_object(object)
if objectID.nil? || objectID.to_s.empty?
client.post(Protocol.index_uri(name), object.to_json, :write, request_options)
else
client.put(Protocol.object_uri(name, objectID), object.to_json, :write, request_options)
end
end
#
# Add an object in this index and wait for the end of indexing
#
# @param object the object to add to the index.
# The object is represented by an associative array
# @param objectID (optional) an objectID you want to attribute to this object
# (if the attribute already exists the old object will be overridden)
# @param Request options object. Contains extra URL parameters or headers
#
def add_object!(object, objectID = nil, request_options = {})
res = add_object(object, objectID, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Add several objects in this index
#
# @param objects the array of objects to add inside the index.
# Each object is represented by an associative array
# @param request_options contains extra parameters to send with your query
#
def add_objects(objects, request_options = {})
batch(build_batch('addObject', objects, false), request_options)
end
#
# Add several objects in this index and wait for the end of indexing
#
# @param objects the array of objects to add inside the index.
# Each object is represented by an associative array
# @param request_options contains extra parameters to send with your query
#
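# Example (illustrative): indexes two records and, via the bang
# variant, waits for the indexing task to finish; assumes `index` was
# built with Algolia::Index.new('contacts').
#
#   index.add_objects!([
#     { :objectID => '1', :name => 'Tom' },
#     { :objectID => '2', :name => 'Anna' }
#   ])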
#
# Search inside the index
#
# @param query the full text query
# @param args (optional) if set, contains an associative array with query parameters:
# - page: (integer) Pagination parameter used to select the page to retrieve.
# Page is zero-based and defaults to 0. Thus, to retrieve the 10th page you need to set page=9
# - hitsPerPage: (integer) Pagination parameter used to select the number of hits per page. Defaults to 20.
# - attributesToRetrieve: a string that contains the list of object attributes you want to retrieve (let you minimize the answer size).
# Attributes are separated with a comma (for example "name,address").
# You can also use a string array encoding (for example ["name","address"]).
# By default, all attributes are retrieved. You can also use '*' to retrieve all values when an attributesToRetrieve setting is specified for your index.
# - attributesToHighlight: a string that contains the list of attributes you want to highlight according to the query.
# Attributes are separated by a comma. You can also use a string array encoding (for example ["name","address"]).
# If an attribute has no match for the query, the raw value is returned. By default all indexed text attributes are highlighted.
# You can use `*` if you want to highlight all textual attributes. Numerical attributes are not highlighted.
# A matchLevel is returned for each highlighted attribute and can contain:
# - full: if all the query terms were found in the attribute,
# - partial: if only some of the query terms were found,
# - none: if none of the query terms were found.
# - attributesToSnippet: a string that contains the list of attributes to snippet alongside the number of words to return (syntax is `attributeName:nbWords`).
# Attributes are separated by a comma (Example: attributesToSnippet=name:10,content:10).
# You can also use a string array encoding (Example: attributesToSnippet: ["name:10","content:10"]). By default no snippet is computed.
# - minWordSizefor1Typo: the minimum number of characters in a query word to accept one typo in this word. Defaults to 3.
# - minWordSizefor2Typos: the minimum number of characters in a query word to accept two typos in this word. Defaults to 7.
# - getRankingInfo: if set to 1, the result hits will contain ranking information in _rankingInfo attribute.
# - aroundLatLng: search for entries around a given latitude/longitude (specified as two floats separated by a comma).
# For example aroundLatLng=47.316669,5.016670).
# You can specify the maximum distance in meters with the aroundRadius parameter (in meters) and the precision for ranking with aroundPrecision
# (for example if you set aroundPrecision=100, two objects that are distant of less than 100m will be considered as identical for "geo" ranking parameter).
# At indexing, you should specify geoloc of an object with the _geoloc attribute (in the form {"_geoloc":{"lat":48.853409, "lng":2.348800}})
# - insideBoundingBox: search entries inside a given area defined by the two extreme points of a rectangle (defined by 4 floats: p1Lat,p1Lng,p2Lat,p2Lng).
# For example insideBoundingBox=47.3165,4.9665,47.3424,5.0201).
# At indexing, you should specify geoloc of an object with the _geoloc attribute (in the form {"_geoloc":{"lat":48.853409, "lng":2.348800}})
# - numericFilters: a string that contains the list of numeric filters you want to apply separated by a comma.
# The syntax of one filter is `attributeName` followed by `operand` followed by `value`. Supported operands are `<`, `<=`, `=`, `>` and `>=`.
# You can have multiple conditions on one attribute like for example numericFilters=price>100,price<1000.
# You can also use a string array encoding (for example numericFilters: ["price>100","price<1000"]).
# - tagFilters: filter the query by a set of tags. You can AND tags by separating them by commas.
# To OR tags, you must add parentheses. For example, tags=tag1,(tag2,tag3) means tag1 AND (tag2 OR tag3).
# You can also use a string array encoding, for example tagFilters: ["tag1",["tag2","tag3"]] means tag1 AND (tag2 OR tag3).
# At indexing, tags should be added in the _tags** attribute of objects (for example {"_tags":["tag1","tag2"]}).
# - facetFilters: filter the query by a list of facets.
# Facets are separated by commas and each facet is encoded as `attributeName:value`.
# For example: `facetFilters=category:Book,author:John%20Doe`.
# You can also use a string array encoding (for example `["category:Book","author:John%20Doe"]`).
# - facets: List of object attributes that you want to use for faceting.
# Attributes are separated with a comma (for example `"category,author"` ).
# You can also use a JSON string array encoding (for example ["category","author"]).
# Only attributes that have been added in **attributesForFaceting** index setting can be used in this parameter.
# You can also use `*` to perform faceting on all attributes specified in **attributesForFaceting**.
# - queryType: select how the query words are interpreted, it can be one of the following value:
# - prefixAll: all query words are interpreted as prefixes,
# - prefixLast: only the last word is interpreted as a prefix (default behavior),
# - prefixNone: no query word is interpreted as a prefix. This option is not recommended.
# - optionalWords: a string that contains the list of words that should be considered as optional when found in the query.
# The list of words is comma separated.
# - distinct: If set to 1, enable the distinct feature (disabled by default) if the attributeForDistinct index setting is set.
# This feature is similar to the SQL "distinct" keyword: when enabled in a query with the distinct=1 parameter,
# all hits containing a duplicate value for the attributeForDistinct attribute are removed from results.
# For example, if the chosen attribute is show_name and several hits have the same value for show_name, then only the best
# one is kept and others are removed.
# @param request_options contains extra parameters to send with your query
#
def search(query, params = {}, request_options = {})
encoded_params = Hash[params.map { |k, v| [k.to_s, v.is_a?(Array) ? v.to_json : v] }]
encoded_params[:query] = query
client.post(Protocol.search_post_uri(name), { :params => Protocol.to_query(encoded_params) }.to_json, :search, request_options)
end
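# Example (illustrative): a paginated query that only retrieves two
# attributes; assumes `index` is an Algolia::Index instance.
#
#   res = index.search('jimmie', { :hitsPerPage => 10,
#                                  :attributesToRetrieve => 'firstname,lastname' })
#   res['hits'].each { |hit| puts hit['firstname'] }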
class IndexBrowser
def initialize(client, name, params)
@client = client
@name = name
@params = params
@cursor = params[:cursor] || params['cursor'] || nil
end
def browse(request_options = {}, &block)
loop do
answer = @client.get(Protocol.browse_uri(@name, @params.merge({ :cursor => @cursor })), :read, request_options)
answer['hits'].each do |hit|
if block.arity == 2
yield hit, @cursor
else
yield hit
end
end
@cursor = answer['cursor']
break if @cursor.nil?
end
end
end
#
# Browse all index content
#
# @param queryParameters The hash of query parameters to use to browse
# To browse from a specific cursor, just add a ":cursor" parameters
# @param queryParameters An optional second parameters hash here for backward-compatibility (which will be merged with the first)
# @param request_options contains extra parameters to send with your query
#
# @DEPRECATED:
# @param page Pagination parameter used to select the page to retrieve.
# @param hits_per_page Pagination parameter used to select the number of hits per page. Defaults to 1000.
#
def browse(page_or_query_parameters = nil, hits_per_page = nil, request_options = {}, &block)
params = {}
if page_or_query_parameters.is_a?(Hash)
params.merge!(page_or_query_parameters)
else
params[:page] = page_or_query_parameters unless page_or_query_parameters.nil?
end
if hits_per_page.is_a?(Hash)
params.merge!(hits_per_page)
else
params[:hitsPerPage] = hits_per_page unless hits_per_page.nil?
end
if block_given?
IndexBrowser.new(client, name, params).browse(request_options, &block)
else
params[:page] ||= 0
params[:hitsPerPage] ||= 1000
client.get(Protocol.browse_uri(name, params), :read, request_options)
end
end
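# Example (illustrative): the block form follows the cursor
# automatically and yields every record in the index.
#
#   index.browse do |hit|
#     puts hit['objectID']
#   end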
#
# Browse a single page from a specific cursor
#
# @param request_options contains extra parameters to send with your query
#
def browse_from(cursor, hits_per_page = 1000, request_options = {})
client.post(Protocol.browse_uri(name), { :cursor => cursor, :hitsPerPage => hits_per_page }.to_json, :read, request_options)
end
#
# Get an object from this index
#
# @param objectID the unique identifier of the object to retrieve
# @param attributes_to_retrieve (optional) if set, contains the list of attributes to retrieve as an array of strings or a string separated by ","
# @param request_options contains extra parameters to send with your query
#
def get_object(objectID, attributes_to_retrieve = nil, request_options = {})
attributes_to_retrieve = attributes_to_retrieve.join(',') if attributes_to_retrieve.is_a?(Array)
if attributes_to_retrieve.nil?
client.get(Protocol.object_uri(name, objectID, nil), :read, request_options)
else
client.get(Protocol.object_uri(name, objectID, { :attributes => attributes_to_retrieve }), :read, request_options)
end
end
#
# Get a list of objects from this index
#
# @param objectIDs the array of unique identifier of the objects to retrieve
# @param attributes_to_retrieve (optional) if set, contains the list of attributes to retrieve as an array of strings or a string separated by ","
# @param request_options contains extra parameters to send with your query
#
def get_objects(objectIDs, attributes_to_retrieve = nil, request_options = {})
attributes_to_retrieve = attributes_to_retrieve.join(',') if attributes_to_retrieve.is_a?(Array)
requests = objectIDs.map do |objectID|
req = { :indexName => name, :objectID => objectID.to_s }
req[:attributesToRetrieve] = attributes_to_retrieve unless attributes_to_retrieve.nil?
req
end
client.post(Protocol.objects_uri, { :requests => requests }.to_json, :read, request_options)['results']
end
#
# Check the status of a task on the server.
# All server task are asynchronous and you can check the status of a task with this method.
#
# @param taskID the id of the task returned by server
# @param request_options contains extra parameters to send with your query
#
def get_task_status(taskID, request_options = {})
client.get_task_status(name, taskID, request_options)
end
#
# Wait the publication of a task on the server.
# All server task are asynchronous and you can check with this method that the task is published.
#
# @param taskID the id of the task returned by server
# @param time_before_retry the time in milliseconds before retry (default = 100ms)
# @param request_options contains extra parameters to send with your query
#
def wait_task(taskID, time_before_retry = WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options = {})
client.wait_task(name, taskID, time_before_retry, request_options)
end
#
# Override the content of an object
#
# @param object the object to save
# @param objectID the associated objectID, if nil 'object' must contain an 'objectID' key
# @param request_options contains extra parameters to send with your query
#
def save_object(object, objectID = nil, request_options = {})
client.put(Protocol.object_uri(name, get_objectID(object, objectID)), object.to_json, :write, request_options)
end
#
# Override the content of an object and wait for the end of indexing
#
# @param object the object to save
# @param objectID the associated objectID, if nil 'object' must contain an 'objectID' key
# @param request_options contains extra parameters to send with your query
#
def save_object!(object, objectID = nil, request_options = {})
res = save_object(object, objectID, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Override the content of several objects
#
# @param objects the array of objects to save, each object must contain an 'objectID' key
# @param request_options contains extra parameters to send with your query
#
def save_objects(objects, request_options = {})
batch(build_batch('updateObject', objects, true), request_options)
end
#
# Override the content of several objects and wait for the end of indexing
#
# @param objects the array of objects to save, each object must contain an objectID attribute
# @param request_options contains extra parameters to send with your query
#
def save_objects!(objects, request_options = {})
res = save_objects(objects, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Override the current objects by the given array of objects and wait for the end of indexing. Settings,
# synonyms and query rules are untouched. The objects are replaced without any downtime.
#
# @param objects the array of objects to save
# @param request_options contains extra parameters to send with your query
#
def replace_all_objects(objects, request_options = {})
safe = request_options[:safe] || request_options['safe'] || false
request_options.delete(:safe)
request_options.delete('safe')
tmp_index = @client.init_index(@name + '_tmp_' + rand(10000000).to_s)
responses = []
scope = ['settings', 'synonyms', 'rules']
res = @client.copy_index(@name, tmp_index.name, scope, request_options)
responses << res
if safe
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
end
batch = []
batch_size = 1000
count = 0
objects.each do |object|
batch << object
count += 1
if count == batch_size
res = tmp_index.add_objects(batch, request_options)
responses << res
batch = []
count = 0
end
end
if batch.any?
res = tmp_index.add_objects(batch, request_options)
responses << res
end
if safe
responses.each do |res|
tmp_index.wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
end
end
res = @client.move_index(tmp_index.name, @name, request_options)
responses << res
if safe
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
end
responses
end
#
# Override the current objects by the given array of objects and wait for the end of indexing
#
# @param objects the array of objects to save
# @param request_options contains extra parameters to send with your query
#
def replace_all_objects!(objects, request_options = {})
replace_all_objects(objects, request_options.merge(:safe => true))
end
#
# Update partially an object (only update attributes passed in argument)
#
# @param object the object attributes to override
# @param objectID the associated objectID, if nil 'object' must contain an 'objectID' key
# @param create_if_not_exits a boolean, if true creates the object if this one doesn't exist
# @param request_options contains extra parameters to send with your query
#
def partial_update_object(object, objectID = nil, create_if_not_exits = true, request_options = {})
client.post(Protocol.partial_object_uri(name, get_objectID(object, objectID), create_if_not_exits), object.to_json, :write, request_options)
end
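# Example (illustrative): updates one attribute without resending the
# whole record; the objectID is read from the object itself.
#
#   index.partial_update_object({ :objectID => '1', :stars => 5 })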
#
# Partially override the content of several objects
#
# @param objects an array of objects to update (each object must contain an objectID attribute)
# @param create_if_not_exits a boolean, if true create the objects if they don't exist
# @param request_options contains extra parameters to send with your query
#
def partial_update_objects(objects, create_if_not_exits = true, request_options = {})
if create_if_not_exits
batch(build_batch('partialUpdateObject', objects, true), request_options)
else
batch(build_batch('partialUpdateObjectNoCreate', objects, true), request_options)
end
end
#
# Partially override the content of several objects and wait end of indexing
#
# @param objects an array of objects to update (each object must contain an objectID attribute)
# @param create_if_not_exits a boolean, if true create the objects if they don't exist
# @param request_options contains extra parameters to send with your query
#
def partial_update_objects!(objects, create_if_not_exits = true, request_options = {})
res = partial_update_objects(objects, create_if_not_exits, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Update partially an object (only update attributes passed in argument) and wait for the end of indexing
#
# @param object the attributes to override
# @param objectID the associated objectID, if nil 'object' must contain an 'objectID' key
# @param create_if_not_exits a boolean, if true creates the object if this one doesn't exist
# @param request_options contains extra parameters to send with your query
#
def partial_update_object!(object, objectID = nil, create_if_not_exits = true, request_options = {})
res = partial_update_object(object, objectID, create_if_not_exits, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Delete an object from the index
#
# @param objectID the unique identifier of object to delete
# @param request_options contains extra parameters to send with your query
#
def delete_object(objectID, request_options = {})
raise ArgumentError.new('objectID must not be blank') if objectID.nil? || objectID == ''
client.delete(Protocol.object_uri(name, objectID), :write, request_options)
end
#
# Delete an object from the index and wait for the end of indexing
#
# @param objectID the unique identifier of object to delete
# @param request_options contains extra parameters to send with your query
#
def delete_object!(objectID, request_options = {})
res = delete_object(objectID, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Delete several objects
#
# @param objects an array of objectIDs
# @param request_options contains extra parameters to send with your query
#
def delete_objects(objects, request_options = {})
check_array(objects)
batch(build_batch('deleteObject', objects.map { |objectID| { :objectID => objectID } }, false), request_options)
end
#
# Delete several objects and wait for the end of indexing
#
# @param objects an array of objectIDs
# @param request_options contains extra parameters to send with your query
#
def delete_objects!(objects, request_options = {})
res = delete_objects(objects, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Delete all objects matching a query
# This method retrieves all objects synchronously but deletes in batch
# asynchronously
#
# @param query the query string
# @param params the optional query parameters
# @param request_options contains extra parameters to send with your query
#
def delete_by_query(query, params = nil, request_options = {})
raise ArgumentError.new('query cannot be nil, use the `clear` method to wipe the entire index') if query.nil? && params.nil?
params = sanitized_delete_by_query_params(params)
params[:query] = query
params[:hitsPerPage] = 1000
params[:distinct] = false
params[:attributesToRetrieve] = ['objectID']
params[:cursor] = ''
ids = []
while params[:cursor] != nil
result = browse(params, nil, request_options)
params[:cursor] = result['cursor']
hits = result['hits']
break if hits.empty?
ids += hits.map { |hit| hit['objectID'] }
end
delete_objects(ids, request_options)
end
#
# Delete all objects matching a query and wait for the end of indexing
#
# @param query the query string
# @param params the optional query parameters
# @param request_options contains extra parameters to send with your query
#
def delete_by_query!(query, params = nil, request_options = {})
res = delete_by_query(query, params, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options) if res
res
end
#
# Delete all objects matching a query (doesn't work with actual text queries)
# This method deletes every record matching the filters provided
#
# @param params query parameters
# @param request_options contains extra parameters to send with your query
#
def delete_by(params, request_options = {})
raise ArgumentError.new('params cannot be nil, use the `clear` method to wipe the entire index') if params.nil?
params = sanitized_delete_by_query_params(params)
client.post(Protocol.delete_by_uri(name), params.to_json, :write, request_options)
end
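# Example (illustrative): removes every record matching a filter;
# `filters` is a regular Algolia query parameter.
#
#   index.delete_by({ :filters => 'category:car' })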
#
# Delete all objects matching a query (doesn't work with actual text queries)
# This method deletes every record matching the filters provided and waits for the end of indexing
# @param params query parameters
# @param request_options contains extra parameters to send with your query
#
def delete_by!(params, request_options = {})
res = delete_by(params, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options) if res
res
end
#
# Delete the index content
#
# @param request_options contains extra parameters to send with your query
#
def clear(request_options = {})
client.post(Protocol.clear_uri(name), {}, :write, request_options)
end
alias_method :clear_index, :clear
#
# Delete the index content and wait for the end of indexing
#
def clear!(request_options = {})
res = clear(request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
alias_method :clear_index!, :clear!
#
# Set settings for this index
#
def set_settings(new_settings, options = {}, request_options = {})
client.put(Protocol.settings_uri(name, options), new_settings.to_json, :write, request_options)
end
#
# Set settings for this index and wait for the end of indexing
#
def set_settings!(new_settings, options = {}, request_options = {})
res = set_settings(new_settings, options, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Get settings of this index
#
def get_settings(options = {}, request_options = {})
options['getVersion'] = 2 if !options[:getVersion] && !options['getVersion']
client.get(Protocol.settings_uri(name, options).to_s, :read, request_options)
end
#
# List all existing user keys with their associated ACLs
#
# Deprecated: Please use `client.list_api_keys` instead.
def list_api_keys(request_options = {})
client.get(Protocol.index_keys_uri(name), :read, request_options)
end
#
# Get ACL of a user key
#
# Deprecated: Please use `client.get_api_key` instead.
def get_api_key(key, request_options = {})
client.get(Protocol.index_key_uri(name, key), :read, request_options)
end
#
# Create a new user key
#
# @param object can be two different parameters:
# The list of parameters for this key. Defined by a Hash that can
# contain the following values:
# - acl: array of string
# - validity: int
# - referers: array of string
# - description: string
# - maxHitsPerQuery: integer
# - queryParameters: string
# - maxQueriesPerIPPerHour: integer
# Or the list of ACL for this key. Defined by an array of String that
# can contain the following values:
# - search: allow to search (https and http)
# - addObject: allows to add/update an object in the index (https only)
# - deleteObject : allows to delete an existing object (https only)
# - deleteIndex : allows to delete index content (https only)
# - settings : allows to get index settings (https only)
# - editSettings : allows to change index settings (https only)
# @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key)
# @param max_queries_per_IP_per_hour the maximum number of API calls allowed from an IP address per hour (0 means unlimited)
# @param max_hits_per_query the maximum number of hits this API key can retrieve in one call (0 means unlimited)
# @param request_options contains extra parameters to send with your query
#
# Deprecated: Please use `client.add_api_key` instead
def add_api_key(object, validity = 0, max_queries_per_IP_per_hour = 0, max_hits_per_query = 0, request_options = {})
if object.instance_of?(Array)
params = { :acl => object }
else
params = object
end
params['validity'] = validity.to_i if validity != 0
params['maxHitsPerQuery'] = max_hits_per_query.to_i if max_hits_per_query != 0
params['maxQueriesPerIPPerHour'] = max_queries_per_IP_per_hour.to_i if max_queries_per_IP_per_hour != 0
client.post(Protocol.index_keys_uri(name), params.to_json, :write, request_options)
end
#
# Update a user key
#
# @param object can be two different parameters:
# The list of parameters for this key. Defined by a Hash that
# can contain the following values:
# - acl: array of string
# - validity: int
# - referers: array of string
# - description: string
# - maxHitsPerQuery: integer
# - queryParameters: string
# - maxQueriesPerIPPerHour: integer
# Or the list of ACL for this key. Defined by an array of String that
# can contain the following values:
# - search: allow to search (https and http)
# - addObject: allows to add/update an object in the index (https only)
# - deleteObject : allows to delete an existing object (https only)
# - deleteIndex : allows to delete index content (https only)
# - settings : allows to get index settings (https only)
# - editSettings : allows to change index settings (https only)
# @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key)
# @param max_queries_per_IP_per_hour the maximum number of API calls allowed from an IP address per hour (0 means unlimited)
# @param max_hits_per_query the maximum number of hits this API key can retrieve in one call (0 means unlimited)
# @param request_options contains extra parameters to send with your query
#
# Deprecated: Please use `client.update_api_key` instead
def update_api_key(key, object, validity = 0, max_queries_per_IP_per_hour = 0, max_hits_per_query = 0, request_options = {})
if object.instance_of?(Array)
params = { :acl => object }
else
params = object
end
params['validity'] = validity.to_i if validity != 0
params['maxHitsPerQuery'] = max_hits_per_query.to_i if max_hits_per_query != 0
params['maxQueriesPerIPPerHour'] = max_queries_per_IP_per_hour.to_i if max_queries_per_IP_per_hour != 0
client.put(Protocol.index_key_uri(name, key), params.to_json, :write, request_options)
end
#
# Delete an existing user key
#
# Deprecated: Please use `client.delete_api_key` instead
def delete_api_key(key, request_options = {})
client.delete(Protocol.index_key_uri(name, key), :write, request_options)
end
#
# Send a batch request
#
def batch(request, request_options = {})
client.post(Protocol.batch_uri(name), request.to_json, :batch, request_options)
end
#
# Send a batch request and wait for the end of indexing
#
def batch!(request, request_options = {})
res = batch(request, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Search for facet values
#
# @param facet_name Name of the facet to search. It must have been declared in the
# index's `attributesForFaceting` setting with the `searchable()` modifier.
# @param facet_query Text to search for in the facet's values
# @param search_parameters An optional query to take extra search parameters into account.
# These parameters apply to index objects like in a regular search query.
# Only facet values contained in the matched objects will be returned.
# @param request_options contains extra parameters to send with your query
#
def search_for_facet_values(facet_name, facet_query, search_parameters = {}, request_options = {})
params = search_parameters.clone
params['facetQuery'] = facet_query
client.post(Protocol.search_facet_uri(name, facet_name), params.to_json, :read, request_options)
end
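# Example (illustrative): searches the values of a searchable facet,
# optionally narrowed by a regular search parameter.
#
#   index.search_for_facet_values('category', 'phone',
#                                 { :filters => 'brand:Apple' })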
# deprecated
alias_method :search_facet, :search_for_facet_values
#
# Perform a search with disjunctive facets generating as many queries as number of disjunctive facets
#
# @param query the query
# @param disjunctive_facets the array of disjunctive facets
# @param params a hash representing the regular query parameters
# @param refinements a hash ("string" -> ["array", "of", "refined", "values"]) representing the current refinements
# ex: { "my_facet1" => ["my_value1", ["my_value2"], "my_disjunctive_facet1" => ["my_value1", "my_value2"] }
# @param request_options contains extra parameters to send with your query
#
def search_disjunctive_faceting(query, disjunctive_facets, params = {}, refinements = {}, request_options = {})
raise ArgumentError.new('Argument "disjunctive_facets" must be a String or an Array') unless disjunctive_facets.is_a?(String) || disjunctive_facets.is_a?(Array)
raise ArgumentError.new('Argument "refinements" must be a Hash of Arrays') if !refinements.is_a?(Hash) || !refinements.select { |k, v| !v.is_a?(Array) }.empty?
# extract disjunctive facets & associated refinements
disjunctive_facets = disjunctive_facets.split(',') if disjunctive_facets.is_a?(String)
disjunctive_refinements = {}
refinements.each do |k, v|
disjunctive_refinements[k] = v if disjunctive_facets.include?(k) || disjunctive_facets.include?(k.to_s)
end
# build queries
queries = []
## hits + regular facets query
filters = []
refinements.to_a.each do |k, values|
r = values.map { |v| "#{k}:#{v}" }
if disjunctive_refinements[k.to_s] || disjunctive_refinements[k.to_sym]
# disjunctive refinements are ORed
filters << r
else
# regular refinements are ANDed
filters += r
end
end
queries << params.merge({ :index_name => self.name, :query => query, :facetFilters => filters })
## one query per disjunctive facet (use all refinements but the current one + hitsPerPage=1 + single facet)
disjunctive_facets.each do |disjunctive_facet|
filters = []
refinements.each do |k, values|
if k.to_s != disjunctive_facet.to_s
r = values.map { |v| "#{k}:#{v}" }
if disjunctive_refinements[k.to_s] || disjunctive_refinements[k.to_sym]
# disjunctive refinements are ORed
filters << r
else
# regular refinements are ANDed
filters += r
end
end
end
queries << params.merge({
:index_name => self.name,
:query => query,
:page => 0,
:hitsPerPage => 1,
:attributesToRetrieve => [],
:attributesToHighlight => [],
:attributesToSnippet => [],
:facets => disjunctive_facet,
:facetFilters => filters,
:analytics => false
})
end
answers = client.multiple_queries(queries, { :request_options => request_options })
# aggregate answers
## first answer stores the hits + regular facets
aggregated_answer = answers['results'][0]
## others store the disjunctive facets
aggregated_answer['disjunctiveFacets'] = {}
answers['results'].each_with_index do |a, i|
next if i == 0
a['facets'].each do |facet, values|
## add the facet to the disjunctive facet hash
aggregated_answer['disjunctiveFacets'][facet] = values
## concatenate missing refinements
(disjunctive_refinements[facet.to_s] || disjunctive_refinements[facet.to_sym] || []).each do |r|
if aggregated_answer['disjunctiveFacets'][facet][r].nil?
aggregated_answer['disjunctiveFacets'][facet][r] = 0
end
end
end
end
aggregated_answer
end
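# Example (illustrative): OR-combined refinements on the `brand` facet
# while other facets keep their regular AND semantics.
#
#   index.search_disjunctive_faceting('phone', ['brand'],
#     { :facets => 'category' }, { 'brand' => ['Apple', 'Samsung'] })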
#
# Alias of Algolia.list_indexes
#
# @param request_options contains extra parameters to send with your query
#
def Index.all(request_options = {})
Algolia.list_indexes(request_options)
end
#
# Search synonyms
#
# @param query the query
# @param params an optional hash of :type, :page, :hitsPerPage
# @param request_options contains extra parameters to send with your query
#
def search_synonyms(query, params = {}, request_options = {})
type = params[:type] || params['type']
type = type.join(',') if type.is_a?(Array)
page = params[:page] || params['page'] || 0
hits_per_page = params[:hitsPerPage] || params['hitsPerPage'] || 20
params = {
:query => query,
:type => type.to_s,
:page => page,
:hitsPerPage => hits_per_page
}
client.post(Protocol.search_synonyms_uri(name), params.to_json, :read, request_options)
end
#
# Get a synonym
#
# @param objectID the synonym objectID
# @param request_options contains extra parameters to send with your query
def get_synonym(objectID, request_options = {})
client.get(Protocol.synonym_uri(name, objectID), :read, request_options)
end
#
# Delete a synonym
#
# @param objectID the synonym objectID
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def delete_synonym(objectID, forward_to_replicas = false, request_options = {})
client.delete("#{Protocol.synonym_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}", :write, request_options)
end
#
# Delete a synonym and wait the end of indexing
#
# @param objectID the synonym objectID
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def delete_synonym!(objectID, forward_to_replicas = false, request_options = {})
res = delete_synonym(objectID, forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Save a synonym
#
# @param objectID the synonym objectID
# @param synonym the synonym
# @param forward_to_replicas should we forward the save to replica indices
# @param request_options contains extra parameters to send with your query
#
def save_synonym(objectID, synonym, forward_to_replicas = false, request_options = {})
client.put("#{Protocol.synonym_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}", synonym.to_json, :write, request_options)
end
#
# Save a synonym and wait for the end of indexing
#
# @param objectID the synonym objectID
# @param synonym the synonym
# @param forward_to_replicas should we forward the save to replica indices
# @param request_options contains extra parameters to send with your query
#
def save_synonym!(objectID, synonym, forward_to_replicas = false, request_options = {})
res = save_synonym(objectID, synonym, forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Clear all synonyms
#
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def clear_synonyms(forward_to_replicas = false, request_options = {})
client.post("#{Protocol.clear_synonyms_uri(name)}?forwardToReplicas=#{forward_to_replicas}", {}, :write, request_options)
end
#
# Clear all synonyms and wait for the end of indexing
#
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def clear_synonyms!(forward_to_replicas = false, request_options = {})
res = clear_synonyms(forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Add/Update an array of synonyms
#
# @param synonyms the array of synonyms to add/update
# @param forward_to_replicas should we forward the update to replica indices
# @param replace_existing_synonyms should we replace the existing synonyms before adding the new ones
# @param request_options contains extra parameters to send with your query
#
def batch_synonyms(synonyms, forward_to_replicas = false, replace_existing_synonyms = false, request_options = {})
client.post("#{Protocol.batch_synonyms_uri(name)}?forwardToReplicas=#{forward_to_replicas}&replaceExistingSynonyms=#{replace_existing_synonyms}", synonyms.to_json, :batch, request_options)
end
#
# Add/Update an array of synonyms and wait for the end of indexing
#
# @param synonyms the array of synonyms to add/update
# @param forward_to_replicas should we forward the update to replica indices
# @param replace_existing_synonyms should we replace the existing synonyms before adding the new ones
# @param request_options contains extra parameters to send with your query
#
def batch_synonyms!(synonyms, forward_to_replicas = false, replace_existing_synonyms = false, request_options = {})
res = batch_synonyms(synonyms, forward_to_replicas, replace_existing_synonyms, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Replace synonyms in the index with the given array of synonyms
#
# @param synonyms the array of synonyms to add
# @param request_options contains extra parameters to send with your query
#
def replace_all_synonyms(synonyms, request_options = {})
forward_to_replicas = request_options[:forwardToReplicas] || request_options['forwardToReplicas'] || false
batch_synonyms(synonyms, forward_to_replicas, true, request_options)
end
#
# Replace synonyms in the index with the given array of synonyms and wait for the end of indexing
#
# @param synonyms the array of synonyms to add
# @param request_options contains extra parameters to send with your query
#
def replace_all_synonyms!(synonyms, request_options = {})
res = replace_all_synonyms(synonyms, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Export the full list of synonyms
# Accepts an optional block to which it will pass each synonym
# Also returns an array with all the synonyms
#
# @param hits_per_page Amount of synonyms to retrieve on each internal request - Optional - Default: 100
# @param request_options contains extra parameters to send with your query - Optional
#
def export_synonyms(hits_per_page = 100, request_options = {}, &_block)
res = []
page = 0
loop do
curr = search_synonyms('', { :hitsPerPage => hits_per_page, :page => page }, request_options)['hits']
curr.each do |synonym|
res << synonym
yield synonym if block_given?
end
break if curr.size < hits_per_page
page += 1
end
res
end
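# Usage sketch (illustrative, not part of the original file): the loop
# above stops as soon as a page comes back shorter than hits_per_page.
#
#   all = index.export_synonyms(500) { |synonym| puts synonym['objectID'] }
#   all.size # => total number of synonyms in the index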
#
# Search rules
#
# @param query the query
# @param params an optional hash of :anchoring, :context, :page, :hitsPerPage
# @param request_options contains extra parameters to send with your query
#
def search_rules(query, params = {}, request_options = {})
anchoring = params[:anchoring]
context = params[:context]
page = params[:page] || params['page'] || 0
hits_per_page = params[:hitsPerPage] || params['hitsPerPage'] || 20
params = {
:query => query,
:page => page,
:hitsPerPage => hits_per_page
}
params[:anchoring] = anchoring unless anchoring.nil?
params[:context] = context unless context.nil?
client.post(Protocol.search_rules_uri(name), params.to_json, :read, request_options)
end
#
# Get a rule
#
# @param objectID the rule objectID
# @param request_options contains extra parameters to send with your query
#
def get_rule(objectID, request_options = {})
client.get(Protocol.rule_uri(name, objectID), :read, request_options)
end
#
# Delete a rule
#
# @param objectID the rule objectID
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def delete_rule(objectID, forward_to_replicas = false, request_options = {})
client.delete("#{Protocol.rule_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}", :write, request_options)
end
#
# Delete a rule and wait for the end of indexing
#
# @param objectID the rule objectID
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def delete_rule!(objectID, forward_to_replicas = false, request_options = {})
res = delete_rule(objectID, forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
return res
end
#
# Save a rule
#
# @param objectID the rule objectID
# @param rule the rule
# @param forward_to_replicas should we forward the save to replica indices
# @param request_options contains extra parameters to send with your query
#
def save_rule(objectID, rule, forward_to_replicas = false, request_options = {})
raise ArgumentError.new('objectID must not be blank') if objectID.nil? || objectID == ''
client.put("#{Protocol.rule_uri(name, objectID)}?forwardToReplicas=#{forward_to_replicas}", rule.to_json, :write, request_options)
end
#
# Save a rule and wait for the end of indexing
#
# @param objectID the rule objectID
# @param rule the rule
# @param forward_to_replicas should we forward the save to replica indices
# @param request_options contains extra parameters to send with your query
#
def save_rule!(objectID, rule, forward_to_replicas = false, request_options = {})
res = save_rule(objectID, rule, forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
return res
end
#
# Clear all rules
#
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def clear_rules(forward_to_replicas = false, request_options = {})
client.post("#{Protocol.clear_rules_uri(name)}?forwardToReplicas=#{forward_to_replicas}", {}, :write, request_options)
end
#
# Clear all rules and wait for the end of indexing
#
# @param forward_to_replicas should we forward the delete to replica indices
# @param request_options contains extra parameters to send with your query
#
def clear_rules!(forward_to_replicas = false, request_options = {})
res = clear_rules(forward_to_replicas, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
return res
end
#
# Add/Update an array of rules
#
# @param rules the array of rules to add/update
# @param forward_to_replicas should we forward the update to replica indices
# @param clear_existing_rules should we clear the existing rules before adding the new ones
# @param request_options contains extra parameters to send with your query
#
def batch_rules(rules, forward_to_replicas = false, clear_existing_rules = false, request_options = {})
client.post("#{Protocol.batch_rules_uri(name)}?forwardToReplicas=#{forward_to_replicas}&clearExistingRules=#{clear_existing_rules}", rules.to_json, :batch, request_options)
end
#
# Add/Update an array of rules and wait for the end of indexing
#
# @param rules the array of rules to add/update
# @param forward_to_replicas should we forward the update to replica indices
# @param clear_existing_rules should we clear the existing rules before adding the new ones
# @param request_options contains extra parameters to send with your query
#
def batch_rules!(rules, forward_to_replicas = false, clear_existing_rules = false, request_options = {})
res = batch_rules(rules, forward_to_replicas, clear_existing_rules, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
return res
end
#
# Replace rules in the index with the given array of rules
#
# @param rules the array of rules to add
# @param request_options contains extra parameters to send with your query
#
def replace_all_rules(rules, request_options = {})
forward_to_replicas = request_options[:forwardToReplicas] || request_options['forwardToReplicas'] || false
batch_rules(rules, forward_to_replicas, true, request_options)
end
#
# Replace rules in the index with the given array of rules and wait for the end of indexing
#
# @param rules the array of rules to add
# @param request_options contains extra parameters to send with your query
#
def replace_all_rules!(rules, request_options = {})
res = replace_all_rules(rules, request_options)
wait_task(res['taskID'], WAIT_TASK_DEFAULT_TIME_BEFORE_RETRY, request_options)
res
end
#
# Export the full list of rules
# Accepts an optional block to which it will pass each rule
# Also returns an array with all the rules
#
# @param hits_per_page Amount of rules to retrieve on each internal request - Optional - Default: 100
# @param request_options contains extra parameters to send with your query - Optional
#
def export_rules(hits_per_page = 100, request_options = {}, &_block)
res = []
page = 0
loop do
curr = search_rules('', { :hitsPerPage => hits_per_page, :page => page }, request_options)['hits'] # search_rules reads :hitsPerPage, not :hits_per_page
curr.each do |rule|
res << rule
yield rule if block_given?
end
break if curr.size < hits_per_page
page += 1
end
res
end
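# Usage sketch (illustrative, not part of the original file): pagination
# works like export_synonyms above, and relies on search_rules honoring
# the :hitsPerPage key.
#
#   rules = index.export_rules(200)
#   rules.map { |rule| rule['objectID'] }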
# Deprecated
alias_method :get_user_key, :get_api_key
alias_method :list_user_keys, :list_api_keys
alias_method :add_user_key, :add_api_key
alias_method :update_user_key, :update_api_key
alias_method :delete_user_key, :delete_api_key
private
def check_array(object)
raise ArgumentError.new('argument must be an array of objects') if !object.is_a?(Array)
end
def check_object(object, in_array = false)
case object
when Array
raise ArgumentError.new(in_array ? 'argument must be an array of objects' : 'argument must not be an array')
when String, Integer, Float, TrueClass, FalseClass, NilClass
raise ArgumentError.new("argument must be an #{'array of' if in_array} object, got: #{object.inspect}")
else
# ok
end
end
def get_objectID(object, objectID = nil)
check_object(object)
objectID ||= object[:objectID] || object['objectID']
raise ArgumentError.new("Missing 'objectID'") if objectID.nil?
return objectID
end
def build_batch(action, objects, with_object_id = false)
check_array(objects)
{
:requests => objects.map { |object|
check_object(object, true)
h = { :action => action, :body => object }
h[:objectID] = get_objectID(object).to_s if with_object_id
h
}
}
end
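# Shape sketch (illustrative, not part of the original file) of what the
# helper above builds for a single-object batch:
#
#   build_batch('updateObject', [{ 'objectID' => '1', 'name' => 'a' }], true)
#   # => { :requests => [{ :action => 'updateObject',
#   #      :body => { 'objectID' => '1', 'name' => 'a' }, :objectID => '1' }] }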
def sanitized_delete_by_query_params(params)
params ||= {}
params.delete(:hitsPerPage)
params.delete('hitsPerPage')
params.delete(:attributesToRetrieve)
params.delete('attributesToRetrieve')
params
end
end
|
rmagick/rmagick | examples/identify.rb | Magick.Image.identify | ruby | def identify
printf 'Image: '
puts "#{base_filename}=>" if base_filename != filename
puts filename + "\n"
puts "\tFormat: #{format}\n"
puts "\tGeometry: #{columns}x#{rows}\n"
puts "\tClass: #{class_type}\n"
puts "\tType: #{image_type}\n"
puts "\tEndianess: #{endian}\n"
puts "\tColorspace: #{colorspace}\n"
puts "\tChannelDepth:\n"
color_space = gray? ? Magick::GRAYColorspace : colorspace
case color_space
when Magick::RGBColorspace
puts "\t\tRed: #{channel_depth(Magick::RedChannel)}-bits\n"
puts "\t\tGreen: #{channel_depth(Magick::GreenChannel)}-bits\n"
puts "\t\tBlue: #{channel_depth(Magick::BlueChannel)}-bits\n"
puts "\t\tOpacity: #{channel_depth(Magick::OpacityChannel)}-bits\n" if matte
when Magick::CMYKColorspace
puts "\t\tCyan : #{channel_depth(Magick::CyanChannel)}-bits\n"
puts "\t\tMagenta: #{channel_depth(Magick::MagentaChannel)}-bits\n"
puts "\t\tYellow: #{channel_depth(Magick::YellowChannel)}-bits\n"
puts "\t\tBlack: #{channel_depth(Magick::BlackChannel)}-bits\n"
puts "\t\tOpacity: #{channel_depth(Magick::OpacityChannel)}-bits\n" if matte
when Magick::GRAYColorspace
puts "\t\tGray: #{channel_depth(Magick::GrayChannel)}-bits\n"
puts "\t\tOpacity: #{channel_depth(Magick::OpacityChannel)}-bits\n" if matte
end
scale = Magick::QuantumRange / (Magick::QuantumRange >> (Magick::MAGICKCORE_QUANTUM_DEPTH - channel_depth))
puts "\tChannel statistics:\n"
case color_space
when Magick::RGBColorspace
puts "\t\tRed:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::RedChannel)[0] / scale, channel_extrema(Magick::RedChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::RedChannel)[1] / scale, channel_extrema(Magick::RedChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean: " + sprintf("%g (%g)\n", channel_mean(Magick::RedChannel)[0] / scale, channel_mean(Magick::RedChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation: " + sprintf("%g (%g)\n", channel_mean(Magick::RedChannel)[1] / scale, channel_mean(Magick::RedChannel)[1] / Magick::QuantumRange)
puts "\t\tGreen:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::GreenChannel)[0] / scale, channel_extrema(Magick::GreenChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::GreenChannel)[1] / scale, channel_extrema(Magick::GreenChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean: " + sprintf("%g (%g)\n", channel_mean(Magick::GreenChannel)[0] / scale, channel_mean(Magick::GreenChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation: " + sprintf("%g (%g)\n", channel_mean(Magick::GreenChannel)[1] / scale, channel_mean(Magick::GreenChannel)[1] / Magick::QuantumRange)
puts "\t\tBlue:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::BlueChannel)[0] / scale, channel_extrema(Magick::BlueChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::BlueChannel)[1] / scale, channel_extrema(Magick::BlueChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean: " + sprintf("%g (%g)\n", channel_mean(Magick::BlueChannel)[0] / scale, channel_mean(Magick::BlueChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation: " + sprintf("%g (%g)\n", channel_mean(Magick::BlueChannel)[1] / scale, channel_mean(Magick::BlueChannel)[1] / Magick::QuantumRange)
when Magick::CMYKColorspace
puts "\t\tCyan:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::CyanChannel)[0] / scale, channel_extrema(Magick::CyanChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::CyanChannel)[1] / scale, channel_extrema(Magick::CyanChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean: " + sprintf("%g (%g)\n", channel_mean(Magick::CyanChannel)[0] / scale, channel_mean(Magick::CyanChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation: " + sprintf("%g (%g)\n", channel_mean(Magick::CyanChannel)[1] / scale, channel_mean(Magick::CyanChannel)[1] / Magick::QuantumRange)
puts "\t\tMagenta:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::MagentaChannel)[0] / scale, channel_extrema(Magick::MagentaChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::MagentaChannel)[1] / scale, channel_extrema(Magick::MagentaChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean: " + sprintf("%g (%g)\n", channel_mean(Magick::MagentaChannel)[0] / scale, channel_mean(Magick::MagentaChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation: " + sprintf("%g (%g)\n", channel_mean(Magick::MagentaChannel)[1] / scale, channel_mean(Magick::MagentaChannel)[1] / Magick::QuantumRange)
puts "\t\tYellow:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::YellowChannel)[0] / scale, channel_extrema(Magick::YellowChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::YellowChannel)[1] / scale, channel_extrema(Magick::YellowChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean: " + sprintf("%g (%g)\n", channel_mean(Magick::YellowChannel)[0] / scale, channel_mean(Magick::YellowChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation: " + sprintf("%g (%g)\n", channel_mean(Magick::YellowChannel)[1] / scale, channel_mean(Magick::YellowChannel)[1] / Magick::QuantumRange)
puts "\t\tBlack:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::BlackChannel)[0] / scale, channel_extrema(Magick::BlackChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::BlackChannel)[1] / scale, channel_extrema(Magick::BlackChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean: " + sprintf("%g (%g)\n", channel_mean(Magick::BlackChannel)[0] / scale, channel_mean(Magick::BlackChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation: " + sprintf("%g (%g)\n", channel_mean(Magick::BlackChannel)[1] / scale, channel_mean(Magick::BlackChannel)[1] / Magick::QuantumRange)
when Magick::GRAYColorspace
puts "\t\tGray:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::GrayChannel)[0] / scale, channel_extrema(Magick::GrayChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::GrayChannel)[1] / scale, channel_extrema(Magick::GrayChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean: " + sprintf("%g (%g)\n", channel_mean(Magick::GrayChannel)[0] / scale, channel_mean(Magick::GrayChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation: " + sprintf("%g (%g)\n", channel_mean(Magick::GrayChannel)[1] / scale, channel_mean(Magick::GrayChannel)[1] / Magick::QuantumRange)
end
if matte
puts "\t\tOpacity:\n"
puts "\t\t\tMin: " + sprintf("%u (%g)\n", channel_extrema(Magick::OpacityChannel)[0] / scale, channel_extrema(Magick::OpacityChannel)[0] / Magick::QuantumRange)
puts "\t\t\tMax: " + sprintf("%u (%g)\n", channel_extrema(Magick::OpacityChannel)[1] / scale, channel_extrema(Magick::OpacityChannel)[1] / Magick::QuantumRange)
puts "\t\t\tMean:" + sprintf("%u (%g)\n", channel_mean(Magick::OpacityChannel)[0] / scale, channel_mean(Magick::OpacityChannel)[0] / Magick::QuantumRange)
puts "\t\t\tStandard deviation:" + sprintf("%u (%g)\n", channel_mean(Magick::OpacityChannel)[1] / scale, channel_mean(Magick::OpacityChannel)[1] / Magick::QuantumRange)
end
if class_type == Magick::DirectClass
puts "\tColors: #{total_colors}\n"
elsif total_colors <= colors
puts "\tColors: #{colors}\n"
else
puts "\tColors: #{total_colors}=>#{colors}\n"
end
# Histogram goes here
puts "\tMean error per pixel: #{mean_error_per_pixel}\n" if mean_error_per_pixel != 0.0
puts "\tNormalized mean error: #{normalized_mean_error}\n" if normalized_mean_error != 0.0
puts "\tNormalized maximum error: #{normalized_maximum_error}\n" if normalized_maximum_error != 0.0
puts "\tRendering-intent: #{rendering_intent}\n"
puts "\tGamma: #{gamma}\n" if gamma != 0.0
chrom = chromaticity
if chrom.red_primary.x != 0.0 || chrom.green_primary.x != 0.0 || chrom.blue_primary.x != 0.0 || chrom.white_point.x != 0.0
puts "\tChromaticity:\n"
puts "\t\tred primary: (#{sprintf('%g,%g', chrom.red_primary.x, chrom.red_primary.y)})\n"
puts "\t\tgreen primary: (#{sprintf('%g,%g', chrom.green_primary.x, chrom.green_primary.y)})\n"
puts "\t\tblue primary: (#{sprintf('%g,%g', chrom.blue_primary.x, chrom.blue_primary.y)})\n"
puts "\t\twhite point: (#{sprintf('%g,%g', chrom.white_point.x, chrom.white_point.y)})\n"
end
ex_info = extract_info
puts "\tTile geometry: #{ex_info.width}x#{ex_info.height}+#{ex_info.x}+#{ex_info.y}\n" if ex_info.width * ex_info.height != 0.0
puts "\tResolution: #{sprintf('%gx%g', x_resolution, y_resolution)}\n" if x_resolution != 0.0 && y_resolution != 0.0
puts "\tUnits: #{units}\n"
size = filesize
if size >= 1_048_576
puts "\tFilesize: #{sprintf('%.1f', (size / 1_048_576.0))}mb\n"
elsif size >= 1024
puts "\tFilesize: #{sprintf('%.0f', (size / 1024.0))}kb\n"
else
puts "\tFilesize: #{size}b\n"
end
puts "\tInterlace: #{interlace}\n"
puts "\tBackground Color: #{background_color}\n"
puts "\tBorder Color: #{border_color}\n"
puts "\tMatte Color: #{matte_color}\n"
pg = page
puts "\tPage geometry: #{pg.width}x#{pg.height}+#{pg.x}+#{pg.y}\n" if pg.width != 0 || pg.height != 0 || pg.x != 0 || pg.y != 0
puts "\tDispose: #{dispose}\n"
puts "\tDelay: #{delay}\n" if delay != 0
puts "\tIterations: #{iterations}\n" unless iterations == 1
puts "\tScene: #{scene}\n" if scene != 0
puts "\tCompression: #{compression}\n"
puts "\tQuality: #{quality}\n" unless quality.zero?
puts "\tOrientation: #{orientation}\n"
puts "\tMontage: #{montage}\n" if montage
signature # compute but ignore - will be displayed along with the other properties
properties.each do |prop, value|
next if prop[0, 1] == '['
puts "\t#{prop}: #{value}\n"
end
clip_path = self['8BIM:1999,2998:#1']
puts "\tClipping path: #{clip_path}\n" if clip_path
each_profile do |name, value|
puts "\tProfile-#{name}: #{value.length}\n"
next unless name == 'exif'
exif_attrs = get_exif_by_entry
exif_attrs.each do |attr|
puts "\t\t#{attr[0]}: #{attr[1]}\n"
end
end
puts "\tTainted: True\n" if changed?
puts "\tTainted: False\n" unless changed?
puts "\tVersion: #{Magick::Version}\n"
puts "\t #{Magick::Magick_version}\n"
end | Print information similar to the identify -verbose command | train | https://github.com/rmagick/rmagick/blob/ef6688ed9d76bf123c2ea1a483eff8635051adb7/examples/identify.rb#L6-L156 | class Image
# Print information similar to the identify -verbose command
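# Usage sketch (illustrative; the file name is an assumption):
#
#   img = Magick::Image.read('photo.png').first
#   img.identify # prints format, geometry, channel statistics, profiles, ...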
end
|
berkshelf/solve | lib/solve/ruby_solver.rb | Solve.RubySolver.requirement_satisfied_by? | ruby | def requirement_satisfied_by?(requirement, activated, spec)
version = spec.version
return false unless requirement.constraint.satisfies?(version)
shared_possibility_versions = possibility_versions(requirement, activated)
return false if !shared_possibility_versions.empty? && !shared_possibility_versions.include?(version)
true
end | Callback required by Molinillo
Determines whether the given `requirement` is satisfied by the given
`spec`, in the context of the current `activated` dependency graph.
@param [Object] requirement
@param [DependencyGraph] activated the current dependency graph in the
resolution process.
@param [Object] spec
@return [Boolean] whether `requirement` is satisfied by `spec` in the
context of the current `activated` dependency graph. | train | https://github.com/berkshelf/solve/blob/a0e03ede13e2f66b8dd6d0d34c9c9db70fba94d2/lib/solve/ruby_solver.rb#L171-L177 | class RubySolver
class << self
# The timeout to use when resolving graphs, returned in milliseconds. The
# default is 30 seconds; it can be configured (in seconds) by setting the
# SOLVE_TIMEOUT environment variable.
#
# @return [Integer]
def timeout
seconds = 30 unless ( seconds = ENV["SOLVE_TIMEOUT"] )
seconds.to_i * 1_000
end
# For optional solver engines, this attempts to load dependencies. The
# RubySolver is a non-optional component, so this is a no-op
def activate
true
end
end
# Graph object with references to all known artifacts and dependency
# constraints.
#
# @return [Solve::Graph]
attr_reader :graph
# @example Demands are Arrays of Arrays with an artifact name and optional constraint:
# [['nginx', '= 1.0.0'], ['mysql']]
# @return [Array<String>, Array<Array<String, String>>] demands
attr_reader :demands_array
# @example Basic use:
# graph = Solve::Graph.new
# graph.artifacts("mysql", "1.2.0")
# demands = [["mysql"]]
# RubySolver.new(graph, demands)
# @param [Solve::Graph] graph
# @param [Array<String>, Array<Array<String, String>>] demands
def initialize(graph, demands, options = {})
@graph = graph
@demands_array = demands
@timeout_ms = self.class.timeout
@ui = options[:ui] # could be nil, but that's okay
@dependency_source = options[:dependency_source] || "user-specified dependency"
@molinillo_graph = Molinillo::DependencyGraph.new
@resolver = Molinillo::Resolver.new(self, self)
end
# The problem demands given as Demand model objects
# @return [Array<Solve::Demand>]
def demands
demands_array.map do |name, constraint|
Demand.new(self, name, constraint)
end
end
# @option options [Boolean] :sorted
# return the solution as a sorted list instead of a Hash
#
# @return [Hash, List] Returns a hash like { "Artifact Name" => "Version",... }
# unless the :sorted option is true, then it returns a list like [["Artifact Name", "Version],...]
# @raise [Errors::NoSolutionError] when the demands cannot be met for the
# given graph.
# @raise [Errors::UnsortableSolutionError] when the :sorted option is true
# and the demands have a solution, but the solution contains a cyclic
# dependency
def resolve(options = {})
@ui = options[:ui] if options[:ui]
solved_graph = resolve_with_error_wrapping
solution = solved_graph.map(&:payload)
unsorted_solution = solution.inject({}) do |stringified_soln, artifact|
stringified_soln[artifact.name] = artifact.version.to_s
stringified_soln
end
if options[:sorted]
build_sorted_solution(unsorted_solution)
else
unsorted_solution
end
end
###
# Molinillo Callbacks
#
# Molinillo calls back to this class to get information about our
# dependency model objects. An abstract implementation is provided at
# https://github.com/CocoaPods/Molinillo/blob/master/lib/molinillo/modules/specification_provider.rb
#
###
# Callback required by Molinillo, called when the solve starts
# @return [Integer]
def progress_rate
1
end
# Callback required by Molinillo, called when the solve starts
# @return nil
def before_resolution
@ui.say("Starting dependency resolution") if @ui
end
# Callback required by Molinillo, called when the solve is complete.
# @return nil
def after_resolution
@ui.say("Finished dependency resolution") if @ui
end
# Callback required by Molinillo, called when resolving every progress_rate
# @return nil
def indicate_progress
nil
end
# Callback required by Molinillo, gives debug information about the solution
# @return nil
def debug(current_resolver_depth = 0)
# debug info will be returned if you call yield here, but it seems to be
# broken in current Molinillo
@ui.say(yield) if @ui
end
include Molinillo::SpecificationProvider
# Callback required by Molinillo
# Search for the specifications that match the given dependency.
# The specifications in the returned array will be considered in reverse
# order, so the latest version ought to be last.
# @note This method should be 'pure', i.e. the return value should depend
# only on the `dependency` parameter.
#
# @param [Object] dependency
# @return [Array<Solve::Artifact>] the artifacts that match the dependency.
def search_for(dependency)
# This array gets mutated by Molinillo; it's okay because sort returns a
# new array.
graph.versions(dependency.name, dependency.constraint).sort
end
# Callback required by Molinillo
# Returns the dependencies of `specification`.
# @note This method should be 'pure', i.e. the return value should depend
# only on the `specification` parameter.
#
# @param [Object] specification
# @return [Array<Solve::Dependency>] the dependencies of the given artifact
def dependencies_for(specification)
specification.dependencies
end
# Callback required by Molinillo
# Determines whether the given `requirement` is satisfied by the given
# `spec`, in the context of the current `activated` dependency graph.
#
# @param [Object] requirement
# @param [DependencyGraph] activated the current dependency graph in the
# resolution process.
# @param [Object] spec
# @return [Boolean] whether `requirement` is satisfied by `spec` in the
# context of the current `activated` dependency graph.
# Searches the current dependency graph to find previously activated
# requirements for the current artifact.
#
# @param [Object] requirement
# @param [DependencyGraph] activated the current dependency graph in the
# resolution process.
# @return [Array<Semverse::Version>] the list of currently activated versions
# of this requirement
def possibility_versions(requirement, activated)
activated.vertices.values.flat_map do |vertex|
next unless vertex.payload
next unless vertex.name == requirement.name
if vertex.payload.respond_to?(:possibilities)
vertex.payload.possibilities.map(&:version)
else
vertex.payload.version
end
end.compact
end
private :possibility_versions
# Callback required by Molinillo
# Returns the name for the given `dependency`.
# @note This method should be 'pure', i.e. the return value should depend
# only on the `dependency` parameter.
#
# @param [Object] dependency
# @return [String] the name for the given `dependency`.
def name_for(dependency)
dependency.name
end
# Callback required by Molinillo
# @return [String] the name of the source of explicit dependencies, i.e.
# those passed to {Resolver#resolve} directly.
def name_for_explicit_dependency_source
@dependency_source
end
# Callback required by Molinillo
# Sort dependencies so that the ones that are easiest to resolve are first.
# Easiest to resolve is (usually) defined by:
# 1) Is this dependency already activated?
# 2) How relaxed are the requirements?
# 3) Are there any conflicts for this dependency?
# 4) How many possibilities are there to satisfy this dependency?
#
# @param [Array<Object>] dependencies
# @param [DependencyGraph] activated the current dependency graph in the
# resolution process.
# @param [{String => Array<Conflict>}] conflicts
# @return [Array<Solve::Dependency>] the dependencies sorted by preference.
def sort_dependencies(dependencies, activated, conflicts)
dependencies.sort_by do |dependency|
name = name_for(dependency)
[
activated.vertex_named(name).payload ? 0 : 1,
conflicts[name] ? 0 : 1,
search_for(dependency).count,
]
end
end
# Callback required by Molinillo
# Returns whether this dependency, which has no possible matching
# specifications, can safely be ignored.
#
# @param [Object] dependency
# @return [Boolean] whether this dependency can safely be skipped.
def allow_missing?(dependency)
false
end
private
def resolve_with_error_wrapping
@resolver.resolve(demands, @molinillo_graph)
rescue Molinillo::VersionConflict, Molinillo::CircularDependencyError => e
raise Solve::Errors::NoSolutionError.new(e.message)
end
def build_sorted_solution(unsorted_solution)
nodes = Hash.new
unsorted_solution.each do |name, version|
nodes[name] = @graph.artifact(name, version).dependencies.map(&:name)
end
# Modified from http://ruby-doc.org/stdlib-1.9.3/libdoc/tsort/rdoc/TSort.html
class << nodes
include TSort
alias tsort_each_node each_key
def tsort_each_child(node, &block)
fetch(node).each(&block)
end
end
begin
sorted_names = nodes.tsort
rescue TSort::Cyclic => e
raise Solve::Errors::UnsortableSolutionError.new(e, unsorted_solution)
end
sorted_names.map do |artifact|
[artifact, unsorted_solution[artifact]]
end
end
end
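# Usage sketch (illustrative; names, versions and the Artifact#depends call
# reflect the @example above plus assumptions about the solve graph API):
#
#   graph = Solve::Graph.new
#   graph.artifact('mysql', '1.2.0')
#   graph.artifact('nginx', '1.0.0').depends('mysql', '= 1.2.0')
#   Solve::RubySolver.new(graph, [['nginx']]).resolve
#   # => { 'nginx' => '1.0.0', 'mysql' => '1.2.0' }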
|
moneta-rb/moneta | lib/moneta/expires.rb | Moneta.Expires.merge! | ruby | def merge!(pairs, options={})
expires = expires_at(options)
options = Utils.without(options, :expires)
block = if block_given?
lambda do |key, old_entry, entry|
old_entry = invalidate_entry(key, old_entry)
if old_entry.nil?
entry # behave as if no replace is happening
else
old_value, _ = old_entry
new_value, _ = entry
new_entry(yield(key, old_value, new_value), expires)
end
end
end
entry_pairs = pairs.map do |key, value|
[key, new_entry(value, expires)]
end
@adapter.merge!(entry_pairs, options, &block)
self
end | (see Defaults#merge!) | train | https://github.com/moneta-rb/moneta/blob/26a118c8b2c93d11257f4a5fe9334a8157f4db47/lib/moneta/expires.rb#L124-L146 | class Expires < Proxy
include ExpiresSupport
# @param [Moneta store] adapter The underlying store
# @param [Hash] options
# @option options [String] :expires Default expiration time
def initialize(adapter, options = {})
raise 'Store already supports feature :expires' if adapter.supports?(:expires)
super
self.default_expires = options[:expires]
end
# (see Proxy#key?)
def key?(key, options = {})
# Transformer might raise exception
load_entry(key, options) != nil
rescue Exception
super(key, Utils.without(options, :expires))
end
# (see Proxy#load)
def load(key, options = {})
return super if options.include?(:raw)
value, expires = load_entry(key, options)
value
end
# (see Proxy#store)
def store(key, value, options = {})
return super if options.include?(:raw)
expires = expires_at(options)
super(key, new_entry(value, expires), Utils.without(options, :expires))
value
end
# (see Proxy#delete)
def delete(key, options = {})
return super if options.include?(:raw)
value, expires = super
value if !expires || Time.now <= Time.at(expires)
end
# (see Proxy#store)
def create(key, value, options = {})
return super if options.include?(:raw)
expires = expires_at(options)
@adapter.create(key, new_entry(value, expires), Utils.without(options, :expires))
end
# (see Defaults#values_at)
def values_at(*keys, **options)
return super if options.include?(:raw)
new_expires = expires_at(options, nil)
options = Utils.without(options, :expires)
with_updates(options) do |updates|
keys.zip(@adapter.values_at(*keys, **options)).map do |key, entry|
entry = invalidate_entry(key, entry, new_expires) do |new_entry|
updates[key] = new_entry
end
next if entry.nil?
value, _ = entry
value
end
end
end
# (see Defaults#fetch_values)
def fetch_values(*keys, **options)
return super if options.include?(:raw)
new_expires = expires_at(options, nil)
options = Utils.without(options, :expires)
substituted = {}
block = if block_given?
lambda do |key|
substituted[key] = true
yield key
end
end
with_updates(options) do |updates|
keys.zip(@adapter.fetch_values(*keys, **options, &block)).map do |key, entry|
next entry if substituted[key]
entry = invalidate_entry(key, entry, new_expires) do |new_entry|
updates[key] = new_entry
end
if entry.nil?
value = if block_given?
yield key
end
else
value, _ = entry
end
value
end
end
end
# (see Defaults#slice)
def slice(*keys, **options)
return super if options.include?(:raw)
new_expires = expires_at(options, nil)
options = Utils.without(options, :expires)
with_updates(options) do |updates|
@adapter.slice(*keys, **options).map do |key, entry|
entry = invalidate_entry(key, entry, new_expires) do |new_entry|
updates[key] = new_entry
end
next if entry.nil?
value, _ = entry
[key, value]
end.reject(&:nil?)
end
end
# (see Defaults#merge!)
private
def load_entry(key, options)
new_expires = expires_at(options, nil)
options = Utils.without(options, :expires)
entry = @adapter.load(key, options)
invalidate_entry(key, entry, new_expires) do |new_entry|
@adapter.store(key, new_entry, options)
end
end
def invalidate_entry(key, entry, new_expires = nil)
if entry != nil
value, expires = entry
if expires && Time.now > Time.at(expires)
delete(key)
entry = nil
elsif new_expires != nil
yield new_entry(value, new_expires) if block_given?
end
end
entry
end
def new_entry(value, expires)
if expires
[value, expires.to_r]
elsif Array === value || value == nil
[value]
else
value
end
end
def with_updates(options)
updates = {}
yield(updates).tap do
@adapter.merge!(updates, options) unless updates.empty?
end
end
end
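# Usage sketch (illustrative; the Memory adapter and keys are assumptions):
#
#   store = Moneta::Expires.new(Moneta::Adapters::Memory.new, expires: 60)
#   store.merge!({ 'a' => 1, 'b' => 2 }, expires: 10) # both expire in 10s
#   store.merge!({ 'b' => 3 }) { |key, old, new| old + new } # 'b' => 5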
|
iyuuya/jkf | lib/jkf/parser/kif.rb | Jkf::Parser.Kif.make_hand | ruby | def make_hand(str)
# Kifu for iPhone uses half-width spaces as separators
ret = { "FU" => 0, "KY" => 0, "KE" => 0, "GI" => 0, "KI" => 0, "KA" => 0, "HI" => 0 }
return ret if str.empty?
str.split(/[ ]/).each do |kind|
next if kind.empty?
ret[kind2csa(kind[0])] = kind.length == 1 ? 1 : kan2n2(kind[1..-1])
end
ret
end | generate motigoma | train | https://github.com/iyuuya/jkf/blob/4fd229c50737cab7b41281238880f1414e55e061/lib/jkf/parser/kif.rb#L614-L625 | class Kif < Base
include Kifuable
protected
# kifu : skipline* header* initialboard? header* split? moves fork* nl?
def parse_root
@input += "\n" unless @input.end_with?("\n")
s0 = @current_pos
s1 = []
s2 = parse_skipline
while s2 != :failed
s1 << s2
s2 = parse_skipline
end
s2 = []
s3 = parse_header
while s3 != :failed
s2 << s3
s3 = parse_header
end
s3 = parse_initialboard
s3 = nil if s3 == :failed
s4 = []
s5 = parse_header
while s5 != :failed
s4 << s5
s5 = parse_header
end
parse_split
s6 = parse_moves
if s6 != :failed
s7 = []
s8 = parse_fork
while s8 != :failed
s7 << s8
s8 = parse_fork
end
parse_nl
@reported_pos = s0
s0 = transform_root(s2, s3, s4, s6, s7)
else
@current_pos = s0
s0 = :failed
end
s0
end
# header : [^:\r\n]+ ":" nonls nl
# | turn "手番" nl
# | "盤面回転" nl
def parse_header
s0 = @current_pos
s2 = match_regexp(/^[^:\r\n]/)
if s2 != :failed
s1 = []
while s2 != :failed
s1 << s2
s2 = match_regexp(/^[^:\r\n]/)
end
else
s1 = :failed
end
if s1 != :failed
if match_str(":") != :failed
s3 = parse_nonls
if parse_nl != :failed
@reported_pos = s0
s1 = { "k" => s1.join, "v" => s3.join }
s0 = s1
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
if s0 == :failed
s0 = @current_pos
s1 = parse_turn
if s1 != :failed
if match_str("手番") != :failed
if parse_nl != :failed
@reported_pos = s0
s0 = { "k" => "手番", "v" => s1 }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
if s0 == :failed
s0 = @current_pos
if match_str("盤面回転") != :failed
if parse_nl != :failed
@reported_pos = s0
s0 = nil
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
end
end
s0
end
# turn : [先後上下]
def parse_turn
match_regexp(/^[先後上下]/)
end
# split : "手数----指手--" "-------消費時間--"? nl
def parse_split
s0 = @current_pos
s1 = match_str("手数----指手--")
if s1 != :failed
s2 = match_str("-------消費時間--")
s2 = nil if s2 == :failed
s3 = parse_nl
if s3 != :failed
s0 = [s1, s2, s3]
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# moves : firstboard split? move* result?
def parse_moves
s0 = @current_pos
s1 = parse_firstboard
if s1 != :failed
parse_split
s2 = []
s3 = parse_move
while s3 != :failed
s2 << s3
s3 = parse_move
end
parse_result
@reported_pos = s0
s0 = s2.unshift(s1)
else
@current_pos = s0
s0 = :failed
end
s0
end
# firstboard : comment* pointer?
def parse_firstboard
s0 = @current_pos
s1 = []
s2 = parse_comment
while s2 != :failed
s1 << s2
s2 = parse_comment
end
parse_pointer
@reported_pos = s0
s0 = s1.empty? ? {} : { "comments" => s1 }
s0
end
# move : line comment* pointer?
def parse_move
s0 = @current_pos
s1 = parse_line
if s1 != :failed
s2 = []
s3 = parse_comment
while s3 != :failed
s2 << s3
s3 = parse_comment
end
parse_pointer
@reported_pos = s0
s0 = transform_move(s1, s2)
else
@current_pos = s0
s0 = :failed
end
s0
end
# line : " "* te " "* (fugou from | [^\r\n ]*) " "* time? "+"? nl
def parse_line
s0 = @current_pos
match_spaces
s2 = parse_te
if s2 != :failed
match_spaces
s4 = @current_pos
s5 = parse_fugou
if s5 != :failed
s6 = parse_from
if s6 != :failed
@reported_pos = s4
s4 = transform_teban_fugou_from(s2, s5, s6)
else
@current_pos = s4
s4 = :failed
end
else
@current_pos = s4
s4 = :failed
end
if s4 == :failed
s4 = @current_pos
s5 = []
s6 = match_regexp(/^[^\r\n ]/)
while s6 != :failed
s5 << s6
s6 = match_regexp(/^[^\r\n ]/)
end
@reported_pos = s4
s4 = s5.join
end
if s4 != :failed
match_spaces
s6 = parse_time
s6 = nil if s6 == :failed
match_str("+")
if parse_nl != :failed
@reported_pos = s0
s0 = { "move" => s4, "time" => s6 }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# te : [0-9]+
def parse_te
match_digits!
end
# fugou : place piece "成"?
def parse_fugou
s0 = @current_pos
s1 = parse_place
if s1 != :failed
s2 = parse_piece
if s2 != :failed
s3 = match_str("成")
s3 = nil if s3 == :failed
@reported_pos = s0
s0 = { "to" => s1, "piece" => s2, "promote" => !!s3 }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# place : num numkan | "同 "
def parse_place
s0 = @current_pos
s1 = parse_num
if s1 != :failed
s2 = parse_numkan
if s2 != :failed
@reported_pos = s0
s0 = { "x" => s1, "y" => s2 }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
if s0 == :failed
s0 = @current_pos
s1 = match_str("同 ")
if s1 != :failed
@reported_pos = s0
s1 = nil
end
s0 = s1
end
s0
end
# from : "打" | "(" [1-9] [1-9] ")"
def parse_from
s0 = @current_pos
s1 = match_str("打")
if s1 != :failed
@reported_pos = s0
s1 = nil
end
s0 = s1
if s0 == :failed
s0 = @current_pos
if match_str("(") != :failed
s2 = match_regexp(/^[1-9]/)
if s2 != :failed
s3 = match_regexp(/^[1-9]/)
if s3 != :failed
if match_str(")") != :failed
@reported_pos = s0
s0 = { "x" => s2.to_i, "y" => s3.to_i }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
end
s0
end
# time : "(" " "* ms " "* "/" " "* (hms | ms) " "* ")"
def parse_time
s0 = @current_pos
if match_str("(") != :failed
match_spaces
s3 = parse_ms
if s3 != :failed
match_spaces
if match_str("/") != :failed
match_spaces
s5 = parse_hms
s5 = parse_ms(with_hour: true) if s5 == :failed
if s5 != :failed
match_spaces
if match_str(")") != :failed
@reported_pos = s0
s0 = { "now" => s3, "total" => s5 }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# hms : [0-9]+ ":" [0-9]+ ":" [0-9]+
def parse_hms
s0 = @current_pos
s1 = match_digits!
if s1 != :failed
if match_str(":") != :failed
s3 = match_digits!
if s3 != :failed
if match_str(":") != :failed
s5 = match_digits!
if s5 != :failed
@reported_pos = s0
s0 = { "h" => s1.join.to_i, "m" => s3.join.to_i, "s" => s5.join.to_i }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# ms : [0-9]+ ":" [0-9]+
def parse_ms(with_hour: false)
s0 = @current_pos
s1 = match_digits!
if s1 != :failed
if match_str(":") != :failed
s3 = match_digits!
if s3 != :failed
@reported_pos = s0
m = s1.join.to_i
s = s3.join.to_i
if with_hour
h = m / 60
m = m % 60
s0 = { "h" => h, "m" => m, "s" => s }
else
s0 = { "m" => m, "s" => s }
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
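# Worked example (illustrative): with with_hour: true the minutes overflow
# into hours, so "90:05" parses to { "h" => 1, "m" => 30, "s" => 5 };
# without it the same input parses to { "m" => 90, "s" => 5 }.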
# comment : "*" nonls nl | "&" nonls nl
def parse_comment
s0 = @current_pos
if match_str("*") != :failed
s2 = parse_nonls
if parse_nl != :failed
@reported_pos = s0
s0 = s2.join
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
if s0 == :failed
s0 = @current_pos
s1 = match_str("&")
if s1 != :failed
s2 = parse_nonls
if parse_nl != :failed
@reported_pos = s0
s0 = "&" + s2.join
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
end
s0
end
# fork : "変化:" " "* [0-9]+ "手" nl moves
def parse_fork
s0 = @current_pos
if match_str("変化:") != :failed
match_spaces
s3 = parse_te
if s3 != :failed
if match_str("手") != :failed
if parse_nl != :failed
s6 = parse_moves
if s6 != :failed
@reported_pos = s0
s0 = { "te" => s3.join.to_i, "moves" => s6[1..-1] }
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
else
@current_pos = s0
s0 = :failed
end
s0
end
# transfrom to jkf
def transform_root(headers, ini, headers2, moves, forks)
ret = { "header" => {}, "moves" => moves }
headers.compact.each { |h| ret["header"][h["k"]] = h["v"] }
headers2.compact.each { |h| ret["header"][h["k"]] = h["v"] }
if ini
ret["initial"] = ini
elsif ret["header"]["手合割"]
preset = preset2str(ret["header"]["手合割"])
ret["initial"] = { "preset" => preset } if preset && preset != "OTHER"
end
transform_root_header_data(ret) if ret["initial"] && ret["initial"]["data"]
transform_root_forks(forks, moves)
if ret["initial"] && ret["initial"]["data"] && ret["initial"]["data"]["color"] == 1
reverse_color(ret["moves"])
end
ret
end
# transform move to jkf
def transform_move(line, c)
ret = {}
ret["comments"] = c if !c.empty?
if line["move"].is_a? Hash
ret["move"] = line["move"]
else
ret["special"] = special2csa(line["move"])
end
ret["time"] = line["time"] if line["time"]
ret
end
# transform teban-fugou-from to jkf
def transform_teban_fugou_from(teban, fugou, from)
ret = { "color" => teban2color(teban.join), "piece" => fugou["piece"] }
if fugou["to"]
ret["to"] = fugou["to"]
else
ret["same"] = true
end
ret["promote"] = true if fugou["promote"]
ret["from"] = from if from
ret
end
# special string to csa
def special2csa(str)
{
"中断" => "CHUDAN",
"投了" => "TORYO",
"持将棋" => "JISHOGI",
"千日手" => "SENNICHITE",
"詰み" => "TSUMI",
"不詰" => "FUZUMI",
"切れ負け" => "TIME_UP",
"反則勝ち" => "ILLEGAL_ACTION", # 直前の手が反則(先頭に+か-で反則した側の情報を含める必要が有る)
"反則負け" => "ILLEGAL_MOVE" # ここで手番側が反則,反則の内容はコメントで表現
}[str] || (raise ParseError)
end
# teban to color
def teban2color(teban)
teban = teban.to_i unless teban.is_a? Integer # Fixnum was removed in Ruby 3
(teban + 1) % 2
end
# generate motigoma
# exchange sente gote
def reverse_color(moves)
moves.each do |move|
if move["move"] && move["move"]["color"]
move["move"]["color"] = (move["move"]["color"] + 1) % 2
end
move["forks"].each { |_fork| reverse_color(_fork) } if move["forks"]
end
end
end
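# Usage sketch (illustrative; make_hand is a protected helper, hence send,
# and the hand string is an assumption):
#
#   parser = Jkf::Parser::Kif.new
#   parser.send(:make_hand, "歩三 香")
#   # => { "FU" => 3, "KY" => 1, "KE" => 0, "GI" => 0, "KI" => 0, "KA" => 0, "HI" => 0 }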
|
sandipransing/rails_tiny_mce | plugins/paperclip/lib/paperclip/thumbnail.rb | Paperclip.Thumbnail.transformation_command | ruby | def transformation_command
scale, crop = @current_geometry.transformation_to(@target_geometry, crop?)
trans = []
trans << "-resize" << %["#{scale}"] unless scale.nil? || scale.empty?
trans << "-crop" << %["#{crop}"] << "+repage" if crop
trans
end | Returns the command ImageMagick's +convert+ needs to transform the image
into the thumbnail. | train | https://github.com/sandipransing/rails_tiny_mce/blob/4e91040e62784061aa7cca37fd8a95a87df379ce/plugins/paperclip/lib/paperclip/thumbnail.rb#L71-L77 | class Thumbnail < Processor
attr_accessor :current_geometry, :target_geometry, :format, :whiny, :convert_options, :source_file_options
# Creates a Thumbnail object set to work on the +file+ given. It
# will attempt to transform the image into one defined by +target_geometry+
# which is a "WxH"-style string. +format+ will be inferred from the +file+
# unless specified. Thumbnail creation will raise no errors unless
# +whiny+ is true (which it is, by default). If +convert_options+ is
# set, the options will be appended to the convert command upon image conversion
def initialize file, options = {}, attachment = nil
super
geometry = options[:geometry]
@file = file
@crop = geometry[-1,1] == '#'
@target_geometry = Geometry.parse geometry
@current_geometry = Geometry.from_file @file
@source_file_options = options[:source_file_options]
@convert_options = options[:convert_options]
@whiny = options[:whiny].nil? ? true : options[:whiny]
@format = options[:format]
@source_file_options = @source_file_options.split(/\s+/) if @source_file_options.respond_to?(:split)
@convert_options = @convert_options.split(/\s+/) if @convert_options.respond_to?(:split)
@current_format = File.extname(@file.path)
@basename = File.basename(@file.path, @current_format)
end
# Returns true if the +target_geometry+ is meant to crop.
def crop?
@crop
end
# Returns true if the image is meant to make use of additional convert options.
def convert_options?
!@convert_options.nil? && !@convert_options.empty?
end
# Performs the conversion of the +file+ into a thumbnail. Returns the Tempfile
# that contains the new image.
def make
src = @file
dst = Tempfile.new([@basename, @format ? ".#{@format}" : ''])
dst.binmode
begin
parameters = []
parameters << source_file_options
parameters << ":source"
parameters << transformation_command
parameters << convert_options
parameters << ":dest"
parameters = parameters.flatten.compact.join(" ").strip.squeeze(" ")
success = Paperclip.run("convert", parameters, :source => "#{File.expand_path(src.path)}[0]", :dest => File.expand_path(dst.path))
rescue PaperclipCommandLineError => e
raise PaperclipError, "There was an error processing the thumbnail for #{@basename}" if @whiny
end
dst
end
# Returns the command ImageMagick's +convert+ needs to transform the image
# into the thumbnail.
end
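# Usage sketch (illustrative; the file and geometry are assumptions):
#
#   thumb = Paperclip::Thumbnail.new(File.open('photo.jpg'),
#     :geometry => '100x100#') # a trailing '#' requests a crop
#   tempfile = thumb.make # runs `convert` with the -resize/-crop flags built above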
|
hashicorp/vagrant | lib/vagrant/environment.rb | Vagrant.Environment.environment | ruby | def environment(vagrantfile, **opts)
path = File.expand_path(vagrantfile, root_path)
file = File.basename(path)
path = File.dirname(path)
Util::SilenceWarnings.silence! do
Environment.new({
child: true,
cwd: path,
home_path: home_path,
ui_class: ui_class,
vagrantfile_name: file,
}.merge(opts))
end
end | Loads another environment for the given Vagrantfile, sharing as much
useful state from this Environment as possible (such as UI and paths).
Any initialization options can be overridden using the opts hash.
@param [String] vagrantfile Path to a Vagrantfile
@return [Environment] | train | https://github.com/hashicorp/vagrant/blob/c22a145c59790c098f95d50141d9afb48e1ef55f/lib/vagrant/environment.rb#L499-L513 | class Environment
# This is the current version that this version of Vagrant is
# compatible with in the home directory.
#
# @return [String]
CURRENT_SETUP_VERSION = "1.5"
DEFAULT_LOCAL_DATA = ".vagrant"
# The `cwd` that this environment represents
attr_reader :cwd
# The persistent data directory where global data can be stored. It
# is up to the creator of the data in this directory to properly
# remove it when it is no longer needed.
#
# @return [Pathname]
attr_reader :data_dir
# The valid name for a Vagrantfile for this environment.
attr_reader :vagrantfile_name
# The {UI} object to communicate with the outside world.
attr_reader :ui
# This is the UI class to use when creating new UIs.
attr_reader :ui_class
# The directory to the "home" folder that Vagrant will use to store
# global state.
attr_reader :home_path
# The directory to the directory where local, environment-specific
# data is stored.
attr_reader :local_data_path
# The directory where temporary files for Vagrant go.
attr_reader :tmp_path
# File where command line aliases go.
attr_reader :aliases_path
# The directory where boxes are stored.
attr_reader :boxes_path
# The path where the plugins are stored (gems)
attr_reader :gems_path
# The path to the default private key
attr_reader :default_private_key_path
# Initializes a new environment with the given options. The options
# is a hash where the main available key is `cwd`, which defines where
# the environment represents. There are other options available but
# they shouldn't be used in general. If `cwd` is nil, then it defaults
# to the `Dir.pwd` (which is the cwd of the executing process).
def initialize(opts=nil)
opts = {
cwd: nil,
home_path: nil,
local_data_path: nil,
ui_class: nil,
vagrantfile_name: nil,
}.merge(opts || {})
# Set the default working directory to look for the vagrantfile
opts[:cwd] ||= ENV["VAGRANT_CWD"] if ENV.key?("VAGRANT_CWD")
opts[:cwd] ||= Dir.pwd
opts[:cwd] = Pathname.new(opts[:cwd])
if !opts[:cwd].directory?
raise Errors::EnvironmentNonExistentCWD, cwd: opts[:cwd].to_s
end
opts[:cwd] = opts[:cwd].expand_path
# Set the default ui class
opts[:ui_class] ||= UI::Silent
# Set the Vagrantfile name up. We append "Vagrantfile" and "vagrantfile" so that
# those continue to work as well, but anything custom will take precedence.
opts[:vagrantfile_name] ||= ENV["VAGRANT_VAGRANTFILE"] if \
ENV.key?("VAGRANT_VAGRANTFILE")
opts[:vagrantfile_name] = [opts[:vagrantfile_name]] if \
opts[:vagrantfile_name] && !opts[:vagrantfile_name].is_a?(Array)
# Set instance variables for all the configuration parameters.
@cwd = opts[:cwd]
@home_path = opts[:home_path]
@vagrantfile_name = opts[:vagrantfile_name]
@ui = opts[:ui_class].new
@ui_class = opts[:ui_class]
# This is the batch lock, that enforces that only one {BatchAction}
# runs at a time from {#batch}.
@batch_lock = Mutex.new
@locks = {}
@logger = Log4r::Logger.new("vagrant::environment")
@logger.info("Environment initialized (#{self})")
@logger.info(" - cwd: #{cwd}")
# Setup the home directory
@home_path ||= Vagrant.user_data_path
@home_path = Util::Platform.fs_real_path(@home_path)
@boxes_path = @home_path.join("boxes")
@data_dir = @home_path.join("data")
@gems_path = Vagrant::Bundler.instance.plugin_gem_path
@tmp_path = @home_path.join("tmp")
@machine_index_dir = @data_dir.join("machine-index")
@aliases_path = Pathname.new(ENV["VAGRANT_ALIAS_FILE"]).expand_path if ENV.key?("VAGRANT_ALIAS_FILE")
@aliases_path ||= @home_path.join("aliases")
# Prepare the directories
setup_home_path
# Setup the local data directory. If a configuration path is given,
# it is expanded relative to the root path. Otherwise, we use the
# default (which is also expanded relative to the root path).
if !root_path.nil?
if !ENV["VAGRANT_DOTFILE_PATH"].to_s.empty? && !opts[:child]
opts[:local_data_path] ||= Pathname.new(File.expand_path(ENV["VAGRANT_DOTFILE_PATH"], root_path))
else
opts[:local_data_path] ||= root_path.join(DEFAULT_LOCAL_DATA)
end
end
if opts[:local_data_path]
@local_data_path = Pathname.new(File.expand_path(opts[:local_data_path], @cwd))
end
@logger.debug("Effective local data path: #{@local_data_path}")
# If we have a root path, load the ".vagrantplugins" file.
if root_path
plugins_file = root_path.join(".vagrantplugins")
if plugins_file.file?
@logger.info("Loading plugins file: #{plugins_file}")
load plugins_file
end
end
setup_local_data_path
# Setup the default private key
@default_private_key_path = @home_path.join("insecure_private_key")
copy_insecure_private_key
# Initialize localized plugins
plugins = Vagrant::Plugin::Manager.instance.localize!(self)
# Load any environment local plugins
Vagrant::Plugin::Manager.instance.load_plugins(plugins)
# Initialize globalize plugins
plugins = Vagrant::Plugin::Manager.instance.globalize!
# Load any global plugins
Vagrant::Plugin::Manager.instance.load_plugins(plugins)
if !vagrantfile.config.vagrant.plugins.empty?
plugins = process_configured_plugins
end
# Call the hooks that do not require configurations to be loaded
# by using a "clean" action runner
hook(:environment_plugins_loaded, runner: Action::Runner.new(env: self))
# Call the environment load hooks
hook(:environment_load, runner: Action::Runner.new(env: self))
end
# Return a human-friendly string for pretty printed or inspected
# instances.
#
# @return [String]
def inspect
"#<#{self.class}: #{@cwd}>".encode('external')
end
# Action runner for executing actions in the context of this environment.
#
# @return [Action::Runner]
def action_runner
@action_runner ||= Action::Runner.new do
{
action_runner: action_runner,
box_collection: boxes,
hook: method(:hook),
host: host,
machine_index: machine_index,
gems_path: gems_path,
home_path: home_path,
root_path: root_path,
tmp_path: tmp_path,
ui: @ui,
env: self
}
end
end
# Returns a list of machines that this environment is currently
# managing that physically have been created.
#
# An "active" machine is a machine that Vagrant manages that has
# been created. The machine itself may be in any state such as running,
# suspended, etc. but if a machine is "active" then it exists.
#
# Note that the machines in this array may no longer be present in
# the Vagrantfile of this environment. In this case the machine can
# be considered an "orphan." Determining which machines are orphan
# and which aren't is not currently a supported feature, but will
# be in a future version.
#
# @return [Array<String, Symbol>]
def active_machines
# We have no active machines if we have no data path
return [] if !@local_data_path
machine_folder = @local_data_path.join("machines")
# If the machine folder is not a directory then we just return
# an empty array since no active machines exist.
return [] if !machine_folder.directory?
# Traverse the machines folder to accumulate a result
result = []
machine_folder.children(true).each do |name_folder|
# If this isn't a directory then it isn't a machine
next if !name_folder.directory?
name = name_folder.basename.to_s.to_sym
name_folder.children(true).each do |provider_folder|
# If this isn't a directory then it isn't a provider
next if !provider_folder.directory?
# If this machine doesn't have an ID, then ignore
next if !provider_folder.join("id").file?
provider = provider_folder.basename.to_s.to_sym
result << [name, provider]
end
end
# Return the results
result
end
# This creates a new batch action, yielding it, and then running it
# once the block completes.
#
# This handles the case where batch actions are disabled by the
# VAGRANT_NO_PARALLEL environmental variable.
def batch(parallel=true)
parallel = false if ENV["VAGRANT_NO_PARALLEL"]
@batch_lock.synchronize do
BatchAction.new(parallel).tap do |b|
# Yield it so that the caller can setup actions
yield b
# And run it!
b.run
end
end
end
# Makes a call to the CLI with the given arguments as if they
# came from the real command line (sometimes they do!). An example:
#
# env.cli("package", "--vagrantfile", "Vagrantfile")
#
def cli(*args)
CLI.new(args.flatten, self).execute
end
# This returns the provider name for the default provider for this
# environment.
#
# @return [Symbol] Name of the default provider.
def default_provider(**opts)
opts[:exclude] = Set.new(opts[:exclude]) if opts[:exclude]
opts[:force_default] = true if !opts.key?(:force_default)
opts[:check_usable] = true if !opts.key?(:check_usable)
# Implement the algorithm from
# https://www.vagrantup.com/docs/providers/basic_usage.html#default-provider
# with additional steps 2.5 and 3.5 from
# https://bugzilla.redhat.com/show_bug.cgi?id=1444492
# to allow system-configured provider priorities.
#
# 1. The --provider flag on a vagrant up is chosen above all else, if it is
# present.
#
# (Step 1 is done by the caller; this method is only called if --provider
# wasn't given.)
#
# 2. If the VAGRANT_DEFAULT_PROVIDER environmental variable is set, it
# takes next priority and will be the provider chosen.
default = ENV["VAGRANT_DEFAULT_PROVIDER"].to_s
if default.empty?
default = nil
else
default = default.to_sym
@logger.debug("Default provider: `#{default}`")
end
# If we're forcing the default, just short-circuit and return
# that (the default behavior)
if default && opts[:force_default]
@logger.debug("Using forced default provider: `#{default}`")
return default
end
# Determine the config to use to look for provider definitions. By
# default it is the global one, but if we're targeting a specific machine,
# then look there.
root_config = vagrantfile.config
if opts[:machine]
machine_info = vagrantfile.machine_config(opts[:machine], nil, nil, nil)
root_config = machine_info[:config]
end
# Get the list of providers within our configuration, in order.
config = root_config.vm.__providers
# Get the list of usable providers with their internally-declared
# priorities.
usable = []
Vagrant.plugin("2").manager.providers.each do |key, data|
impl = data[0]
popts = data[1]
# Skip excluded providers
next if opts[:exclude] && opts[:exclude].include?(key)
# Skip providers that can't be defaulted, unless they're in our
# config, in which case someone made our decision for us.
if !config.include?(key)
next if popts.key?(:defaultable) && !popts[:defaultable]
end
# Skip providers that aren't usable.
next if opts[:check_usable] && !impl.usable?(false)
# Each provider sets its own priority, defaulting to 5 so we can trust
# it's always set.
usable << [popts[:priority], key]
end
@logger.debug("Initial usable provider list: #{usable}")
# Sort the usable providers by priority. Higher numbers are higher
# priority, otherwise alpha sort.
usable = usable.sort {|a, b| a[0] == b[0] ? a[1] <=> b[1] : b[0] <=> a[0]}
.map {|prio, key| key}
@logger.debug("Priority sorted usable provider list: #{usable}")
# If we're not forcing the default, but it's usable and hasn't been
# otherwise excluded, return it now.
if usable.include?(default)
@logger.debug("Using default provider `#{default}` as it was found in usable list.")
return default
end
# 2.5. Vagrant will go through all of the config.vm.provider calls in the
# Vagrantfile and try each in order. It will choose the first
# provider that is usable and listed in VAGRANT_PREFERRED_PROVIDERS.
preferred = ENV.fetch('VAGRANT_PREFERRED_PROVIDERS', '')
.split(',')
.map {|s| s.strip}
.select {|s| !s.empty?}
.map {|s| s.to_sym}
@logger.debug("Preferred provider list: #{preferred}")
config.each do |key|
if usable.include?(key) && preferred.include?(key)
@logger.debug("Using preferred provider `#{key}` detected in configuration and usable.")
return key
end
end
# 3. Vagrant will go through all of the config.vm.provider calls in the
# Vagrantfile and try each in order. It will choose the first provider
# that is usable. For example, if you configure Hyper-V, it will never
# be chosen on Mac this way. It must be both configured and usable.
config.each do |key|
if usable.include?(key)
@logger.debug("Using provider `#{key}` detected in configuration and usable.")
return key
end
end
# 3.5. Vagrant will go through VAGRANT_PREFERRED_PROVIDERS and find the
# first plugin that reports it is usable.
preferred.each do |key|
if usable.include?(key)
@logger.debug("Using preferred provider `#{key}` found in usable list.")
return key
end
end
# 4. Vagrant will go through all installed provider plugins (including the
# ones that come with Vagrant), and find the first plugin that reports
# it is usable. There is a priority system here: systems that are known
# better have a higher priority than systems that are worse. For
# example, if you have the VMware provider installed, it will always
# take priority over VirtualBox.
if !usable.empty?
@logger.debug("Using provider `#{usable[0]}` as it is the highest priority in the usable list.")
return usable[0]
end
# 5. If Vagrant still has not found any usable providers, it will error.
# No providers available is a critical error for Vagrant.
raise Errors::NoDefaultProvider
end
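# (Example for #default_provider above.) A resolution sketch, assuming a
# hypothetical Vagrantfile that configures :virtualbox and :libvirt:
#
#   VAGRANT_DEFAULT_PROVIDER=libvirt vagrant up     # step 2 short-circuits to libvirt
#   VAGRANT_PREFERRED_PROVIDERS=libvirt vagrant up  # step 2.5 prefers it when usable
#   vagrant up                                      # steps 3-4 pick the first usable provider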
# Returns whether or not we know how to install the provider with
# the given name.
#
# @return [Boolean]
def can_install_provider?(name)
host.capability?(provider_install_key(name))
end
# Installs the provider with the given name.
#
# This will raise an exception if we don't know how to install the
# provider with the given name. You should guard this call with
# `can_install_provider?` for added safety.
#
# An exception will be raised if there are any failures installing
# the provider.
def install_provider(name)
host.capability(provider_install_key(name))
end
# Returns the collection of boxes for the environment.
#
# @return [BoxCollection]
def boxes
@_boxes ||= BoxCollection.new(
boxes_path,
hook: method(:hook),
temp_dir_root: tmp_path)
end
# Returns the {Config::Loader} that can be used to load Vagrantfiles
# given the settings of this environment.
#
# @return [Config::Loader]
def config_loader
return @config_loader if @config_loader
home_vagrantfile = nil
root_vagrantfile = nil
home_vagrantfile = find_vagrantfile(home_path) if home_path
if root_path
root_vagrantfile = find_vagrantfile(root_path, @vagrantfile_name)
end
@config_loader = Config::Loader.new(
Config::VERSIONS, Config::VERSIONS_ORDER)
@config_loader.set(:home, home_vagrantfile) if home_vagrantfile
@config_loader.set(:root, root_vagrantfile) if root_vagrantfile
@config_loader
end
# Loads another environment for the given Vagrantfile, sharing as much
# useful state from this Environment as possible (such as UI and paths).
# Any initialization options can be overridden using the opts hash.
#
# @param [String] vagrantfile Path to a Vagrantfile
# @return [Environment]
# This defines a hook point where plugin action hooks that are registered
# against the given name will be run in the context of this environment.
#
# @param [Symbol] name Name of the hook.
# @param [Action::Runner] action_runner A custom action runner for running hooks.
def hook(name, opts=nil)
@logger.info("Running hook: #{name}")
opts ||= {}
opts[:callable] ||= Action::Builder.new
opts[:runner] ||= action_runner
opts[:action_name] = name
opts[:env] = self
opts.delete(:runner).run(opts.delete(:callable), opts)
end
# Returns the host object associated with this environment.
#
# @return [Class]
def host
return @host if defined?(@host)
# Determine the host class to use. ":detect" is an old Vagrant config
# that shouldn't be valid anymore, but we respect it here by assuming
# its old behavior. No need to deprecate this because I think it is
# fairly harmless.
host_klass = vagrantfile.config.vagrant.host
host_klass = nil if host_klass == :detect
begin
@host = Host.new(
host_klass,
Vagrant.plugin("2").manager.hosts,
Vagrant.plugin("2").manager.host_capabilities,
self)
rescue Errors::CapabilityHostNotDetected
# If the auto-detect failed, then we create a brand new host
# with no capabilities and use that. This should almost never happen
# since Vagrant works on most host OS's now, so this is a "slow path"
klass = Class.new(Vagrant.plugin("2", :host)) do
def detect?(env); true; end
end
hosts = { generic: [klass, nil] }
host_caps = {}
@host = Host.new(:generic, hosts, host_caps, self)
rescue Errors::CapabilityHostExplicitNotDetected => e
raise Errors::HostExplicitNotDetected, e.extra_data
end
end
# This acquires a process-level lock with the given name.
#
# The lock file is held within the data directory of this environment,
# so make sure that all environments that are locking are sharing
# the same data directory.
#
# This will raise Errors::EnvironmentLockedError if the lock can't
# be obtained.
#
# @param [String] name Name of the lock, since multiple locks can
# be held at one time.
def lock(name="global", **opts)
f = nil
# If we don't have a block, then locking is useless, so ignore it
return if !block_given?
# This allows multiple locks in the same process to be nested
return yield if @locks[name] || opts[:noop]
# The path to this lock
lock_path = data_dir.join("lock.#{name}.lock")
@logger.debug("Attempting to acquire process-lock: #{name}")
lock("dotlock", noop: name == "dotlock", retry: true) do
f = File.open(lock_path, "w+")
end
# The file locking fails only if it returns "false." If it
# succeeds it returns a 0, so we must explicitly check for
# the proper error case.
while f.flock(File::LOCK_EX | File::LOCK_NB) === false
@logger.warn("Process-lock in use: #{name}")
if !opts[:retry]
raise Errors::EnvironmentLockedError,
name: name
end
sleep 0.2
end
@logger.info("Acquired process lock: #{name}")
result = nil
begin
# Mark that we have a lock
@locks[name] = true
result = yield
ensure
# We need to make sure that no matter what this is always
# reset to false so we don't think we have a lock when we
# actually don't.
@locks.delete(name)
@logger.info("Released process lock: #{name}")
end
# Clean up the lock file, this requires another lock
if name != "dotlock"
lock("dotlock", retry: true) do
f.close
begin
File.delete(lock_path)
rescue
@logger.error(
"Failed to delete lock file #{lock_path} - some other thread " +
"might be trying to acquire it. ignoring this error")
end
end
end
# Return the result
return result
ensure
begin
f.close if f
rescue IOError
end
end
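# (Example for #lock above, with a hypothetical block body.) Locks with the
# same name are re-entrant within one process thanks to @locks, so nesting
# is safe:
#
#   env.lock("boxes") do
#     env.lock("boxes") { install_box } # inner call yields immediately
#   end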
# This executes the push with the given name, raising any exceptions that
# occur.
#
# Precondition: the push is not nil and exists.
def push(name)
@logger.info("Getting push: #{name}")
name = name.to_sym
pushes = self.vagrantfile.config.push.__compiled_pushes
if !pushes.key?(name)
raise Vagrant::Errors::PushStrategyNotDefined,
name: name,
pushes: pushes.keys
end
strategy, config = pushes[name]
push_registry = Vagrant.plugin("2").manager.pushes
klass, _ = push_registry.get(strategy)
if klass.nil?
raise Vagrant::Errors::PushStrategyNotLoaded,
name: strategy,
pushes: push_registry.keys
end
klass.new(self, config).push
end
# The list of pushes defined in this Vagrantfile.
#
# @return [Array<Symbol>]
def pushes
self.vagrantfile.config.push.__compiled_pushes.keys
end
# This returns a machine with the proper provider for this environment.
# The machine named by `name` must be in this environment.
#
# @param [Symbol] name Name of the machine (as configured in the
# Vagrantfile).
# @param [Symbol] provider The provider that this machine should be
# backed by.
# @param [Boolean] refresh If true, then if there is a cached version
# it is reloaded.
# @return [Machine]
def machine(name, provider, refresh=false)
@logger.info("Getting machine: #{name} (#{provider})")
# Compose the cache key of the name and provider, and return from
# the cache if we have that.
cache_key = [name, provider]
@machines ||= {}
if refresh
@logger.info("Refreshing machine (busting cache): #{name} (#{provider})")
@machines.delete(cache_key)
end
if @machines.key?(cache_key)
@logger.info("Returning cached machine: #{name} (#{provider})")
return @machines[cache_key]
end
@logger.info("Uncached load of machine.")
# Determine the machine data directory and pass it to the machine.
machine_data_path = @local_data_path.join(
"machines/#{name}/#{provider}")
# Create the machine and cache it for future calls. This will also
# return the machine from this method.
@machines[cache_key] = vagrantfile.machine(
name, provider, boxes, machine_data_path, self)
end
# The {MachineIndex} to store information about the machines.
#
# @return [MachineIndex]
def machine_index
@machine_index ||= MachineIndex.new(@machine_index_dir)
end
# This returns a list of the configured machines for this environment.
# Each of the names returned by this method is valid to be used with
# the {#machine} method.
#
# @return [Array<Symbol>] Configured machine names.
def machine_names
vagrantfile.machine_names
end
# This returns the name of the machine that is the "primary." In the
# case of a single-machine environment, this is just the single machine
# name. In the case of a multi-machine environment, then this can
# potentially be nil if no primary machine is specified.
#
# @return [Symbol]
def primary_machine_name
vagrantfile.primary_machine_name
end
# The root path is the path where the top-most (loaded last)
# Vagrantfile resides. It can be considered the project root for
# this environment.
#
# @return [String]
def root_path
return @root_path if defined?(@root_path)
root_finder = lambda do |path|
# Note: To remain compatible with Ruby 1.8, we have to use
# a `find` here instead of an `each`.
vf = find_vagrantfile(path, @vagrantfile_name)
return path if vf
return nil if path.root? || !File.exist?(path)
root_finder.call(path.parent)
end
@root_path = root_finder.call(cwd)
end
# Unload the environment, running completion hooks. The environment
# should not be used after this (but CAN be, technically). It is
# recommended to always immediately set the variable to `nil` after
# running this so you can't accidentally run any more methods. Example:
#
# env.unload
# env = nil
#
def unload
hook(:environment_unload)
end
# Represents the default Vagrantfile, or the Vagrantfile that is
# in the working directory or a parent of the working directory
# of this environment.
#
# The existence of this function is primarily a convenience. There
# is nothing stopping you from instantiating your own {Vagrantfile}
# and loading machines in any way you see fit. Typical behavior of
# Vagrant, however, loads this Vagrantfile.
#
# This Vagrantfile is composed of two major sources: the Vagrantfile
# in the user's home directory as well as the "root" Vagrantfile or
# the Vagrantfile in the working directory (or parent).
#
# @return [Vagrantfile]
def vagrantfile
@vagrantfile ||= Vagrantfile.new(config_loader, [:home, :root])
end
#---------------------------------------------------------------
# Load Methods
#---------------------------------------------------------------
# This sets the `@home_path` variable properly.
#
# @return [Pathname]
def setup_home_path
@logger.info("Home path: #{@home_path}")
# Setup the list of child directories that need to be created if they
# don't already exist.
dirs = [
@home_path,
@home_path.join("rgloader"),
@boxes_path,
@data_dir,
@gems_path,
@tmp_path,
@machine_index_dir,
]
# Go through each required directory, creating it if it doesn't exist
dirs.each do |dir|
next if File.directory?(dir)
begin
@logger.info("Creating: #{dir}")
FileUtils.mkdir_p(dir)
rescue Errno::EACCES
raise Errors::HomeDirectoryNotAccessible, home_path: @home_path.to_s
end
end
# Attempt to write into the home directory to verify we can
begin
# Append a random suffix to avoid race conditions if Vagrant
# is running in parallel with other Vagrant processes.
suffix = (0...32).map { (65 + rand(26)).chr }.join
path = @home_path.join("perm_test_#{suffix}")
path.open("w") do |f|
f.write("hello")
end
path.unlink
rescue Errno::EACCES
raise Errors::HomeDirectoryNotAccessible, home_path: @home_path.to_s
end
# Create the version file that we use to track the structure of
# the home directory. If we have an old version, we need to explicitly
# upgrade it. Otherwise, we just mark that it's the current version.
version_file = @home_path.join("setup_version")
if version_file.file?
version = version_file.read.chomp
if version > CURRENT_SETUP_VERSION
raise Errors::HomeDirectoryLaterVersion
end
case version
when CURRENT_SETUP_VERSION
# We're already good, at the latest version.
when "1.1"
# We need to update our directory structure
upgrade_home_path_v1_1
# Delete the version file so we put our latest version in
version_file.delete
else
raise Errors::HomeDirectoryUnknownVersion,
path: @home_path.to_s,
version: version
end
end
if !version_file.file?
@logger.debug(
"Creating home directory version file: #{CURRENT_SETUP_VERSION}")
version_file.open("w") do |f|
f.write(CURRENT_SETUP_VERSION)
end
end
# Create the rgloader/loader file so we can use encoded files.
loader_file = @home_path.join("rgloader", "loader.rb")
if !loader_file.file?
source_loader = Vagrant.source_root.join("templates/rgloader.rb")
FileUtils.cp(source_loader.to_s, loader_file.to_s)
end
end
# This creates the local data directory and shows an error if it
# couldn't properly be created.
def setup_local_data_path(force=false)
if @local_data_path.nil?
@logger.warn("No local data path is set. Local data cannot be stored.")
return
end
@logger.info("Local data path: #{@local_data_path}")
# If the local data path is a file, then we are probably seeing an
# old (V1) "dotfile." In this case, we upgrade it. The upgrade process
# will remove the old data file if it is successful.
if @local_data_path.file?
upgrade_v1_dotfile(@local_data_path)
end
# If we don't have a root path, we don't setup anything
return if !force && root_path.nil?
begin
@logger.debug("Creating: #{@local_data_path}")
FileUtils.mkdir_p(@local_data_path)
# Create the rgloader/loader file so we can use encoded files.
loader_file = @local_data_path.join("rgloader", "loader.rb")
if !loader_file.file?
source_loader = Vagrant.source_root.join("templates/rgloader.rb")
FileUtils.mkdir_p(@local_data_path.join("rgloader").to_s)
FileUtils.cp(source_loader.to_s, loader_file.to_s)
end
rescue Errno::EACCES
raise Errors::LocalDataDirectoryNotAccessible,
local_data_path: @local_data_path.to_s
end
end
protected
# Check for any local plugins defined within the Vagrantfile. If
# found, validate they are available. If they are not available,
# request to install them, or raise an exception
#
# @return [Hash] plugin list for loading
def process_configured_plugins
return if !Vagrant.plugins_enabled?
errors = vagrantfile.config.vagrant.validate(nil)
if !errors["vagrant"].empty?
raise Errors::ConfigInvalid,
errors: Util::TemplateRenderer.render(
"config/validation_failed",
errors: errors)
end
# Check if defined plugins are installed
installed = Plugin::Manager.instance.installed_plugins
needs_install = []
config_plugins = vagrantfile.config.vagrant.plugins
config_plugins.each do |name, info|
if !installed[name]
needs_install << name
end
end
if !needs_install.empty?
ui.warn(I18n.t("vagrant.plugins.local.uninstalled_plugins",
plugins: needs_install.sort.join(", ")))
if !Vagrant.auto_install_local_plugins?
answer = nil
until ["y", "n"].include?(answer)
answer = ui.ask(I18n.t("vagrant.plugins.local.request_plugin_install") + " [N]: ")
answer = answer.strip.downcase
answer = "n" if answer.to_s.empty?
end
if answer == "n"
raise Errors::PluginMissingLocalError,
plugins: needs_install.sort.join(", ")
end
end
needs_install.each do |name|
pconfig = Util::HashWithIndifferentAccess.new(config_plugins[name])
ui.info(I18n.t("vagrant.commands.plugin.installing", name: name))
options = {sources: Vagrant::Bundler::DEFAULT_GEM_SOURCES.dup, env_local: true}
options[:sources] = pconfig[:sources] if pconfig[:sources]
options[:require] = pconfig[:entry_point] if pconfig[:entry_point]
options[:version] = pconfig[:version] if pconfig[:version]
spec = Plugin::Manager.instance.install_plugin(name, options)
ui.info(I18n.t("vagrant.commands.plugin.installed",
name: spec.name, version: spec.version.to_s))
end
ui.info("\n")
# Force halt after installation and require command to be run again. This
# will properly load any new locally installed plugins which are now available.
ui.warn(I18n.t("vagrant.plugins.local.install_rerun_command"))
exit(-1)
end
Vagrant::Plugin::Manager.instance.local_file.installed_plugins
end
# This method copies the private key into the home directory if it
# doesn't already exist.
#
# This must be done because `ssh` requires that the key is chmod
# 0600, but if Vagrant is installed as a separate user, then the
# effective uid won't be able to read the key. So the key is copied
# to the home directory and chmod 0600.
def copy_insecure_private_key
if !@default_private_key_path.exist?
@logger.info("Copying private key to home directory")
source = File.expand_path("keys/vagrant", Vagrant.source_root)
destination = @default_private_key_path
begin
FileUtils.cp(source, destination)
rescue Errno::EACCES
raise Errors::CopyPrivateKeyFailed,
source: source,
destination: destination
end
end
if !Util::Platform.windows?
# On Windows, permissions don't matter as much, so don't worry
# about doing chmod.
if Util::FileMode.from_octal(@default_private_key_path.stat.mode) != "600"
@logger.info("Changing permissions on private key to 0600")
@default_private_key_path.chmod(0600)
end
end
end
# Finds the Vagrantfile in the given directory.
#
# @param [Pathname] path Path to search in.
# @return [Pathname]
def find_vagrantfile(search_path, filenames=nil)
filenames ||= ["Vagrantfile", "vagrantfile"]
filenames.each do |vagrantfile|
current_path = search_path.join(vagrantfile)
return current_path if current_path.file?
end
nil
end
# Returns the key used for the host capability for provider installs
# of the given name.
def provider_install_key(name)
"provider_install_#{name}".to_sym
end
# This upgrades a home directory that was in the v1.1 format to the
# v1.5 format. It will raise exceptions if anything fails.
def upgrade_home_path_v1_1
if !ENV["VAGRANT_UPGRADE_SILENT_1_5"]
@ui.ask(I18n.t("vagrant.upgrading_home_path_v1_5"))
end
collection = BoxCollection.new(
@home_path.join("boxes"), temp_dir_root: tmp_path)
collection.upgrade_v1_1_v1_5
end
# This upgrades a Vagrant 1.0.x "dotfile" to the new V2 format.
#
# This is a destructive process. Once the upgrade is complete, the
# old dotfile is removed, and the environment becomes incompatible for
# Vagrant 1.0 environments.
#
# @param [Pathname] path The path to the dotfile
def upgrade_v1_dotfile(path)
@logger.info("Upgrading V1 dotfile to V2 directory structure...")
# First, verify the file isn't empty. If it is an empty file, we
# just delete it and go on with life.
contents = path.read.strip
if contents.strip == ""
@logger.info("V1 dotfile was empty. Removing and moving on.")
path.delete
return
end
# Otherwise, verify there is valid JSON in here since a Vagrant
# environment would always ensure valid JSON. This is a sanity check
# to make sure we don't nuke a dotfile that is not ours...
@logger.debug("Attempting to parse JSON of V1 file")
json_data = nil
begin
json_data = JSON.parse(contents)
@logger.debug("JSON parsed successfully. Things are okay.")
rescue JSON::ParserError
# The file could've been tampered with since Vagrant 1.0.x is
# supposed to ensure that the contents are valid JSON. Show an error.
raise Errors::DotfileUpgradeJSONError,
state_file: path.to_s
end
# Alright, let's upgrade this guy to the new structure. Start by
# backing up the old dotfile.
backup_file = path.dirname.join(".vagrant.v1.#{Time.now.to_i}")
@logger.info("Renaming old dotfile to: #{backup_file}")
path.rename(backup_file)
# Now, we create the actual local data directory. This should succeed
# this time since we renamed the old conflicting V1.
setup_local_data_path(true)
if json_data["active"]
@logger.debug("Upgrading to V2 style for each active VM")
json_data["active"].each do |name, id|
@logger.info("Upgrading dotfile: #{name} (#{id})")
# Create the machine configuration directory
directory = @local_data_path.join("machines/#{name}/virtualbox")
FileUtils.mkdir_p(directory)
# Write the ID file
directory.join("id").open("w+") do |f|
f.write(id)
end
end
end
# Upgrade complete! Let the user know
@ui.info(I18n.t("vagrant.general.upgraded_v1_dotfile",
backup_path: backup_file.to_s))
end
end
|
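A minimal usage sketch for the Environment API shown in the scope above (the project path is hypothetical, and Vagrant::UI::Silent is one of the stock UI classes; treat this as an illustration rather than the canonical setup):

require "vagrant"

env = Vagrant::Environment.new(
  cwd: "/projects/demo",          # hypothetical project directory
  ui_class: Vagrant::UI::Silent
)
env.machine_names.each { |name| puts name } # names configured in the Vagrantfile
provider = env.default_provider             # walks steps 2..5 documented above
env.machine(env.primary_machine_name, provider) if env.primary_machine_name
env.unload
env = nil # recommended after #unload, per its documentation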
tarcieri/cool.io | lib/cool.io/io.rb | Coolio.IO.on_readable | ruby | def on_readable
begin
on_read @_io.read_nonblock(INPUT_SIZE)
rescue Errno::EAGAIN, Errno::EINTR
return
# SystemCallError catches Errno::ECONNRESET amongst others.
rescue SystemCallError, EOFError, IOError, SocketError
close
end
end | Read from the input buffer and dispatch to on_read | train | https://github.com/tarcieri/cool.io/blob/0fd3fd1d8e8d81e24f79f809979367abc3f52b92/lib/cool.io/io.rb#L121-L131 | class IO
extend Meta
# Maximum number of bytes to consume at once
INPUT_SIZE = 16384
def initialize(io)
@_io = io
@_write_buffer ||= ::IO::Buffer.new
@_read_watcher = Watcher.new(io, self, :r)
@_write_watcher = Watcher.new(io, self, :w)
end
#
# Watcher methods, delegated to @_read_watcher
#
# Attach to the event loop
def attach(loop)
@_read_watcher.attach(loop)
schedule_write if !@_write_buffer.empty?
self
end
# Detach from the event loop
def detach
# TODO should these detect write buffers, as well?
@_read_watcher.detach
self
end
# Enable the watcher
def enable
@_read_watcher.enable
self
end
# Disable the watcher
def disable
@_read_watcher.disable
self
end
# Is the watcher attached?
def attached?
@_read_watcher.attached?
end
# Is the watcher enabled?
def enabled?
@_read_watcher.enabled?
end
# Obtain the event loop associated with this object
def evloop
@_read_watcher.evloop
end
#
# Callbacks for asynchronous events
#
# Called whenever the IO object receives data
def on_read(data); end
event_callback :on_read
# Called whenever a write completes and the output buffer is empty
def on_write_complete; end
event_callback :on_write_complete
# Called whenever the IO object hits EOF
def on_close; end
event_callback :on_close
#
# Write interface
#
# Write data in a buffered, non-blocking manner
def write(data)
@_write_buffer << data
schedule_write
data.size
end
# Close the IO stream
def close
detach if attached?
detach_write_watcher
@_io.close unless closed?
on_close
nil
end
# Is the IO object closed?
def closed?
@_io.nil? or @_io.closed?
end
#########
protected
#########
# Read from the input buffer and dispatch to on_read
# Write the contents of the output buffer
def on_writable
begin
@_write_buffer.write_to(@_io)
rescue Errno::EINTR
return
# SystemCallError catches Errno::EPIPE & Errno::ECONNRESET amongst others.
rescue SystemCallError, IOError, SocketError
return close
end
if @_write_buffer.empty?
disable_write_watcher
on_write_complete
end
end
# Schedule a write to be performed when the IO object becomes writable
def schedule_write
return unless @_io # this would mean 'we are still pre DNS here'
return unless @_read_watcher.attached? # this would mean 'currently unattached' -- ie still pre DNS, or just plain not attached, which is ok
begin
enable_write_watcher
rescue IOError
end
end
def enable_write_watcher
if @_write_watcher.attached?
@_write_watcher.enable unless @_write_watcher.enabled?
else
@_write_watcher.attach(evloop)
end
end
def disable_write_watcher
@_write_watcher.disable if @_write_watcher and @_write_watcher.enabled?
end
def detach_write_watcher
@_write_watcher.detach if @_write_watcher and @_write_watcher.attached?
end
# Internal class implementing watchers used by Coolio::IO
class Watcher < IOWatcher
def initialize(ruby_io, coolio_io, flags)
@coolio_io = coolio_io
super(ruby_io, flags)
end
# Configure IOWatcher event callbacks to call the method passed to #initialize
def on_readable
@coolio_io.__send__(:on_readable)
end
def on_writable
@coolio_io.__send__(:on_writable)
end
end
end
|
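A hedged usage sketch for the Coolio::IO callbacks above (the echo client is hypothetical; Loop#run, IO#attach and the on_read/on_close hooks are the documented cool.io surface):

require 'cool.io'
require 'socket'

class EchoConnection < Coolio::IO
  def on_read(data)   # fed by on_readable with up to INPUT_SIZE bytes
    write(data)       # buffered; flushed by on_writable when the fd is writable
  end

  def on_close
    puts "connection closed"
  end
end

sock = TCPSocket.new("localhost", 4321)  # hypothetical echo peer
EchoConnection.new(sock).attach(Coolio::Loop.default)
Coolio::Loop.default.run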
tetradice/neuroncheck | lib/neuroncheck/utils.rb | NeuronCheckSystem.Utils.string_join_using_or_conjunction | ruby | def string_join_using_or_conjunction(strings)
ret = ""
strings.each_with_index do |str, i|
case i
when 0 # first element
when strings.size - 1 # last element
ret << " or "
else
ret << ", "
end
ret << str
end
ret
end | Joins one or more strings into an English phrase connected with "or" | train | https://github.com/tetradice/neuroncheck/blob/0505dedd8f7a8018a3891f7519f7861e1c787014/lib/neuroncheck/utils.rb#L41-L56 | module Utils
module_function
# From ActiveSupport (Thanks for Rails Team!) <https://github.com/rails/rails/tree/master/activesupport>
#
# Truncates a given +text+ after a given <tt>length</tt> if +text+ is longer than <tt>length</tt>:
#
# 'Once upon a time in a world far far away'.truncate(27)
# # => "Once upon a time in a wo..."
#
# Pass a string or regexp <tt>:separator</tt> to truncate +text+ at a natural break:
#
# 'Once upon a time in a world far far away'.truncate(27, separator: ' ')
# # => "Once upon a time in a..."
#
# 'Once upon a time in a world far far away'.truncate(27, separator: /\s/)
# # => "Once upon a time in a..."
#
# The last characters will be replaced with the <tt>:omission</tt> string (defaults to "...")
# for a total length not exceeding <tt>length</tt>:
#
# 'And they found that many people were sleeping better.'.truncate(25, omission: '... (continued)')
# # => "And they f... (continued)"
def truncate(str, truncate_at, omission: '...', separator: nil)
return str.dup unless str.length > truncate_at
omission = omission || '...'
length_with_room_for_omission = truncate_at - omission.length
stop = \
if separator
str.rindex(separator, length_with_room_for_omission) || length_with_room_for_omission
else
length_with_room_for_omission
end
"#{str[0, stop]}#{omission}"
end
# Joins one or more strings into an English phrase connected with "or"
def string_join_using_or_conjunction(strings)
ret = ""
strings.each_with_index do |str, i|
case i
when 0 # first element
when strings.size - 1 # last element
ret << " or "
else
ret << ", "
end
ret << str
end
ret
end
# Converts a list of Thread::Backtrace::Location to string captions. When there are too many frames, the middle is elided.
def backtrace_locations_to_captions(locations)
locs = nil
if locations.size > 9 then
locs = (locations[0..3].map{|x| "from #{x.to_s}"} + [" ... (#{locations.size - 8} frames) ..."] + locations[-4..-1].map{|x| "from #{x.to_s}"})
else
locs = locations.map{|x| "from #{x.to_s}"}
end
if locs.size >= 1 then
locs.first.sub!(/\A\s*from /, '')
end
locs
end
# Converts the given integer to an ordinal string (e.g. "1st", "2nd")
def ordinalize(v)
if [11,12,13].include?(v % 100)
"#{v}th"
else
case (v % 10)
when 1
"#{v}st"
when 2
"#{v}nd"
when 3
"#{v}rd"
else
"#{v}th"
end
end
end
end
|
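Expected outputs for the helpers above (a sketch; the values follow directly from the implementations shown):

require 'neuroncheck' # assumption: the gem exposes NeuronCheckSystem::Utils

NeuronCheckSystem::Utils.string_join_using_or_conjunction(%w[String Symbol]) # => "String or Symbol"
NeuronCheckSystem::Utils.string_join_using_or_conjunction(%w[a b c])         # => "a, b or c"
NeuronCheckSystem::Utils.ordinalize(1)   # => "1st"
NeuronCheckSystem::Utils.ordinalize(12)  # => "12th" (11-13 always take "th")
NeuronCheckSystem::Utils.truncate("Once upon a time in a world far far away", 27)
# => "Once upon a time in a wo..."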
chaintope/bitcoinrb | lib/bitcoin/script/script.rb | Bitcoin.Script.to_script_code | ruby | def to_script_code(skip_separator_index = 0)
payload = to_payload
if p2wpkh?
payload = Script.to_p2pkh(chunks[1].pushed_data.bth).to_payload
elsif skip_separator_index > 0
payload = subscript_codeseparator(skip_separator_index)
end
Bitcoin.pack_var_string(payload)
end | If this script is a witness program, return its script code;
otherwise return its own payload. ScriptInterpreter does not use this. | train | https://github.com/chaintope/bitcoinrb/blob/39396e4c9815214d6b0ab694fa8326978a7f5438/lib/bitcoin/script/script.rb#L249-L257 | class Script
include Bitcoin::Opcodes
attr_accessor :chunks
def initialize
@chunks = []
end
# generate P2PKH script
def self.to_p2pkh(pubkey_hash)
new << OP_DUP << OP_HASH160 << pubkey_hash << OP_EQUALVERIFY << OP_CHECKSIG
end
# generate P2WPKH script
def self.to_p2wpkh(pubkey_hash)
new << WITNESS_VERSION << pubkey_hash
end
# generate m of n multisig p2sh script
# @param [Integer] m the number of signatures required for multisig
# @param [Array] pubkeys array of public keys that compose multisig
# @return [Script, Script] first element is p2sh script, second one is redeem script.
def self.to_p2sh_multisig_script(m, pubkeys)
redeem_script = to_multisig_script(m, pubkeys)
[redeem_script.to_p2sh, redeem_script]
end
# generate p2sh script.
# @param [String] script_hash script hash for P2SH
# @return [Script] P2SH script
def self.to_p2sh(script_hash)
Script.new << OP_HASH160 << script_hash << OP_EQUAL
end
# generate p2sh script with this as a redeem script
# @return [Script] P2SH script
def to_p2sh
Script.to_p2sh(to_hash160)
end
def get_multisig_pubkeys
num = Bitcoin::Opcodes.opcode_to_small_int(chunks[-2].bth.to_i(16))
(1..num).map{ |i| chunks[i].pushed_data }
end
# generate m of n multisig script
# @param [Integer] m the number of signatures required for multisig
# @param [Array] pubkeys array of public keys that compose multisig
# @return [Script] multisig script.
def self.to_multisig_script(m, pubkeys)
new << m << pubkeys << pubkeys.size << OP_CHECKMULTISIG
end
# generate p2wsh script for +redeem_script+
# @param [Script] redeem_script target redeem script
# @return [Script] p2wsh script
def self.to_p2wsh(redeem_script)
new << WITNESS_VERSION << redeem_script.to_sha256
end
# generate script from string.
def self.from_string(string)
script = new
string.split(' ').each do |v|
opcode = Opcodes.name_to_opcode(v)
if opcode
script << (v =~ /^\d/ && Opcodes.small_int_to_opcode(v.ord) ? v.ord : opcode)
else
script << (v =~ /^[0-9]+$/ ? v.to_i : v)
end
end
script
end
# generate script from addr.
# @param [String] addr address.
# @return [Bitcoin::Script] parsed script.
def self.parse_from_addr(addr)
begin
segwit_addr = Bech32::SegwitAddr.new(addr)
raise 'Invalid hrp.' unless Bitcoin.chain_params.bech32_hrp == segwit_addr.hrp
Bitcoin::Script.parse_from_payload(segwit_addr.to_script_pubkey.htb)
rescue Exception => e
hex, addr_version = Bitcoin.decode_base58_address(addr)
case addr_version
when Bitcoin.chain_params.address_version
Bitcoin::Script.to_p2pkh(hex)
when Bitcoin.chain_params.p2sh_version
Bitcoin::Script.to_p2sh(hex)
else
throw e
end
end
end
def self.parse_from_payload(payload)
s = new
buf = StringIO.new(payload)
until buf.eof?
opcode = buf.read(1)
if opcode.pushdata?
pushcode = opcode.ord
packed_size = nil
len = case pushcode
when OP_PUSHDATA1
packed_size = buf.read(1)
packed_size.unpack('C').first
when OP_PUSHDATA2
packed_size = buf.read(2)
packed_size.unpack('v').first
when OP_PUSHDATA4
packed_size = buf.read(4)
packed_size.unpack('V').first
else
pushcode if pushcode < OP_PUSHDATA1
end
if len
s.chunks << [len].pack('C') if buf.eof?
unless buf.eof?
chunk = (packed_size ? (opcode + packed_size) : (opcode)) + buf.read(len)
s.chunks << chunk
end
end
else
if Opcodes.defined?(opcode.ord)
s << opcode.ord
else
s.chunks << (opcode + buf.read) # If opcode is invalid, put all remaining data in last chunk.
end
end
end
s
end
def to_payload
chunks.join
end
def to_hex
to_payload.bth
end
def empty?
chunks.size == 0
end
def addresses
return [p2pkh_addr] if p2pkh?
return [p2sh_addr] if p2sh?
return [bech32_addr] if witness_program?
return get_multisig_pubkeys.map{|pubkey| Bitcoin::Key.new(pubkey: pubkey.bth).to_p2pkh} if multisig?
[]
end
# check whether this is a standard script.
def standard?
p2pkh? | p2sh? | p2wpkh? | p2wsh? | multisig? | standard_op_return?
end
# whether this script is a P2PKH format script.
def p2pkh?
return false unless chunks.size == 5
[OP_DUP, OP_HASH160, OP_EQUALVERIFY, OP_CHECKSIG] ==
(chunks[0..1]+ chunks[3..4]).map(&:ord) && chunks[2].bytesize == 21
end
# whether this script is a P2WPKH format script.
def p2wpkh?
return false unless chunks.size == 2
chunks[0].ord == WITNESS_VERSION && chunks[1].bytesize == 21
end
def p2wsh?
return false unless chunks.size == 2
chunks[0].ord == WITNESS_VERSION && chunks[1].bytesize == 33
end
def p2sh?
return false unless chunks.size == 3
OP_HASH160 == chunks[0].ord && OP_EQUAL == chunks[2].ord && chunks[1].bytesize == 21
end
def multisig?
return false if chunks.size < 4 || chunks.last.ord != OP_CHECKMULTISIG
pubkey_count = Opcodes.opcode_to_small_int(chunks[-2].opcode)
sig_count = Opcodes.opcode_to_small_int(chunks[0].opcode)
return false unless pubkey_count || sig_count
sig_count <= pubkey_count
end
def op_return?
chunks.size >= 1 && chunks[0].ord == OP_RETURN
end
def standard_op_return?
op_return? && size <= MAX_OP_RETURN_RELAY &&
(chunks.size == 1 || chunks[1].opcode <= OP_16)
end
def op_return_data
return nil unless op_return?
return nil if chunks.size == 1
chunks[1].pushed_data
end
# whether this is a data-push-only script that does not include any other opcodes
def push_only?
chunks.each do |c|
return false if !c.opcode.nil? && c.opcode > OP_16
end
true
end
# A witness program is any valid Script that consists of a 1-byte push opcode followed by a data push between 2 and 40 bytes.
def witness_program?
return false if size < 4 || size > 42 || chunks.size < 2
opcode = chunks[0].opcode
return false if opcode != OP_0 && (opcode < OP_1 || opcode > OP_16)
return false unless chunks[1].pushdata?
if size == (chunks[1][0].unpack('C').first + 2)
program_size = chunks[1].pushed_data.bytesize
return program_size >= 2 && program_size <= 40
end
false
end
# get witness commitment
def witness_commitment
return nil if !op_return? || op_return_data.bytesize < 36
buf = StringIO.new(op_return_data)
return nil unless buf.read(4).bth == WITNESS_COMMITMENT_HEADER
buf.read(32).bth
end
# If this script is a witness program, return its script code;
# otherwise return its own payload. ScriptInterpreter does not use this.
# get witness version and witness program
def witness_data
version = opcode_to_small_int(chunks[0].opcode)
program = chunks[1].pushed_data
[version, program]
end
# append object to payload
def <<(obj)
if obj.is_a?(Integer)
push_int(obj)
elsif obj.is_a?(String)
append_data(obj)
elsif obj.is_a?(Array)
obj.each { |o| self.<< o}
self
end
end
# push integer to stack.
def push_int(n)
begin
append_opcode(n)
rescue ArgumentError
append_data(Script.encode_number(n))
end
self
end
# append opcode to payload
# @param [Integer] opcode append opcode which defined by Bitcoin::Opcodes
# @return [Script] return self
def append_opcode(opcode)
opcode = Opcodes.small_int_to_opcode(opcode) if -1 <= opcode && opcode <= 16
raise ArgumentError, "specified invalid opcode #{opcode}." unless Opcodes.defined?(opcode)
chunks << opcode.chr
self
end
# append data to payload with pushdata opcode
# @param [String] data append data. this data is not binary
# @return [Script] return self
def append_data(data)
data = Encoding::ASCII_8BIT == data.encoding ? data : data.htb
chunks << Bitcoin::Script.pack_pushdata(data)
self
end
# Check whether the item is in the chunks of the script.
def include?(item)
chunk_item = if item.is_a?(Integer)
item.chr
elsif item.is_a?(String)
data = Encoding::ASCII_8BIT == item.encoding ? item : item.htb
Bitcoin::Script.pack_pushdata(data)
end
return false unless chunk_item
chunks.include?(chunk_item)
end
def to_s
chunks.map { |c|
case c
when Integer
opcode_to_name(c)
when String
if c.pushdata?
v = Opcodes.opcode_to_small_int(c.ord)
if v
v
else
data = c.pushed_data
if data.bytesize <= 4
Script.decode_number(data.bth) # for scriptnum
else
data.bth
end
end
else
opcode = Opcodes.opcode_to_name(c.ord)
opcode ? opcode : 'OP_UNKNOWN [error]'
end
end
}.join(' ')
end
# generate sha-256 hash for payload
def to_sha256
Bitcoin.sha256(to_payload).bth
end
# generate hash160 hash for payload
def to_hash160
Bitcoin.hash160(to_payload.bth)
end
# script size
def size
to_payload.bytesize
end
# execute script interpreter using this script for development.
def run
Bitcoin::ScriptInterpreter.eval(Bitcoin::Script.new, self.dup)
end
# encode int value to script number hex.
# The stacks hold byte vectors.
# When used as numbers, byte vectors are interpreted as little-endian variable-length integers
# with the most significant bit determining the sign of the integer.
# Thus 0x81 represents -1. 0x80 is another representation of zero (so called negative 0).
# Positive 0 is represented by a null-length vector.
# Byte vectors are interpreted as Booleans where False is represented by any representation of zero,
# and True is represented by any representation of non-zero.
def self.encode_number(i)
return '' if i == 0
negative = i < 0
hex = i.abs.to_even_length_hex
hex = '0' + hex unless (hex.length % 2).zero?
v = hex.htb.reverse # change endian
v = v << (negative ? 0x80 : 0x00) unless (v[-1].unpack('C').first & 0x80) == 0
v[-1] = [v[-1].unpack('C').first | 0x80].pack('C') if negative
v.bth
end
# decode script number hex to int value
def self.decode_number(s)
v = s.htb.reverse
return 0 if v.length.zero?
mbs = v[0].unpack('C').first
v[0] = [mbs - 0x80].pack('C') unless (mbs & 0x80) == 0
result = v.bth.to_i(16)
result = -result unless (mbs & 0x80) == 0
result
end
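# Round-trip examples (these follow from the rules above):
#   encode_number(1000) # => "e803"
#   encode_number(-1)   # => "81" (sign bit set on the most significant byte)
#   decode_number("80") # => 0    (negative zero decodes to 0)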
# convert binary +data+ into a pushdata chunk: prepend the data length, with a PUSHDATA opcode when necessary.
def self.pack_pushdata(data)
size = data.bytesize
header = if size < OP_PUSHDATA1
[size].pack('C')
elsif size < 0xff
[OP_PUSHDATA1, size].pack('CC')
elsif size < 0xffff
[OP_PUSHDATA2, size].pack('Cv')
elsif size < 0xffffffff
[OP_PUSHDATA4, size].pack('CV')
else
raise ArgumentError, 'data size is too big.'
end
header + data
end
# subscript this script to the specified range.
def subscript(*args)
s = self.class.new
s.chunks = chunks[*args]
s
end
# removes chunks matching subscript byte-for-byte and returns as a new object.
def find_and_delete(subscript)
raise ArgumentError, 'subscript must be Bitcoin::Script' unless subscript.is_a?(Script)
return self if subscript.chunks.empty?
buf = []
i = 0
result = Script.new
chunks.each do |chunk|
sub_chunk = subscript.chunks[i]
if chunk.start_with?(sub_chunk)
if chunk == sub_chunk
buf << chunk
i += 1
(i = 0; buf.clear) if i == subscript.chunks.size # matched the whole subscript
else # matched the part of head
i = 0
tmp = chunk.dup
tmp.slice!(sub_chunk)
result.chunks << tmp
end
else
result.chunks << buf.join unless buf.empty?
if buf.first == chunk
i = 1
buf = [chunk]
else
i = 0
result.chunks << chunk
end
end
end
result
end
# remove all occurrences of opcode. Typically it's OP_CODESEPARATOR.
def delete_opcode(opcode)
@chunks = chunks.select{|chunk| chunk.ord != opcode}
self
end
# Returns the subscript made of the chunks that follow the OP_CODESEPARATOR selected by separator_index (0 keeps the whole script).
def subscript_codeseparator(separator_index)
buf = []
process_separator_index = 0
chunks.each{|chunk|
buf << chunk if process_separator_index == separator_index
if chunk.ord == OP_CODESEPARATOR && process_separator_index < separator_index
process_separator_index += 1
end
}
buf.join
end
def ==(other)
return false unless other
chunks == other.chunks
end
def type
return 'pubkeyhash' if p2pkh?
return 'scripthash' if p2sh?
return 'multisig' if multisig?
return 'witness_v0_keyhash' if p2wpkh?
return 'witness_v0_scripthash' if p2wsh?
'nonstandard'
end
def to_h
h = {asm: to_s, hex: to_payload.bth, type: type}
addrs = addresses
unless addrs.empty?
h[:req_sigs] = multisig? ? Bitcoin::Opcodes.opcode_to_small_int(chunks[0].bth.to_i(16)) :addrs.size
h[:addresses] = addrs
end
h
end
private
# generate p2pkh address. if this script is not p2pkh, return nil.
def p2pkh_addr
return nil unless p2pkh?
hash160 = chunks[2].pushed_data.bth
return nil unless hash160.htb.bytesize == 20
Bitcoin.encode_base58_address(hash160, Bitcoin.chain_params.address_version)
end
# generate p2wpkh address. if this script is not p2wpkh, return nil.
def p2wpkh_addr
p2wpkh? ? bech32_addr : nil
end
# generate p2sh address. if this script is not p2sh, return nil.
def p2sh_addr
return nil unless p2sh?
hash160 = chunks[1].pushed_data.bth
return nil unless hash160.htb.bytesize == 20
Bitcoin.encode_base58_address(hash160, Bitcoin.chain_params.p2sh_version)
end
# generate p2wsh address. if this script is not p2wsh, return nil.
def p2wsh_addr
p2wsh? ? bech32_addr : nil
end
# return bech32 address for payload
def bech32_addr
segwit_addr = Bech32::SegwitAddr.new
segwit_addr.hrp = Bitcoin.chain_params.bech32_hrp
segwit_addr.script_pubkey = to_payload.bth
segwit_addr.addr
end
end
|
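A hedged usage sketch for the Script helpers above (Bitcoin::Key.generate is assumed to be available in bitcoinrb; for P2WPKH, #to_script_code yields the var_str-wrapped P2PKH script that BIP-143 signing hashes):

require 'bitcoin'

key = Bitcoin::Key.generate
spk = Bitcoin::Script.to_p2wpkh(Bitcoin.hash160(key.pubkey))
spk.p2wpkh?        # => true
spk.to_script_code # length-prefixed OP_DUP OP_HASH160 <pubkey hash> OP_EQUALVERIFY OP_CHECKSIG

p2sh, redeem = Bitcoin::Script.to_p2sh_multisig_script(2, [key.pubkey, Bitcoin::Key.generate.pubkey])
redeem.multisig? # => true
p2sh.p2sh?       # => true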
metanorma/relaton | lib/relaton/db.rb | Relaton.Db.open_cache_biblio | ruby | def open_cache_biblio(dir, global: true)
return nil if dir.nil?
db = DbCache.new dir
if File.exist? dir
if global
unless db.check_version?
FileUtils.rm_rf(Dir.glob(dir + '/*'), secure: true)
warn "Global cache version is obsolete and cleared."
end
db.set_version
elsif db.check_version? then db
else
warn "Local cache version is obsolete."
nil
end
else db.set_version
end
end | if cached reference is undated, expire it after 60 days
@param bib [Hash]
@param year [String]
# def valid_bib_entry?(bib, year)
#   bib&.is_a?(Hash) && bib&.has_key?("bib") && bib&.has_key?("fetched") &&
#     (year || Date.today - bib["fetched"] < 60)
# end
@param dir [String] DB directory
@param global [TrueClass, FalseClass]
@return [PStore] | train | https://github.com/metanorma/relaton/blob/2fac19da2f3ef3c30b8e8d8815a14d2115df0be6/lib/relaton/db.rb#L189-L206 | class Db
SUPPORTED_GEMS = %w[isobib ietfbib gbbib iecbib nistbib].freeze
# @param global_cache [String] directory of global DB
# @param local_cache [String] directory of local DB
def initialize(global_cache, local_cache)
register_gems
@registry = Relaton::Registry.instance
@db = open_cache_biblio(global_cache)
@local_db = open_cache_biblio(local_cache, global: false)
@db_name = global_cache
@local_db_name = local_cache
end
def register_gems
puts "[relaton] Info: detecting backends:"
SUPPORTED_GEMS.each do |b|
# puts b
begin
require b
rescue LoadError
puts "[relaton] Error: backend #{b} not present"
end
end
end
# The class of reference requested is determined by the prefix of the code:
# GB Standard for gbbib, IETF for ietfbib, ISO for isobib, IEC or IEV for iecbib,
# @param code [String] the ISO standard Code to look up (e.g. "ISO 9000")
# @param year [String] the year the standard was published (optional)
# @param opts [Hash] options; restricted to :all_parts if all-parts reference is required
# @return [String] Relaton XML serialisation of reference
def fetch(code, year = nil, opts = {})
stdclass = standard_class(code) or return nil
check_bibliocache(code, year, opts, stdclass)
end
def fetch_std(code, year = nil, stdclass = nil, opts = {})
std = nil
@registry.processors.each do |name, processor|
std = name if processor.prefix == stdclass
end
unless std
std = standard_class(code) or return nil
end
check_bibliocache(code, year, opts, std)
end
def fetched(key)
return @local_db.fetched key if @local_db
return @db.fetched key if @db
""
end
# The document identifier class corresponding to the given code
def docid_type(code)
stdclass = standard_class(code) or return [nil, code]
prefix, code = strip_id_wrapper(code, stdclass)
[@registry.processors[stdclass].idtype, code]
end
# @param key [String]
# @return [Hash]
def load_entry(key)
unless @local_db.nil?
entry = @local_db[key]
return entry if entry
end
@db[key]
end
# @param key [String]
# @param value [String] Bibitem xml serialisation.
# @option value [String] Bibitem xml serialisation.
def save_entry(key, value)
@db.nil? || (@db[key] = value)
@local_db.nil? || (@local_db[key] = value)
end
# list all entries as a serialization
# @return [String]
def to_xml
db = @local_db || @db || return
Nokogiri::XML::Builder.new(encoding: "UTF-8") do |xml|
xml.documents do
xml.parent.add_child db.all.join(" ")
end
end.to_xml
end
private
# @param code [String] code of standard
# @return [Symbol] standard class name
def standard_class(code)
@registry.processors.each do |name, processor|
return name if /^#{processor.prefix}/.match(code) ||
processor.defaultprefix.match(code)
end
allowed = @registry.processors.reduce([]) do |m, (_k, v)|
m << v.prefix
end
warn "#{code} does not have a recognised prefix: #{allowed.join(', ')}"
nil
end
# TODO: i18n
# Format ID
# @param code [String]
# @param year [String]
# @param opts [Hash]
# @param stdclass [Symbol]
# @return [Array]
def std_id(code, year, opts, stdclass)
prefix, code = strip_id_wrapper(code, stdclass)
ret = code
ret += ":#{year}" if year
ret += " (all parts)" if opts[:all_parts]
["#{prefix}(#{ret})", code]
end
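# (Example for std_id above, assuming a registered ISO processor whose
# prefix is "ISO" and whose registry key is :isobib.)
#   std_id("ISO 111", "2014", { all_parts: true }, :isobib)
#   # => ["ISO(ISO 111:2014 (all parts))", "ISO 111"]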
# Find prefix and clean code
# @param code [String]
# @param stdclass [Symbol]
# @return [Array]
def strip_id_wrapper(code, stdclass)
prefix = @registry.processors[stdclass].prefix
code = code.sub(/^#{prefix}\((.+)\)$/, "\\1")
[prefix, code]
end
def bib_retval(entry, stdclass)
entry =~ /^not_found/ ? nil : @registry.processors[stdclass].from_xml(entry)
end
# @param code [String]
# @param year [String]
# @param opts [Hash]
# @param stdclass [Symbol]
def check_bibliocache(code, year, opts, stdclass)
id, searchcode = std_id(code, year, opts, stdclass)
db = @local_db || @db
altdb = @local_db && @db ? @db : nil
return bib_retval(new_bib_entry(searchcode, year, opts, stdclass), stdclass) if db.nil?
db.delete(id) unless db.valid_entry?(id, year)
if altdb
db[id] ||= altdb[id]
db[id] ||= new_bib_entry(searchcode, year, opts, stdclass)
altdb[id] = db[id] if !altdb.valid_entry?(id, year)
else
db[id] ||= new_bib_entry(searchcode, year, opts, stdclass)
end
bib_retval(db[id], stdclass)
end
# hash uses =>, because the hash is imported from JSON
# @param code [String]
# @param year [String]
# @param opts [Hash]
# @param stdclass [Symbol]
# @return [Hash]
def new_bib_entry(code, year, opts, stdclass)
bib = @registry.processors[stdclass].get(code, year, opts)
bib = bib.to_xml if bib.respond_to? :to_xml
bib = "not_found #{Date.today}" if bib.nil? || bib.empty?
bib
end
# if cached reference is undated, expire it after 60 days
# @param bib [Hash]
# @param year [String]
# def valid_bib_entry?(bib, year)
# bib&.is_a?(Hash) && bib&.has_key?("bib") && bib&.has_key?("fetched") &&
# (year || Date.today - bib["fetched"] < 60)
# end
# @param dir [String] DB directory
# @param global [TrueClass, FalseClass]
# @return [PStore]
# Check if the version of the DB matches the gem version.
# @param cache_db [String] DB directory
# @return [TrueClass, FalseClass]
# def check_cache_version(cache_db)
# cache_db.transaction { cache_db[:version] == VERSION }
# end
# Set version of the DB to the gem version.
# @param cache_db [String] DB directory
# @return [Pstore]
# def set_cache_version(cache_db)
# unless File.exist? cache_db.path
# cache_db.transaction { cache_db[:version] = VERSION }
# end
# cache_db
# end
# @param enstry [String] entry in XML format
# @return [IsoBibItem::IsoBibliographicItem]
# def from_xml(entry)
# IsoBibItem.from_xml entry # will be unmarshaller
# end
# @param [Hash{String=>Hash{String=>String}}] biblio
# def save_cache_biblio(biblio, filename)
# return if biblio.nil? || filename.nil?
# File.open(filename, "w") do |b|
# b << biblio.reduce({}) do |s, (k, v)|
# bib = v["bib"].respond_to?(:to_xml) ? v["bib"].to_xml : v["bib"]
# s.merge(k => { "fetched" => v["fetched"], "bib" => bib })
# end.to_json
# end
# end
end
|
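A hedged end-to-end sketch of the Db API above (the cache path is hypothetical; repeated fetches for the same code and year are served from the cache):

require 'relaton'

db = Relaton::Db.new("#{Dir.home}/.relaton/cache", nil) # global cache only, no local cache
item = db.fetch("ISO 19115-1", "2014", {})
puts item.to_xml if item
db.fetch("ISO 19115-1", nil, all_parts: true) # cached separately under "(all parts)"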
ikayzo/SDL.rb | lib/sdl4r/tag.rb | SDL4R.Tag.namespace= | ruby | def namespace=(a_namespace)
a_namespace = a_namespace.to_s
SDL4R.validate_identifier(a_namespace) unless a_namespace.empty?
@namespace = a_namespace
end | The namespace to set. +nil+ will be coerced to the empty string.
Raises +ArgumentError+ if the namespace is non-blank and is not
a legal SDL identifier (see SDL4R#validate_identifier) | train | https://github.com/ikayzo/SDL.rb/blob/1663b9f5aa95d8d6269f060e343c2d2fd9309259/lib/sdl4r/tag.rb#L707-L711 | class Tag
# the name of this Tag
#
attr_reader :name
# the namespace of this Tag or an empty string when there is no namespace (i.e. default
# namespace).
#
attr_reader :namespace
# Convenient method to check and handle a pair of parameters namespace/name where, in some
# cases, only one is specified (i.e. the name only).
#
# Use at the beginning of a method in order to have correctly defined parameters:
# def foo(namespace, name = nil)
# namespace, name = to_nns namespace, name
# end
#
def to_nns(namespace, name)
if name.nil? and not namespace.nil?
name = namespace
namespace = ""
end
return namespace, name
end
private :to_nns
# Creates an empty tag in the given namespace. If the +namespace+ is nil
# it will be coerced to an empty String.
#
# tag = Tag.new("name")
# tag = Tag.new("namespace", "name")
#
# tag = Tag.new("fruit") do
# add_value 2
# new_child("orange") do
# set_attribute("quantity", 2)
# end
# end
#
# which builds the following SDL structure
#
# fruit 2 {
# orange quantity=2
# }
#
# If you provide a block that takes an argument, you will write the same example, as follows:
#
# tag = Tag.new("fruit") do |t|
# t.add_value 2
# t.new_child("orange") do
# set_attribute("quantity", 2)
# end
# end
#
# In this case, the current context is not the new Tag anymore but the context of your code.
#
# === Raises
# ArgumentError if the name is not a legal SDL identifier
# (see SDL4R#validate_identifier) or the namespace is non-blank
# and is not a legal SDL identifier.
#
def initialize(namespace, name = nil, &block)
namespace, name = to_nns namespace, name
raise ArgumentError, "tag namespace must be a String" unless namespace.is_a? String
raise ArgumentError, "tag name must be a String" unless name.is_a? String
SDL4R.validate_identifier(namespace) unless namespace.empty?
@namespace = namespace
name = name.to_s.strip
raise ArgumentError, "Tag name cannot be nil or empty" if name.empty?
SDL4R.validate_identifier(name)
@name = name
@children = []
@values = []
# a Hash of Hash : {namespace => {name => value}}
# The default namespace is represented by an empty string.
@attributesByNamespace = {}
if block_given?
if block.arity > 0
block[self]
else
instance_eval(&block)
end
end
end
# Creates a new child tag.
# Can take a block so that you can write something like:
#
# car = Tag.new("car") do
# new_child("wheels") do
# self << 4
# end
# end
#
# The context of execution of the given block is the child instance.
# If you provide a block that takes a parameter (see below), the context is the context of your
# code:
#
# car = Tag.new("car") do |child|
# child.new_child("wheels") do |grandchild|
# grandchild << 4
# end
# end
#
# Returns the created child Tag.
#
def new_child(*args, &block)
return add_child Tag.new(*args, &block)
end
# Add a child to this Tag.
#
# _child_:: The child to add
#
# Returns the added child.
#
def add_child(child)
@children.push(child)
return child
end
# Adds the given object as a child if it is a +Tag+, as an attribute if it is a Hash
# {key => value} (supports namespaces), or as a value otherwise.
# If it is an Enumerable (e.g. Array), each of its elements is added to this Tag via this
# operator. If any of its elements is itself an Enumerable, then an anonymous tag is created and
# the Enumerable is passed to it via this operator (see the examples below).
#
# tag << Tag.new("child")
# tag << 123 # new integer value
# tag << "islamabad" # new string value
# tag << { "metric:length" => 1027 } # new attribute (with namespace)
# tag << [nil, 456, "abc"] # several values added
#
# tag = Tag.new("tag")
# tag << [[1, 2, 3], [4, 5, 6]] # tag {
# # 1 2 3
# # 4 5 6
# # }
#
# Of course, despite the fact that String is an Enumerable, it is treated as a
# plain value rather than enumerated.
#
# Returns +self+.
#
# Use other accessors (#add_child, #add_value, #attributes, etc) for a stricter and less
# "magical" behavior.
#
def <<(o)
if o.is_a?(Tag)
add_child(o)
elsif o.is_a?(Hash)
o.each_pair { |key, value|
namespace, key = key.split(/:/) if key.match(/:/)
namespace ||= ""
set_attribute(namespace, key, value)
}
elsif o.is_a? String
add_value(o)
elsif o.is_a? Enumerable
o.each { |item|
if item.is_a? Enumerable and not item.is_a? String
anonymous = new_child("content")
anonymous << item
else
self << item
end
}
else
add_value(o)
end
return self
end
# Remove a child from this Tag
#
# _child_:: the child to remove
#
# Returns true if the child exists and is removed
#
def remove_child(child)
return !@children.delete(child).nil?
end
# Removes all children.
#
def clear_children
@children = []
nil
end
#
# A convenience method that sets the first value in the value list.
# See #add_value for legal types.
#
# _value_:: The value to be set.
#
# === Raises
#
# _ArgumentError_:: if the value is not a legal SDL type
#
def value=(value)
@values[0] = SDL4R.coerce_or_fail(value)
nil
end
#
# A convenience method that returns the first value.
#
def value
@values[0]
end
# Returns the number of children Tag.
#
def child_count
@children.size
end
# children(recursive)
# children(recursive, name)
# children(recursive, namespace, name)
#
# children(recursive) { |child| ... }
# children(recursive, name) { |child| ... }
# children(recursive, namespace, name) { |child| ... }
#
# Returns an Array of the children Tags of this Tag or enumerates them.
#
# _recursive_:: if true children and all descendants will be returned. False by default.
# _name_:: if not nil, only children having this name will be returned. Nil by default.
# _namespace_:: use nil for all namespaces and "" for the default one. Nil by default.
#
# tag.children # => array of the children
# tag.children(true) { |descendant| ... }
#
# tag.children(false, "name") # => children of name "name"
# tag.children(false, "ns", nil) # => children of namespace "ns"
#
def children(recursive = false, namespace = nil, name = :DEFAULT, &block) # :yields: child
if name == :DEFAULT
name = namespace
namespace = nil
end
if block_given?
each_child(recursive, namespace, name, &block)
return nil
else
unless recursive or name or namespace
return @children
else
result = []
each_child(recursive, namespace, name) { |child|
result << child
}
return result
end
end
end
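# For instance, with the hypothetical structure below, the call forms
# resolve as follows:
#
# root = Tag.new("root") do
# new_child("a")
# new_child("ns", "a")
# end
# root.children.size # => 2
# root.children(false, "a").size # => 2 (both namespaces)
# root.children(false, "ns", "a").size # => 1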
# Returns the values of all the children with the given +name+. If the child has
# more than one value, all the values will be added as an array. If the child
# has no value, +nil+ will be added. The search is not recursive.
#
# _name_:: if nil, all children are considered (nil by default).
def children_values(name = nil)
children_values = []
each_child(false, name) { |child|
case child.values.size
when 0
children_values << nil
when 1
children_values << child.value
else
children_values << child.values
end
}
return children_values
end
# child
# child(name)
# child(recursive, name)
#
# Get the first child with the given name, optionally using a recursive search.
#
# _name_:: the name of the child Tag. If +nil+, the first child is returned (+nil+ if there are
# no children at all).
#
# Returns the first child tag having the given name or +nil+ if no such child exists
#
def child(recursive = false, name = nil)
if name.nil?
name = recursive
recursive = false
end
unless name
return @children.first
else
each_child(recursive, name) { |child| return child }
end
end
# Indicates whether the child Tag of given name exists.
#
# _name_:: name of the searched child Tag
#
def has_child?(name)
!child(name).nil?
end
# Indicates whether there are children Tag.
#
def has_children?
!@children.empty?
end
# Enumerates the children +Tag+s of this Tag and calls the given block
# providing it the child as parameter.
#
# _recursive_:: if true, enumerate grand-children, etc, recursively
# _namespace_:: if not nil, indicates the namespace of the children to enumerate
# _name_:: if not nil, indicates the name of the children to enumerate
#
def each_child(recursive = false, namespace = nil, name = :DEFAULT, &block)
if name == :DEFAULT
name = namespace
namespace = nil
end
@children.each do |child|
if (name.nil? or child.name == name) and
(namespace.nil? or child.namespace == namespace)
yield child
end
child.children(recursive, namespace, name, &block) if recursive
end
return nil
end
private :each_child
# Returns a new Hash where the children's names as keys and their values as the key's value.
# Example:
#
# child1 "toto"
# child2 2
#
# would give
#
# { "child1" => "toto", "child2" => 2 }
#
def to_child_hash
hash = {}
children { |child| hash[child.name] = child.value }
return hash
end
# Returns a new Hash where the children's names as keys and their values as the key's value.
# Values are converted to Strings. +nil+ values become empty Strings.
# Example:
#
# child1 "toto"
# child2 2
# child3 null
#
# would give
#
# { "child1" => "toto", "child2" => "2", "child3" => "" }
#
def to_child_string_hash
hash = {}
children do |child|
# FIXME: it is quite hard to be sure whether we should mimic the Java version
# as there might be a lot of values that don't translate nicely to Strings.
hash[child.name] = child.value.to_s
end
return hash
end
# Adds a value to this Tag. See SDL4R#coerce_or_fail to know about the allowable types.
#
# _v_:: The value to add
#
# Raises an +ArgumentError+ if the value is not a legal SDL type
#
def add_value(v)
@values.push(SDL4R::coerce_or_fail(v))
return nil
end
# Returns true if +v+ is a value of this Tag's.
#
def has_value?(v)
@values.include?(v)
end
# Removes the first occurence of the specified value from this Tag.
#
# _v_:: The value to remove
#
# Returns true If the value exists and is removed
#
def remove_value(v)
index = @values.index(v)
if index
return !@values.delete_at(index).nil?
else
return false
end
end
# Removes all values.
#
def clear_values
@values = []
nil
end
# Returns an Array of the values of this Tag or enumerates them.
#
# tag.values # => [123, "spices"]
# tag.values { |value| puts value }
#
def values # :yields: value
if block_given?
@values.each { |v| yield v }
nil
else
return @values
end
end
# Set the values for this tag. See #add_value for legal value types.
#
# _values_:: The new values
#
# Raises an +ArgumentError+ if the collection contains any values which are not legal SDL types.
#
def values=(someValues)
@values.clear()
someValues.to_a.each { |v|
# this is required to ensure validation of types
add_value(v)
}
nil
end
# set_attribute(key, value)
# set_attribute(namespace, key, value)
#
# Set an attribute in the given namespace for this tag. The allowable
# attribute value types are the same as those allowed for #add_value.
#
# _namespace_:: The namespace for this attribute
# _key_:: The attribute key
# _value_:: The attribute value
#
# Raises +ArgumentError+ if the key is not a legal SDL identifier (see
# SDL4R#validate_identifier), or the namespace is non-blank and is not a legal SDL identifier,
# or the value is not a legal SDL type
#
def set_attribute(namespace, key, value = :default)
if value == :default
value = key
key = namespace
namespace = ""
end
raise ArgumentError, "attribute namespace must be a String" unless namespace.is_a? String
raise ArgumentError, "attribute key must be a String" unless key.is_a? String
raise ArgumentError, "attribute key cannot be empty" if key.empty?
SDL4R.validate_identifier(namespace) unless namespace.empty?
SDL4R.validate_identifier(key)
attributes = @attributesByNamespace[namespace]
if attributes.nil?
attributes = {}
@attributesByNamespace[namespace] = attributes
end
attributes[key] = SDL4R.coerce_or_fail(value)
end
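# A short sketch of the two arities (the attribute names are made up):
#
# tag.set_attribute("width", 128) # default namespace
# tag.set_attribute("ui", "width", 64) # "ui" namespace
# tag.attribute("width") # => 128
# tag.attribute("ui", "width") # => 64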
# attribute(key)
# attribute(namespace, key)
#
# Returns the attribute of the specified +namespace+ of specified +key+ or +nil+ if not found.
#
#
def attribute(namespace, key = nil)
namespace, key = to_nns namespace, key
attributes = @attributesByNamespace[namespace]
return attributes.nil? ? nil : attributes[key]
end
# Indicates whether there is at least one attribute in this Tag.
# has_attribute?
#
# Indicates whether the specified attribute exists in this Tag.
# has_attribute?(key)
# has_attribute?(namespace, key)
#
def has_attribute?(namespace = nil, key = nil)
namespace, key = to_nns namespace, key
if namespace or key
attributes = @attributesByNamespace[namespace]
return attributes.nil? ? false : attributes.has_key?(key)
else
attributes { return true }
return false
end
end
# Returns a Hash of the attributes of the specified +namespace+ (default is all) or enumerates
# them.
#
# tag.attributes # => { "length" => 123, "width" = 25.4, "orig:color" => "gray" }
# tag.attributes("orig") do |namespace, key, value|
# p "#{namespace}:#{key} = #{value}"
# end
#
# _namespace_::
# namespace of the returned attributes. If nil, all attributes are returned with
# qualified names (e.g. "meat:color"). If "", attributes of the default namespace are returned.
#
def attributes(namespace = nil, &block) # :yields: namespace, key, value
if block_given?
each_attribute(namespace, &block)
else
if namespace.nil?
hash = {}
each_attribute do | namespace, key, value |
qualified_name = namespace.empty? ? key : namespace + ':' + key
hash[qualified_name] = value
end
return hash
else
return @attributesByNamespace[namespace]
end
end
end
# remove_attribute(key)
# remove_attribute(namespace, key)
#
# Removes the attribute, whose name and namespace are specified.
#
# _key_:: name of the removed attribute
# _namespace_:: namespace of the removed attribute (equal to "", default namespace, by default)
#
# Returns the value of the removed attribute or +nil+ if it didn't exist.
#
def remove_attribute(namespace, key = nil)
namespace, key = to_nns namespace, key
attributes = @attributesByNamespace[namespace]
return attributes.nil? ? nil : attributes.delete(key)
end
# Clears the attributes of the specified namespace or all the attributes if +namespace+ is
# +nil+.
#
def clear_attributes(namespace = nil)
if namespace.nil?
@attributesByNamespace.clear
else
@attributesByNamespace.delete(namespace)
end
end
# Enumerates the attributes for the specified +namespace+.
# Enumerates all the attributes by default.
#
def each_attribute(namespace = nil, &block) # :yields: namespace, key, value
if namespace.nil?
@attributesByNamespace.each_key { |a_namespace| each_attribute(a_namespace, &block) }
else
attributes = @attributesByNamespace[namespace]
unless attributes.nil?
attributes.each_pair do |key, value|
yield namespace, key, value
end
end
end
end
private :each_attribute
# set_attributes(attribute_hash)
# set_attributes(namespace, attribute_hash)
#
# Sets the attributes specified by a Hash in the given +namespace+ in one operation. The
# previous attributes of the specified +namespace+ are removed.
# See #set_attribute for allowable attribute value types.
#
# _attributes_:: a Hash where keys are attribute keys
# _namespace_:: "" (default namespace) by default
#
# Raises an +ArgumentError+ if any key in the map is not a legal SDL identifier
# (see SDL4R#validate_identifier), or any value is not a legal SDL type.
#
def set_attributes(namespace, attribute_hash = nil)
if attribute_hash.nil?
attribute_hash = namespace
namespace = ""
end
raise ArgumentError, "namespace can't be nil" if namespace.nil?
raise ArgumentError, "attribute_hash should be a Hash" unless attribute_hash.is_a? Hash
namespace_attributes = @attributesByNamespace[namespace]
namespace_attributes.clear if namespace_attributes
attribute_hash.each_pair do |key, value|
# Calling set_attribute() is required to ensure validations
set_attribute(namespace, key, value)
end
end
# Sets all the attributes of the default namespace for this Tag in one
# operation.
#
# See #set_attributes.
#
def attributes=(attribute_hash)
set_attributes(attribute_hash)
end
# Sets the name of this Tag.
#
# Raises +ArgumentError+ if the name is not a legal SDL identifier
# (see SDL4R#validate_identifier).
#
def name=(a_name)
a_name = a_name.to_s
SDL4R.validate_identifier(a_name)
@name = a_name
end
# The namespace to set. +nil+ will be coerced to the empty string.
#
# Raises +ArgumentError+ if the namespace is non-blank and is not
# a legal SDL identifier (see SDL4R#validate_identifier)
#
def namespace=(a_namespace)
a_namespace = a_namespace.to_s
SDL4R.validate_identifier(a_namespace) unless a_namespace.empty?
@namespace = a_namespace
end
#
# Adds all the tags specified in the given IO, String, Pathname or URI to this Tag.
#
# Returns this Tag after adding all the children read from +input+.
#
def read(input)
if input.is_a? String
read_from_io(true) { StringIO.new(input) }
elsif input.is_a? Pathname
read_from_io(true) { input.open("r:UTF-8") }
elsif input.is_a? URI
read_from_io(true) { input.open }
else
read_from_io(false) { input }
end
return self
end
# Reads and parses the +io+ returned by the specified block and closes this +io+ if +close_io+
# is true.
def read_from_io(close_io)
io = yield
begin
Parser.new(io).parse.each do |tag|
add_child(tag)
end
ensure
if close_io
io.close rescue IOError
end
end
end
private :read_from_io
# Write this tag out to the given IO or StringIO or String (optionally clipping the root.)
# Returns +output+.
#
# _output_:: an IO or StringIO or a String to write to
# +include_root+:: if true this tag will be written out as the root element, if false only the
# children will be written. False by default.
#
def write(output, include_root = false)
if output.is_a? String
io = StringIO.new(output)
close_io = true # indicates we close the IO ourselves
elsif output.is_a? IO or output.is_a? StringIO
io = output
close_io = false # let the caller close the IO
else
raise ArgumentError, "'output' should be a String or an IO but was #{output.class}"
end
if include_root
io << to_s
else
first = true
children do |child|
io << $/ unless first
first = false
io << child.to_s
end
end
io.close() if close_io
output
end
# Get a String representation of this SDL Tag. This method returns a
# complete description of the Tag's state using SDL (i.e. the output can
# be parsed by #read)
#
# Returns A string representation of this tag using SDL
#
def to_s
to_string
end
# _line_prefix_:: A prefix to insert before every line.
# Returns A string representation of this tag using SDL
#
# TODO: break up long lines using the backslash
#
def to_string(line_prefix = "", indent = "\t")
line_prefix = "" if line_prefix.nil?
s = ""
s << line_prefix
if name == "content" && namespace.empty?
skip_value_space = true
else
skip_value_space = false
s << "#{namespace}:" unless namespace.empty?
s << name
end
# output values
values do |value|
if skip_value_space
skip_value_space = false
else
s << " "
end
s << SDL4R.format(value, true, line_prefix, indent)
end
# output attributes
unless @attributesByNamespace.empty?
all_attributes_hash = attributes
all_attributes_array = all_attributes_hash.sort { |a, b|
namespace1, name1 = a[0].split(':')
namespace1, name1 = "", namespace1 if name1.nil?
namespace2, name2 = b[0].split(':')
namespace2, name2 = "", namespace2 if name2.nil?
diff = namespace1 <=> namespace2
diff == 0 ? name1 <=> name2 : diff
}
all_attributes_array.each do |attribute_name, attribute_value|
s << " " << attribute_name << '=' << SDL4R.format(attribute_value, true)
end
end
# output children
unless @children.empty?
s << " {#{$/}"
children_to_string(line_prefix + indent, s)
s << line_prefix << ?}
end
return s
end
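# As an illustration (invented data, and assuming SDL4R.format quotes
# Strings and prints integers verbatim):
#
# t = Tag.new("server") do
# add_value "main"
# set_attribute("port", 8080)
# new_child("backup")
# end
# t.to_string # => "server \"main\" port=8080 {\n\tbackup\n}"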
# Returns a string representation of the children tags.
#
# _line_prefix_:: A prefix to insert before every line.
# _s_:: a String that receives the string representation
#
# TODO: break up long lines using the backslash
#
def children_to_string(line_prefix = "", s = "")
@children.each do |child|
s << child.to_string(line_prefix) << $/
end
return s
end
# Returns true if this tag (including all of its values, attributes, and
# children) is equivalent to the given tag.
#
# Returns true if the tags are equivalent
#
def eql?(o)
# this is safe because to_string() dumps the full state
return o.is_a?(Tag) && o.to_string == to_string
end
alias_method :==, :eql?
# Returns the hash (based on the output from #to_string)
#
def hash
return to_string.hash
end
# Returns a string containing an XML representation of this tag. Values
# will be represented using _val0, _val1, etc.
#
# _options_:: a hash of the options
#
# === options:
#
# [:line_prefix] a text prefixing each line (default: "")
# [:uri_by_namespace] a Hash giving the URIs for the namespaces
# [:indent] text specifying one indentation (default: "\t")
# [:eol] end of line expression (default: "\n")
# [:omit_null_attributes]
# if true, null/nil attributes are not exported (default: false). Otherwise, they are exported
# as follows:
# tag attr="null"
#
def to_xml_string(options = {})
options = {
:uri_by_namespace => nil,
:indent => "\t",
:line_prefix => "",
:eol => "\n",
:omit_null_attributes => false
}.merge(options)
_to_xml_string(options[:line_prefix], options)
end
protected
# Implementation of #to_xml_string but without the extra-treatment on parameters for default
# values.
def _to_xml_string(line_prefix, options)
eol = options[:eol]
s = ""
s << line_prefix << ?<
s << "#{namespace}:" unless namespace.empty?
s << name
# output namespace declarations
uri_by_namespace = options[:uri_by_namespace]
if uri_by_namespace
uri_by_namespace.each_pair do |namespace, uri|
if namespace
s << " xmlns:#{namespace}=\"#{uri}\""
else
s << " xmlns=\"#{uri}\""
end
end
end
# output values
unless @values.empty?
i = 0
@values.each do |value|
s << " _val" << i.to_s << "=\"" << SDL4R.format(value, false) << "\""
i += 1
end
end
# output attributes
if has_attribute?
omit_null_attributes = options[:omit_null_attributes]
attributes do |attribute_namespace, attribute_name, attribute_value|
unless omit_null_attributes and attribute_value.nil?
s << " "
s << "#{attribute_namespace}:" unless attribute_namespace.empty?
s << attribute_name << "=\"" << SDL4R.format(attribute_value, false) << ?"
end
end
end
if @children.empty?
s << "/>"
else
s << ">" << eol
@children.each do |child|
s << child._to_xml_string(line_prefix + options[:indent], options) << eol
end
s << line_prefix << "</"
s << "#{namespace}:" unless namespace.empty?
s << name << ?>
end
return s
end
end
|
lambda2/rice_cooker | lib/rice_cooker/base/helpers.rb | RiceCooker.Helpers.format_additional_param | ruby | def format_additional_param(additional, context_format = 'searching')
if additional.is_a? Hash
additional = additional.map do |field, value|
if value.is_a?(Hash)
value = {
proc: nil,
all: [],
description: ''
}.merge(value)
elsif value.is_a? Array
value = {
proc: value.try(:at, 0),
all: value.try(:at, 1) || [],
description: value.try(:at, 2) || ''
}
elsif value.is_a? Proc
value = {
proc: value,
all: [],
description: ''
}
else
raise "Unable to format additional #{context_format} params (got #{additional})"
end
[field, value]
end.to_h
end
additional
end | We will try to keep a common format, which is:
```
search: {
proc: -> (values) { * do stuff with the values * },
all: ['the', 'accepted', 'values'],
description: "The description shown in the doc"
}
```
We therefore transform `additional` into the format above | train | https://github.com/lambda2/rice_cooker/blob/b7ce285d3bd76ae979111f0374c5a43815473332/lib/rice_cooker/base/helpers.rb#L250-L278 | module Helpers
extend ActiveSupport::Concern
# Overridable method for available sortable fields
def sortable_fields_for(model)
if model.respond_to?(:sortable_fields)
model.sortable_fields.map(&:to_sym)
elsif model.respond_to?(:column_names)
model.column_names.map(&:to_sym)
else
[]
end
end
# Overridable method for available filterable fields
def filterable_fields_for(model)
if model.respond_to?(:filterable_fields)
model.filterable_fields.map(&:to_sym)
elsif model.respond_to?(:column_names)
model.column_names.map(&:to_sym)
else
[]
end
end
# Overridable method for available searchable fields
def searchable_fields_for(model)
if model.respond_to?(:searchable_fields)
model.searchable_fields.map(&:to_sym)
else
filterable_fields_for(model)
end
end
# Overridable method for available fuzzy fields
def fuzzy_fields_for(model)
if model.respond_to?(:fuzzy_fields)
model.fuzzy_fields.map(&:to_sym)
else
searchable_fields_for(model)
end
end
# Overridable method for available rangeable fields
def rangeable_fields_for(model)
if model.respond_to?(:rangeable_fields)
model.rangeable_fields.map(&:to_sym)
else
filterable_fields_for(model)
end
end
# ------------------------ Sort helpers --------------------
# model -> resource_class with inherited resources
def parse_sorting_param(sorting_param, model)
return {} unless sorting_param.present?
sorting_params = CSV.parse_line(URI.unescape(sorting_param)).collect do |sort|
sorting_param = if sort.start_with?('-')
{ field: sort[1..-1].to_s.to_sym, direction: :desc }
else
{ field: sort.to_s.to_sym, direction: :asc }
end
check_sorting_param(model, sorting_param)
sorting_param
end
sorting_params.map { |par| [par[:field], par[:direction]] }.to_h
end
def check_sorting_param(model, sorting_param)
sort_field = sorting_param[:field]
sortable_fields = sortable_fields_for(model)
unless sortable_fields.include? sort_field.to_sym
raise InvalidSortException, "The #{sort_field} field is not sortable"
end
end
def param_from_defaults(sorting_params)
sorting_params.map { |k, v| "#{v == :desc ? '-' : ''}#{k}" }.join(',')
end
def apply_sort_to_collection(collection, sorting_params)
return collection unless collection.any?
# p "Before apply: #{sorting_params.inspect}"
collection.order(sorting_params)
end
# ------------------------ Filter helpers --------------------
# Transforms the url param into a usable hash
def parse_filtering_param(filtering_param, allowed_params)
return {} unless filtering_param.present?
fields = {}
# Extract the fields for each type from the fields parameters
if filtering_param.is_a?(Hash)
filtering_param.each do |field, value|
resource_fields = value.split(',') unless value.nil? || value.empty?
fields[field.to_sym] = resource_fields
end
else
raise InvalidFilterException, "Invalid filter format for #{filtering_param}"
end
check_filtering_param(fields, allowed_params)
fields
end
# Our little barrier <3
def check_filtering_param(filtering_param, allowed)
🔞 = filtering_param.keys.map(&:to_sym) - allowed.map(&:to_sym)
raise InvalidFilterException, "Attributes #{🔞.map(&:to_s).to_sentence} doesn't exists or aren't filterables. Available filters are: #{allowed.to_sentence}" if 🔞.any?
end
# We will try to keep a common format, which is:
#
# ```
# filter: {
# proc: -> (values) { * do stuff with the values * },
# all: ['the', 'accepted', 'values'],
# description: "The description shown in the doc"
# }
# ```
#
# We therefore transform `additional` into the format above
#
def format_additional_param(additional, context_format = 'filtering')
if additional.is_a? Hash
additional = additional.map do |field, value|
if value.is_a?(Hash)
value = {
proc: nil,
all: [],
description: ''
}.merge(value)
elsif value.is_a? Array
value = {
proc: value.try(:at, 0),
all: value.try(:at, 1) || [],
description: value.try(:at, 2) || ''
}
elsif value.is_a? Proc
value = {
proc: value,
all: [],
description: ''
}
else
raise "Unable to format additional #{context_format} params (got #{additional})"
end
[field, value]
end.to_h
end
additional
end
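# For example, the three accepted shapes normalize to the same hash
# (the field name and proc below are illustrative):
#
# only_active = ->(values) { where(state: values) }
# format_additional_param(state: only_active)
# format_additional_param(state: [only_active, [], ''])
# format_additional_param(state: { proc: only_active })
# # => { state: { proc: only_active, all: [], description: '' } }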
def apply_filter_to_collection(collection, filtering_params, additional = {})
return collection if collection.nil?
filtering_params.each do |field, value|
puts "Filtering param #{field} -> #{value}"
if additional.key?(field) && additional[field].key?(:proc)
# If values were provided, check that they match
if additional[field].key?(:all) && additional[field][:all].try(:any?)
allowed = additional[field][:all].map(&:to_s)
raise InvalidFilterValueException, "Value #{(value - allowed).to_sentence} is not allowed for filter #{field}, can be #{allowed.to_sentence}" if (value - allowed).any?
end
collection = collection.instance_exec(value, &(additional[field][:proc]))
elsif field =~ /_at$/ && (value.is_a?(String) || value.is_a?(Array))
collection = collection.where("DATE(#{collection.model.table_name}.#{field}) = ?", [*value])
elsif value.is_a?(String) || value.is_a?(Array)
collection = collection.where("#{collection.model.table_name}.#{field}" => value)
elsif value.is_a?(Hash) && value.key?(:proc)
collection
end
end
collection
end
# ------------------------ Search helpers --------------------
# Transforms the url param into a usable hash
def parse_searching_param(searching_param, allowed_params)
return {} unless searching_param.present?
fields = {}
# Extract the fields for each type from the fields parameters
if searching_param.is_a?(Hash)
searching_param.each do |field, value|
resource_fields = value.split(',') unless value.nil? || value.empty?
fields[field.to_sym] = resource_fields
end
else
raise InvalidSearchException, "Invalid search format for #{searching_param}"
end
check_searching_param(fields, allowed_params)
fields
end
# Our little barrier <3
def check_searching_param(searching_param, allowed)
🔞 = searching_param.keys.map(&:to_sym) - allowed.map(&:to_sym)
raise InvalidSearchException, "Attributes #{🔞.map(&:to_s).to_sentence} doesn't exists or aren't searchables. Available searchs are: #{allowed.to_sentence}" if 🔞.any?
end
# We will try to keep a common format, which is:
#
# ```
# search: {
# proc: -> (values) { * do stuff with the values * },
# all: ['the', 'accepted', 'values'],
# description: "The description shown in the doc"
# }
# ```
#
# We therefore transform `additional` into the format above
#
def reduce_where(col, field, value)
reducer = nil
value.each do |v|
query = col.model.arel_table[field.to_sym].matches("%#{v.to_s}%")
reducer = (reducer ? reducer.or(query) : query)
end
col.where(reducer)
end
def reduce_fields_where(col, fields, value)
reducer = nil
fields.each do |f|
case col.model.columns.select{|e| e.name.to_sym == f.to_sym}.first.type
when :string
query = col.model.arel_table[f.to_sym].matches("%#{value.to_s}%")
when :integer
query = col.model.arel_table[f.to_sym].eq(value.to_i)
# when :boolean
# query = col.model.where(false)
else
query = false
end
reducer = (reducer ? reducer.or(query) : query) if query
end
col.where(reducer)
end
def apply_search_to_collection(col, searching_params, additional = {})
return col if col.nil?
searching_params.each do |field, value|
if additional.key?(field) && additional[field].key?(:proc)
col = col.instance_exec(value, &(additional[field][:proc]))
elsif value.is_a?(String)
col = (col.where(col.model.arel_table[field.to_sym].matches("%#{value}%")) rescue col)
elsif value.is_a?(Array)
col = reduce_where(col, field, value)
elsif value.is_a?(Hash) && value.key?(:proc)
col
end
end
col
end
# ------------------------ Range helpers --------------------
# Transforms the url param into a usable hash
def parse_ranged_param(ranged_param, allowed_params)
return {} unless ranged_param.present?
fields = {}
# Extract the fields for each type from the fields parameters
if ranged_param.is_a?(Hash)
ranged_param.each do |field, value|
resource_fields = value.split(',') unless value.nil? || value.empty?
raise InvalidRangeException, "Invalid range format for #{ranged_param}. Too many arguments for filter (#{resource_fields})." if resource_fields.length > 2
raise InvalidRangeException, "Invalid range format for #{ranged_param}. Begin and end must be separated by a comma (,)." if resource_fields.length < 2
fields[field.to_sym] = resource_fields
end
else
raise InvalidRangeException, "Invalid range format for #{ranged_param}"
end
check_ranged_param(fields, allowed_params)
fields
end
# Our little barrier <3
def check_ranged_param(ranged_param, allowed)
🔞 = ranged_param.keys.map(&:to_sym) - allowed.map(&:to_sym)
raise InvalidRangeException, "Attributes #{🔞.map(&:to_s).to_sentence} doesn't exists or aren't rangeables. Available ranges are: #{allowed.to_sentence}" if 🔞.any?
end
def apply_range_to_collection(collection, ranged_params, additional = {})
return collection if collection.nil?
ranged_params.each do |field, value|
if additional.key?(field) && additional[field].key?(:proc)
# If values were provided, check that they match
if additional[field].key?(:all) && additional[field][:all].try(:any?)
allowed = additional[field][:all].map(&:to_s)
raise InvalidRangeValueException, "
Value #{(value - allowed).to_sentence} is not allowed for range #{field}, can be #{allowed.to_sentence}
" if (value - allowed).any?
end
collection = collection.instance_exec(value.try(:first), value.try(:last), &(additional[field][:proc]))
elsif value.is_a? Array
from, to = value.slice(0, 2)
begin
collection = collection.where("#{collection.model.table_name}.#{field}" => from..to)
rescue ArgumentError
raise InvalidRangeValueException, "
Unable to create a range between values '#{from}' and '#{to}'
"
end
elsif value.is_a?(Hash) && value.key?(:proc)
collection
end
end
collection
end
end
|
mongodb/mongoid | lib/mongoid/threaded.rb | Mongoid.Threaded.set_current_scope | ruby | def set_current_scope(scope, klass)
if scope.nil?
if Thread.current[CURRENT_SCOPE_KEY]
Thread.current[CURRENT_SCOPE_KEY].delete(klass)
Thread.current[CURRENT_SCOPE_KEY] = nil if Thread.current[CURRENT_SCOPE_KEY].empty?
end
else
Thread.current[CURRENT_SCOPE_KEY] ||= {}
Thread.current[CURRENT_SCOPE_KEY][klass] = scope
end
end | Set the current Mongoid scope. Safe for multi-model scope chaining.
@example Set the scope.
Threaded.set_current_scope(scope, klass)
@param [ Criteria ] scope The current scope.
@param [ Class ] klass The current model class.
@return [ Criteria ] The scope.
@since 5.0.1 | train | https://github.com/mongodb/mongoid/blob/56976e32610f4c2450882b0bfe14da099f0703f4/lib/mongoid/threaded.rb#L263-L273 | module Threaded
DATABASE_OVERRIDE_KEY = "[mongoid]:db-override"
# Constant for the key to store clients.
#
# @since 5.0.0
CLIENTS_KEY = "[mongoid]:clients"
# The key to override the client.
#
# @since 5.0.0
CLIENT_OVERRIDE_KEY = "[mongoid]:client-override"
# The key for the current thread's scope stack.
#
# @since 2.0.0
CURRENT_SCOPE_KEY = "[mongoid]:current-scope"
AUTOSAVES_KEY = "[mongoid]:autosaves"
VALIDATIONS_KEY = "[mongoid]:validations"
STACK_KEYS = Hash.new do |hash, key|
hash[key] = "[mongoid]:#{key}-stack"
end
extend self
# Begin entry into a named thread local stack.
#
# @example Begin entry into the stack.
# Threaded.begin_execution(:create)
#
# @param [ String ] name The name of the stack
#
# @return [ true ] True.
#
# @since 2.4.0
def begin_execution(name)
stack(name).push(true)
end
# Get the global database override.
#
# @example Get the global database override.
# Threaded.database_override
#
# @return [ String, Symbol ] The override.
#
# @since 3.0.0
def database_override
Thread.current[DATABASE_OVERRIDE_KEY]
end
# Set the global database override.
#
# @example Set the global database override.
# Threaded.database_override = :testing
#
# @param [ String, Symbol ] name The global override name.
#
# @return [ String, Symbol ] The override.
#
# @since 3.0.0
def database_override=(name)
Thread.current[DATABASE_OVERRIDE_KEY] = name
end
# Are in the middle of executing the named stack
#
# @example Are we in the stack execution?
# Threaded.executing?(:create)
#
# @param [ Symbol ] name The name of the stack
#
# @return [ true ] If the stack is being executed.
#
# @since 2.4.0
def executing?(name)
!stack(name).empty?
end
# Exit from a named thread local stack.
#
# @example Exit from the stack.
# Threaded.exit_execution(:create)
#
# @param [ Symbol ] name The name of the stack
#
# @return [ true ] True.
#
# @since 2.4.0
def exit_execution(name)
stack(name).pop
end
# Get the named stack.
#
# @example Get a stack by name
# Threaded.stack(:create)
#
# @param [ Symbol ] name The name of the stack
#
# @return [ Array ] The stack.
#
# @since 2.4.0
def stack(name)
Thread.current[STACK_KEYS[name]] ||= []
end
# Begin autosaving a document on the current thread.
#
# @example Begin autosave.
# Threaded.begin_autosave(doc)
#
# @param [ Document ] document The document to autosave.
#
# @since 3.0.0
def begin_autosave(document)
autosaves_for(document.class).push(document._id)
end
# Begin validating a document on the current thread.
#
# @example Begin validation.
# Threaded.begin_validate(doc)
#
# @param [ Document ] document The document to validate.
#
# @since 2.1.9
def begin_validate(document)
validations_for(document.class).push(document._id)
end
# Exit autosaving a document on the current thread.
#
# @example Exit autosave.
# Threaded.exit_autosave(doc)
#
# @param [ Document ] document The document to autosave.
#
# @since 3.0.0
def exit_autosave(document)
autosaves_for(document.class).delete_one(document._id)
end
# Exit validating a document on the current thread.
#
# @example Exit validation.
# Threaded.exit_validate(doc)
#
# @param [ Document ] document The document to validate.
#
# @since 2.1.9
def exit_validate(document)
validations_for(document.class).delete_one(document._id)
end
# Begin suppressing default scopes for given model on the current thread.
#
# @example Begin without default scope stack.
# Threaded.begin_without_default_scope(klass)
#
# @param [ Class ] klass The model to suppress default scoping on.
#
# @api private
def begin_without_default_scope(klass)
stack(:without_default_scope).push(klass)
end
# Exit suppressing default scopes for given model on the current thread.
#
# @example Exit without default scope stack.
# Threaded.exit_without_default_scope(klass)
#
# @param [ Class ] klass The model to unsuppress default scoping on.
#
# @api private
def exit_without_default_scope(klass)
stack(:without_default_scope).delete(klass)
end
# Get the global client override.
#
# @example Get the global client override.
# Threaded.client_override
#
# @return [ String, Symbol ] The override.
#
# @since 5.0.0
def client_override
Thread.current[CLIENT_OVERRIDE_KEY]
end
# Set the global client override.
#
# @example Set the global client override.
# Threaded.client_override = :testing
#
# @param [ String, Symbol ] name The global override name.
#
# @return [ String, Symbol ] The override.
#
# @since 3.0.0
def client_override=(name)
Thread.current[CLIENT_OVERRIDE_KEY] = name
end
# Get the current Mongoid scope.
#
# @example Get the scope.
# Threaded.current_scope(klass)
# Threaded.current_scope
#
# @param [ Klass ] klass The class type of the scope.
#
# @return [ Criteria ] The scope.
#
# @since 5.0.0
def current_scope(klass = nil)
if klass && Thread.current[CURRENT_SCOPE_KEY].respond_to?(:keys)
Thread.current[CURRENT_SCOPE_KEY][
Thread.current[CURRENT_SCOPE_KEY].keys.find { |k| k <= klass }
]
else
Thread.current[CURRENT_SCOPE_KEY]
end
end
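# For instance, since the lookup above walks the stored keys with
# +k <= klass+, a scope registered for a subclass is also found when
# querying one of its ancestors (Band/RockBand are illustrative models):
#
# Threaded.set_current_scope(RockBand.where(active: true), RockBand)
# Threaded.current_scope(Band) # => the RockBand criteria, as RockBand <= Band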
# Set the current Mongoid scope.
#
# @example Set the scope.
# Threaded.current_scope = scope
#
# @param [ Criteria ] scope The current scope.
#
# @return [ Criteria ] The scope.
#
# @since 5.0.0
def current_scope=(scope)
Thread.current[CURRENT_SCOPE_KEY] = scope
end
# Set the current Mongoid scope. Safe for multi-model scope chaining.
#
# @example Set the scope.
# Threaded.set_current_scope(scope, klass)
#
# @param [ Criteria ] scope The current scope.
# @param [ Class ] klass The current model class.
#
# @return [ Criteria ] The scope.
#
# @since 5.0.1
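#
# Note that passing +nil+ unsets the entry for that class and clears the
# thread local once the last entry is gone (criteria is a placeholder):
#
# Threaded.set_current_scope(criteria, Band)
# Threaded.set_current_scope(nil, Band)
# Thread.current[CURRENT_SCOPE_KEY] # => nil again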
# Is the given klass' default scope suppressed on the current thread?
#
# @example Is the given klass' default scope suppressed?
# Threaded.without_default_scope?(klass)
#
# @param [ Class ] klass The model to check for default scope suppression.
#
# @api private
def without_default_scope?(klass)
stack(:without_default_scope).include?(klass)
end
# Is the document autosaved on the current thread?
#
# @example Is the document autosaved?
# Threaded.autosaved?(doc)
#
# @param [ Document ] document The document to check.
#
# @return [ true, false ] If the document is autosaved.
#
# @since 2.1.9
def autosaved?(document)
autosaves_for(document.class).include?(document._id)
end
# Is the document validated on the current thread?
#
# @example Is the document validated?
# Threaded.validated?(doc)
#
# @param [ Document ] document The document to check.
#
# @return [ true, false ] If the document is validated.
#
# @since 2.1.9
def validated?(document)
validations_for(document.class).include?(document._id)
end
# Get all autosaves on the current thread.
#
# @example Get all autosaves.
# Threaded.autosaves
#
# @return [ Hash ] The current autosaves.
#
# @since 3.0.0
def autosaves
Thread.current[AUTOSAVES_KEY] ||= {}
end
# Get all validations on the current thread.
#
# @example Get all validations.
# Threaded.validations
#
# @return [ Hash ] The current validations.
#
# @since 2.1.9
def validations
Thread.current[VALIDATIONS_KEY] ||= {}
end
# Get all autosaves on the current thread for the class.
#
# @example Get all autosaves.
# Threaded.autosaves_for(Person)
#
# @param [ Class ] klass The class to check.
#
# @return [ Array ] The current autosaves.
#
# @since 3.0.0
def autosaves_for(klass)
autosaves[klass] ||= []
end
# Get all validations on the current thread for the class.
#
# @example Get all validations.
# Threaded.validations_for(Person)
#
# @param [ Class ] klass The class to check.
#
# @return [ Array ] The current validations.
#
# @since 2.1.9
def validations_for(klass)
validations[klass] ||= []
end
# Cache a session for this thread.
#
# @example Save a session for this thread.
# Threaded.set_session(session)
#
# @param [ Mongo::Session ] session The session to save.
#
# @since 6.4.0
def set_session(session)
Thread.current[:session] = session
end
# Get the cached session for this thread.
#
# @example Get the session for this thread.
# Threaded.get_session
#
# @return [ Mongo::Session, nil ] The session cached on this thread or nil.
#
# @since 6.4.0
def get_session
Thread.current[:session]
end
# Clear the cached session for this thread.
#
# @example Clear this thread's session.
# Threaded.clear_session
#
# @return [ nil ]
#
# @since 6.4.0
def clear_session
session = get_session
session.end_session if session
Thread.current[:session] = nil
end
end
|
kontena/kontena | agent/lib/kontena/network_adapters/weave.rb | Kontena::NetworkAdapters.Weave.migrate_container | ruby | def migrate_container(container_id, cidr, attached_cidrs)
# first remove any existing addresses
# this is required, since weave will not attach if the address already exists, but with a different netmask
attached_cidrs.each do |attached_cidr|
if cidr != attached_cidr
warn "Migrate container=#{container_id} from cidr=#{attached_cidr}"
@executor_pool.detach(container_id, attached_cidr)
end
end
# attach with the correct address
self.attach_container(container_id, cidr)
end | Attach container to weave with given CIDR address, first detaching any existing mismatching addresses
@param [String] container_id
@param [String] cidr '10.81.X.Y/16'
@param [Array<String>] attached_cidrs ['10.81.X.Y/19'] | train | https://github.com/kontena/kontena/blob/5cb5b4457895985231ac88e78c8cbc5a8ffb5ec7/agent/lib/kontena/network_adapters/weave.rb#L312-L324 | class Weave
include Celluloid
include Celluloid::Notifications
include Kontena::Observer::Helper
include Kontena::Helpers::IfaceHelper
include Kontena::Helpers::WeaveHelper
include Kontena::Logging
DEFAULT_NETWORK = 'kontena'.freeze
finalizer :finalizer
def initialize(autostart = true)
@images_exist = false
@starting = false
@started = false
info 'initialized'
subscribe('ipam:start', :on_ipam_start)
async.ensure_images if autostart
@ipam_client = IpamClient.new
# Default size of pool is number of CPU cores, 2 for 1 core machine
@executor_pool = WeaveExecutor.pool(args: [autostart])
async.start if autostart
end
def start
observe(Actor[:node_info_worker].observable) do |node|
async.launch(node)
end
end
def finalizer
@executor_pool.terminate if @executor_pool.alive?
rescue
# If Celluloid manages to terminate the pool (through GC or by explicit shutdown) it will raise
end
def api_client
@api_client ||= Excon.new("http://127.0.0.1:6784")
end
def weave_api_ready?
# getting status should be pretty fast, set low timeouts to fail faster
response = api_client.get(path: '/status', :connect_timeout => 5, :read_timeout => 5)
response.status == 200
rescue Excon::Error
false
end
# @return [Boolean]
def running?
return false unless weave_container_running?
return false unless weave_api_ready?
return false unless interface_ip('weave')
true
end
def network_ready?
return false unless running?
return false unless Actor[:ipam_plugin_launcher].running?
true
end
# @return [Boolean]
def weave_container_running?
weave = Docker::Container.get('weave') rescue nil
return false if weave.nil?
return false unless weave.running?
true
end
# @return [Boolean]
def images_exist?
@images_exist == true
end
# @return [Boolean]
def already_started?
@started == true
end
# @return [Boolean]
def starting?
@starting == true
end
# @param [Hash] opts
def modify_create_opts(opts)
ensure_weave_wait
image = Docker::Image.get(opts['Image'])
image_config = image.info['Config']
cmd = []
if opts['Entrypoint']
if opts['Entrypoint'].is_a?(Array)
cmd = cmd + opts['Entrypoint']
else
cmd = cmd + [opts['Entrypoint']]
end
end
if !opts['Entrypoint'] && image_config['Entrypoint'] && image_config['Entrypoint'].size > 0
cmd = cmd + image_config['Entrypoint']
end
if opts['Cmd'] && opts['Cmd'].size > 0
if opts['Cmd'].is_a?(Array)
cmd = cmd + opts['Cmd']
else
cmd = cmd + [opts['Cmd']]
end
elsif image_config['Cmd'] && image_config['Cmd'].size > 0
cmd = cmd + image_config['Cmd']
end
opts['Entrypoint'] = ['/w/w']
opts['Cmd'] = cmd
modify_host_config(opts)
# IPAM
overlay_cidr = @ipam_client.reserve_address(DEFAULT_NETWORK)
info "Create container=#{opts['name']} in network=#{DEFAULT_NETWORK} with overlay_cidr=#{overlay_cidr}"
opts['Labels']['io.kontena.container.overlay_cidr'] = overlay_cidr
opts['Labels']['io.kontena.container.overlay_network'] = DEFAULT_NETWORK
opts
end
# @param [Hash] opts
def modify_host_config(opts)
host_config = opts['HostConfig'] || {}
host_config['VolumesFrom'] ||= []
host_config['VolumesFrom'] << "weavewait-#{WEAVE_VERSION}:ro"
dns = interface_ip('docker0')
if dns && host_config['NetworkMode'].to_s != 'host'.freeze
host_config['Dns'] = [dns]
end
opts['HostConfig'] = host_config
end
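# Illustration of the mutation (the opts hash is representative only):
#
# opts = { 'name' => 'app-1', 'HostConfig' => {} }
# modify_host_config(opts)
# opts['HostConfig']['VolumesFrom'] # => ["weavewait-#{WEAVE_VERSION}:ro"]
# # 'Dns' is also set to the docker0 bridge IP unless NetworkMode is 'host'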
# @param [String] topic
# @param [Node] node
def on_ipam_start(topic, node)
ensure_default_pool(node.grid)
Celluloid::Notifications.publish('network:ready', nil)
end
# Ensure that the host weave bridge is exposed using the given CIDR address,
# and only the given CIDR address
#
# @param [String] cidr '10.81.0.X/16'
def ensure_exposed(cidr)
# configure new address
# these will be added alongside any existing addresses
if @executor_pool.expose(cidr)
info "Exposed host node at cidr=#{cidr}"
else
error "Failed to expose host node at cidr=#{cidr}"
end
# cleanup any old addresses
@executor_pool.ps('weave:expose') do |name, mac, *cidrs|
cidrs.each do |exposed_cidr|
if exposed_cidr != cidr
warn "Migrating host node from cidr=#{exposed_cidr}"
@executor_pool.hide(exposed_cidr)
end
end
end
end
def ensure_default_pool(grid_info)
grid_subnet = IPAddr.new(grid_info['subnet'])
_, upper = grid_subnet.split
info "network and ipam ready, ensuring default network with subnet=#{grid_subnet.to_cidr} iprange=#{upper.to_cidr}"
@default_pool = @ipam_client.reserve_pool(DEFAULT_NETWORK, grid_subnet.to_cidr, upper.to_cidr)
end
def launch(node)
wait_until("weave is ready to start") { images_exist? && !starting? }
@starting = true
restarting = false
weave = Docker::Container.get('weave') rescue nil
if weave && config_changed?(weave, node)
info "weave image or configuration has been changed, restarting"
restarting = true
weave.delete(force: true)
weave = nil
end
peer_ips = node.peer_ips || []
trusted_subnets = node.grid['trusted_subnets']
until weave && weave.running? do
exec_params = [
'--local', 'launch-router', '--ipalloc-range', '', '--dns-domain', 'kontena.local',
'--password', ENV['KONTENA_TOKEN'], '--conn-limit', '0'
]
exec_params += ['--trusted-subnets', trusted_subnets.join(',')] if trusted_subnets
@executor_pool.execute(exec_params)
weave = Docker::Container.get('weave') rescue nil
wait_until("weave started", timeout: 10, interval: 1) {
weave && weave.running?
}
if weave.nil? || !weave.running?
@executor_pool.execute(['--local', 'reset'])
end
end
attach_router unless interface_ip('weave')
connect_peers(peer_ips)
info "using trusted subnets: #{trusted_subnets.join(',')}" if trusted_subnets.size > 0 && !already_started?
post_start(node)
if !already_started?
# only publish once on agent boot, or after a crash and actor restart
Celluloid::Notifications.publish('network_adapter:start', node)
elsif restarting
Celluloid::Notifications.publish('network_adapter:restart', node)
end
@started = true
node
rescue => exc
error "#{exc.class.name}: #{exc.message}"
error exc.backtrace.join("\n")
ensure
@starting = false
end
def attach_router
info "attaching router"
@executor_pool.execute(['--local', 'attach-router'])
end
# @param [Array<String>] peer_ips
def connect_peers(peer_ips)
if peer_ips.size > 0
@executor_pool.execute(['--local', 'connect', '--replace'] + peer_ips)
info "router connected to peers #{peer_ips.join(', ')}"
else
info "router does not have any known peers"
end
end
# @param [Node] node
def post_start(node)
grid_subnet = IPAddr.new(node.grid['subnet'])
overlay_ip = node.overlay_ip
if grid_subnet && overlay_ip
weave_cidr = "#{overlay_ip}/#{grid_subnet.prefixlen}"
ensure_exposed(weave_cidr)
end
end
# @param [Docker::Container] weave
# @param [Node] node
def config_changed?(weave, node)
return true if weave.config['Image'].split(':')[1] != WEAVE_VERSION
cmd = Hash[*weave.config['Cmd'].flatten(1)]
return true if cmd['--trusted-subnets'] != node.grid['trusted_subnets'].to_a.join(',')
return true if cmd['--conn-limit'].nil?
false
end
# Inspect current state of attached containers
#
# @return [Hash<String, String>] container_id[0..12] => [overlay_cidr]
def get_containers
containers = { }
@executor_pool.ps() do |id, mac, *cidrs|
next if id == 'weave:expose'
containers[id] = cidrs
end
containers
end
# Attach container to weave with given CIDR address
#
# @param [String] container_id
# @param [String] cidr '10.81.X.Y/16'
def attach_container(container_id, cidr)
info "Attach container=#{container_id} at cidr=#{cidr}"
@executor_pool.async.attach(container_id, cidr)
end
# Attach container to weave with given CIDR address, first detaching any existing mismatching addresses
#
# @param [String] container_id
# @param [String] cidr '10.81.X.Y/16'
# @param [Array<String>] attached_cidrs ['10.81.X.Y/19']
# Remove container from weave network
#
# @param [String] container_id may not exist anymore
# @param [String] overlay_network name of the overlay network
# @param [String] overlay_cidr the container's overlay CIDR address
def remove_container(container_id, overlay_network, overlay_cidr)
info "Remove container=#{container_id} from network=#{overlay_network} at cidr=#{overlay_cidr}"
@ipam_client.release_address(overlay_network, overlay_cidr)
rescue IpamError => error
# Cleanup will take care of these later on
warn "Failed to release container=#{container_id} from network=#{overlay_network} at cidr=#{overlay_cidr}: #{error}"
end
private
def ensure_images
images = [
weave_image
]
images.each do |image|
unless Docker::Image.exist?(image)
info "pulling #{image}"
Docker::Image.create({'fromImage' => image})
sleep 1 until Docker::Image.exist?(image)
info "image #{image} pulled "
end
end
@images_exist = true
end
def ensure_weave_wait
sleep 1 until images_exist?
container_name = "weavewait-#{WEAVE_VERSION}"
weave_wait = Docker::Container.get(container_name) rescue nil
unless weave_wait
Docker::Container.create(
'name' => container_name,
'Image' => weave_exec_image,
'Entrypoint' => ['/bin/false'],
'Labels' => {
'weavevolumes' => ''
},
'Volumes' => {
'/w' => {},
'/w-noop' => {},
'/w-nomcast' => {}
}
)
end
end
end
|
bitbucket-rest-api/bitbucket | lib/bitbucket_rest_api/issues.rb | BitBucket.Issues.edit | ruby | def edit(user_name, repo_name, issue_id, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of issue_id
normalize! params
# _merge_mime_type(:issue, params)
filter! VALID_ISSUE_PARAM_NAMES, params
put_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/#{issue_id}/", params)
end | Edit an issue
= Inputs
<tt>:title</tt> - Required string
<tt>:content</tt> - Optional string
<tt>:responsible</tt> - Optional string - Login for the user that this issue should be assigned to.
<tt>:milestone</tt> - Optional number - Milestone to associate this issue with
<tt>:version</tt> - Optional number - Version to associate this issue with
<tt>:component</tt> - Optional number - Component to associate this issue with
<tt>:priority</tt> - Optional string - The priority of this issue
* <tt>trivial</tt>
* <tt>minor</tt>
* <tt>major</tt>
* <tt>critical</tt>
* <tt>blocker</tt>
<tt>:status</tt> - Optional string - The status of this issue
* <tt>new</tt>
* <tt>open</tt>
* <tt>resolved</tt>
* <tt>on hold</tt>
* <tt>invalid</tt>
* <tt>duplicate</tt>
* <tt>wontfix</tt>
<tt>:kind</tt> - Optional string - The kind of issue
* <tt>bug</tt>
* <tt>enhancement</tt>
* <tt>proposal</tt>
* <tt>task</tt>
= Examples
bitbucket = BitBucket.new :user => 'user-name', :repo => 'repo-name'
bitbucket.issues.create
"title" => "Found a bug",
"content" => "I'm having a problem with this.",
"responsible" => "octocat",
"milestone" => 1,
"priority" => "blocker" | train | https://github.com/bitbucket-rest-api/bitbucket/blob/e03b6935104d59b3d9a922474c3dc210a5ef76d2/lib/bitbucket_rest_api/issues.rb#L217-L227 | class Issues < API
extend AutoloadHelper
autoload_all 'bitbucket_rest_api/issues',
:Comments => 'comments',
:Components => 'components',
:Milestones => 'milestones'
VALID_ISSUE_PARAM_NAMES = %w[
title
content
component
milestone
version
responsible
priority
status
kind
limit
start
search
sort
reported_by
].freeze
VALID_ISSUE_PARAM_VALUES = {
'priority' => %w[ trivial minor major critical blocker ],
'status' => ['new', 'open', 'resolved', 'on hold', 'invalid', 'duplicate', 'wontfix'],
'kind' => %w[ bug enhancement proposal task ]
}
# Creates new Issues API
def initialize(options = { })
super(options)
end
# Access to Issues::Comments API
def comments
@comments ||= ApiFactory.new 'Issues::Comments'
end
# Access to Issues::Components API
def components
@components ||= ApiFactory.new 'Issues::Components'
end
# Access to Issues::Milestones API
def milestones
@milestones ||= ApiFactory.new 'Issues::Milestones'
end
# List issues for a repository
#
# = Inputs
# <tt>:limit</tt> - Optional - Number of issues to retrieve, default 15
# <tt>:start</tt> - Optional - Issue offset, default 0
# <tt>:search</tt> - Optional - A string to search for
# <tt>:sort</tt> - Optional - Sorts the output by any of the metadata fields
# <tt>:title</tt> - Optional - Contains a filter operation to restrict the list of issues by the issue title
# <tt>:content</tt> - Optional - Contains a filter operation to restrict the list of issues by the issue content
# <tt>:version</tt> - Optional - Contains an is or ! ( is not) filter to restrict the list of issues by the version
# <tt>:milestone</tt> - Optional - Contains an is or ! ( is not) filter to restrict the list of issues by the milestone
# <tt>:component</tt> - Optional - Contains an is or ! ( is not) filter to restrict the list of issues by the component
# <tt>:kind</tt> - Optional - Contains an is or ! ( is not) filter to restrict the list of issues by the issue kind
# <tt>:status</tt> - Optional - Contains an is or ! ( is not) filter to restrict the list of issues by the issue status
# <tt>:responsible</tt> - Optional - Contains an is or ! ( is not) filter to restrict the list of issues by the user responsible
# <tt>:reported_by</tt> - Optional - Contains a filter operation to restrict the list of issues by the user that reported the issue
#
# = Examples
# bitbucket = BitBucket.new :user => 'user-name', :repo => 'repo-name'
# bitbucket.issues.list_repo :filter => 'kind=bug&kind=enhancement'
#
def list_repo(user_name, repo_name, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
filter! VALID_ISSUE_PARAM_NAMES, params
# _merge_mime_type(:issue, params)
assert_valid_values(VALID_ISSUE_PARAM_VALUES, params)
response = get_request("/1.0/repositories/#{user}/#{repo.downcase}/issues", params)
return response.issues unless block_given?
response.issues.each { |el| yield el }
end
alias :list_repository :list_repo
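# A further usage sketch with the block form (the sort field shown is
# illustrative):
#
# bitbucket.issues.list_repo('user-name', 'repo-name',
# 'status' => 'open', 'sort' => 'utc_created_on') do |issue|
# puts issue.title
# end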
# Get a single issue
#
# = Examples
# bitbucket = BitBucket.new
# bitbucket.issues.get 'user-name', 'repo-name', 'issue-id'
#
def get(user_name, repo_name, issue_id, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of issue_id
normalize! params
# _merge_mime_type(:issue, params)
get_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/#{issue_id}", params)
end
alias :find :get
# Delete a single issue
#
# = Examples
# bitbucket = BitBucket.new
# bitbucket.issues.delete 'user-name', 'repo-name', 'issue-id'
#
def delete(user_name, repo_name, issue_id, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of issue_id
normalize! params
# _merge_mime_type(:issue, params)
delete_request("/1.0/repositories/#{user}/#{repo}/issues/#{issue_id}", params)
end
# Create an issue
#
# = Inputs
# <tt>:title</tt> - Required string
# <tt>:content</tt> - Optional string
# <tt>:responsible</tt> - Optional string - Login for the user that this issue should be assigned to.
# <tt>:milestone</tt> - Optional number - Milestone to associate this issue with
# <tt>:version</tt> - Optional number - Version to associate this issue with
# <tt>:component</tt> - Optional number - Component to associate this issue with
# <tt>:priority</tt> - Optional string - The priority of this issue
# * <tt>trivial</tt>
# * <tt>minor</tt>
# * <tt>major</tt>
# * <tt>critical</tt>
# * <tt>blocker</tt>
# <tt>:status</tt> - Optional string - The status of this issue
# * <tt>new</tt>
# * <tt>open</tt>
# * <tt>resolved</tt>
# * <tt>on hold</tt>
# * <tt>invalid</tt>
# * <tt>duplicate</tt>
# * <tt>wontfix</tt>
# <tt>:kind</tt> - Optional string - The kind of issue
# * <tt>bug</tt>
# * <tt>enhancement</tt>
# * <tt>proposal</tt>
# * <tt>task</tt>
#
# = Examples
# bitbucket = BitBucket.new :user => 'user-name', :repo => 'repo-name'
# bitbucket.issues.create
# "title" => "Found a bug",
# "content" => "I'm having a problem with this.",
# "responsible" => "octocat",
# "milestone" => 1,
# "priority" => "blocker"
#
def create(user_name, repo_name, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
_merge_user_into_params!(params) unless params.has_key?('user')
# _merge_mime_type(:issue, params)
filter! VALID_ISSUE_PARAM_NAMES, params
assert_required_keys(%w[ title ], params)
post_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/", params)
end
# Edit an issue
#
# = Inputs
# <tt>:title</tt> - Required string
# <tt>:content</tt> - Optional string
# <tt>:responsible</tt> - Optional string - Login for the user that this issue should be assigned to.
# <tt>:milestone</tt> - Optional number - Milestone to associate this issue with
# <tt>:version</tt> - Optional number - Version to associate this issue with
# <tt>:component</tt> - Optional number - Component to associate this issue with
# <tt>:priority</tt> - Optional string - The priority of this issue
# * <tt>trivial</tt>
# * <tt>minor</tt>
# * <tt>major</tt>
# * <tt>critical</tt>
# * <tt>blocker</tt>
# <tt>:status</tt> - Optional string - The status of this issue
# * <tt>new</tt>
# * <tt>open</tt>
# * <tt>resolved</tt>
# * <tt>on hold</tt>
# * <tt>invalid</tt>
# * <tt>duplicate</tt>
# * <tt>wontfix</tt>
# <tt>:kind</tt> - Optional string - The kind of issue
# * <tt>bug</tt>
# * <tt>enhancement</tt>
# * <tt>proposal</tt>
# * <tt>task</tt>
#
# = Examples
# bitbucket = BitBucket.new :user => 'user-name', :repo => 'repo-name'
# bitbucket.issues.edit 'user-name', 'repo-name', 'issue-id',
# "title" => "Found a bug",
# "content" => "I'm having a problem with this.",
# "responsible" => "octocat",
# "milestone" => 1,
# "priority" => "blocker"
#
end # Issues
|
Falkor/falkorlib | lib/falkorlib/common.rb | FalkorLib.Common.exec_or_exit | ruby | def exec_or_exit(cmd)
status = execute(cmd)
if (status.to_i.nonzero?)
error("The command '#{cmd}' failed with exit status #{status.to_i}")
end
status
end | execute_in_dir
Execute a given command - exit if status != 0 | train | https://github.com/Falkor/falkorlib/blob/1a6d732e8fd5550efb7c98a87ee97fcd2e051858/lib/falkorlib/common.rb#L152-L158 | module Common
module_function
##################################
### Default printing functions ###
##################################
# Print a text in bold
def bold(str)
(COLOR == true) ? Term::ANSIColor.bold(str) : str
end
# Print a text in green
def green(str)
(COLOR == true) ? Term::ANSIColor.green(str) : str
end
# Print a text in red
def red(str)
(COLOR == true) ? Term::ANSIColor.red(str) : str
end
# Print a text in cyan
def cyan(str)
(COLOR == true) ? Term::ANSIColor.cyan(str) : str
end
# Print an info message
def info(str)
puts green("[INFO] " + str)
end
# Print an warning message
def warning(str)
puts cyan("/!\\ WARNING: " + str)
end
alias_method :warn, :warning
## Print an error message and abort
def error(str)
#abort red("*** ERROR *** " + str)
$stderr.puts red("*** ERROR *** " + str)
exit 1
end
## simple helper text to mention a non-implemented feature
def not_implemented
error("NOT YET IMPLEMENTED")
end
##############################
### Interaction functions ###
##############################
## Ask a question
def ask(question, default_answer = '')
return default_answer if FalkorLib.config[:no_interaction]
print "#{question} "
print "[Default: #{default_answer}]" unless default_answer == ''
print ": "
STDOUT.flush
answer = STDIN.gets.chomp
(answer.empty?) ? default_answer : answer
end
## Ask whether or not to really continue
def really_continue?(default_answer = 'Yes')
return if FalkorLib.config[:no_interaction]
pattern = (default_answer =~ /yes/i) ? '(Y|n)' : '(y|N)'
answer = ask( cyan("=> Do you really want to continue #{pattern}?"), default_answer)
exit 0 if answer =~ /n.*/i
end
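## Usage sketch for the two interaction helpers above (prompt texts and
## defaults are illustrative):
# answer = ask("Enter the target environment", 'staging')
# really_continue?('No') # exits with status 0 unless the user answers yes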
############################
### Execution functions ###
############################
## Check for the presence of a given command
def command?(name)
`which #{name}`
$?.success?
end
## Execute a given command, return exit code and print nicely stdout and stderr
def nice_execute(cmd)
puts bold("[Running] #{cmd.gsub(/^\s*/, ' ')}")
stdout, stderr, exit_status = Open3.capture3( cmd )
unless stdout.empty?
stdout.each_line do |line|
print "** [out] #{line}"
$stdout.flush
end
end
unless stderr.empty?
stderr.each_line do |line|
$stderr.print red("** [err] #{line}")
$stderr.flush
end
end
exit_status
end
# Simpler version that use the system call
def execute(cmd)
puts bold("[Running] #{cmd.gsub(/^\s*/, ' ')}")
system(cmd)
$?.exitstatus
end
## Execute in a given directory
def execute_in_dir(path, cmd)
exit_status = 0
Dir.chdir(path) do
exit_status = run %( #{cmd} )
end
exit_status
end # execute_in_dir
## Execute a given command - exit if status != 0
## "Nice" way to present run commands
## Ex: run %{ hostname -f }
def run(cmds)
exit_status = 0
puts bold("[Running]\n#{cmds.gsub(/^\s*/, ' ')}")
$stdout.flush
#puts cmds.split(/\n */).inspect
cmds.split(/\n */).each do |cmd|
next if cmd.empty?
system(cmd.to_s) unless FalkorLib.config.debug
exit_status = $?.exitstatus
end
exit_status
end
## List items from a glob pattern to ask for a unique choice
# Supported options:
# :only_files [boolean]: list only files in the glob
# :only_dirs [boolean]: list only directories in the glob
# :pattern_include [array of strings]: pattern(s) to include for listing
# :pattern_exclude [array of strings]: pattern(s) to exclude for listing
# :text [string]: text to put
def list_items(glob_pattern, options = {})
list = { 0 => 'Exit' }
index = 1
raw_list = { 0 => 'Exit' }
Dir[glob_pattern.to_s].each do |elem|
#puts "=> element '#{elem}' - dir = #{File.directory?(elem)}; file = #{File.file?(elem)}"
next if (!options[:only_files].nil?) && options[:only_files] && File.directory?(elem)
next if (!options[:only_dirs].nil?) && options[:only_dirs] && File.file?(elem)
entry = File.basename(elem)
# unless options[:pattern_include].nil?
# select_entry = false
# options[:pattern_include].each do |pattern|
# #puts "considering pattern '#{pattern}' on entry '#{entry}'"
# select_entry |= entry =~ /#{pattern}/
# end
# next unless select_entry
# end
unless options[:pattern_exclude].nil?
select_entry = false
options[:pattern_exclude].each do |pattern|
#puts "considering pattern '#{pattern}' on entry '#{entry}'"
select_entry |= entry =~ /#{pattern}/
end
next if select_entry
end
#puts "selected entry = '#{entry}'"
list[index] = entry
raw_list[index] = elem
index += 1
end
text = (options[:text].nil?) ? "select the index" : options[:text]
default_idx = (options[:default].nil?) ? 0 : options[:default]
raise SystemExit, 'Empty list' if index == 1
#ap list
#ap raw_list
# puts list.to_yaml
# answer = ask("=> #{text}", "#{default_idx}")
# raise SystemExit.new('exiting selection') if answer == '0'
# raise RangeError.new('Undefined index') if Integer(answer) >= list.length
# raw_list[Integer(answer)]
select_from(list, text, default_idx, raw_list)
end
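## Usage sketch (the glob and options below are illustrative); the returned
## value is the full path of the selected entry:
# file = list_items("#{Dir.pwd}/*.yaml",
#                   :only_files => true,
#                   :text => "select the YAML file to load")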
## Display an indexed list to select an item
def select_from(list, text = 'Select the index', default_idx = 0, raw_list = list)
error "list and raw_list differs in size" if list.size != raw_list.size
l = list
raw_l = raw_list
if list.is_a?(Array)
l = raw_l = { 0 => 'Exit' }
list.each_with_index do |e, idx|
l[idx + 1] = e
raw_l[idx + 1] = raw_list[idx]
end
end
puts l.to_yaml
answer = ask("=> #{text}", default_idx.to_s)
raise SystemExit, 'exiting selection' if answer == '0'
raise RangeError, 'Undefined index' if Integer(answer) >= l.length
raw_l[Integer(answer)]
end # select_from
## Display an indexed list to select multiple items (indexes)
def select_multiple_from(list, text = 'Select the index', default_idx = 1, raw_list = list)
error "list and raw_list differs in size" if list.size != raw_list.size
l = list
raw_l = raw_list
if list.is_a?(Array)
l = raw_l = { 0 => 'Exit', 1 => 'End of selection' }
list.each_with_index do |e, idx|
l[idx + 2] = e
raw_l[idx + 2] = raw_list[idx]
end
end
puts l.to_yaml
choices = Array.new
answer = 0
begin
choices.push(raw_l[Integer(answer)]) if Integer(answer) > 1
answer = ask("=> #{text}", default_idx.to_s)
raise SystemExit, 'exiting selection' if answer == '0'
raise RangeError, 'Undefined index' if Integer(answer) >= l.length
end while Integer(answer) != 1
choices
end # select_multiple_from
###############################
### YAML File loading/store ###
###############################
# Return the yaml content as a Hash object
def load_config(file)
unless File.exist?(file)
raise FalkorLib::Error, "Unable to find the YAML file '#{file}'"
end
loaded = YAML.load_file(file)
unless loaded.is_a?(Hash)
raise FalkorLib::Error, "Corrupted or invalid YAML file '#{file}'"
end
loaded
end
# Store the Hash object as a Yaml file
# Supported options:
# :header [string]: additional info to place in the header of the (stored) file
# :no_interaction [boolean]: do not interact
def store_config(filepath, hash, options = {})
content = "# " + File.basename(filepath) + "\n"
content += "# /!\\ DO NOT EDIT THIS FILE: it has been automatically generated\n"
if options[:header]
options[:header].split("\n").each { |line| content += "# #{line}" }
end
content += hash.to_yaml
show_diff_and_write(content, filepath, options)
# File.open( filepath, 'w') do |f|
# f.print "# ", File.basename(filepath), "\n"
# f.puts "# /!\\ DO NOT EDIT THIS FILE: it has been automatically generated"
# if options[:header]
# options[:header].split("\n").each do |line|
# f.puts "# #{line}"
# end
# end
# f.puts hash.to_yaml
# end
end
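## Usage sketch (path and hash are illustrative):
# store_config('config/app.yaml',
#              { 'name' => 'demo', 'version' => '0.1.0' },
#              :header => "Custom settings for the demo app")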
#################################
### [ERB] template generation ###
#################################
# Bootstrap the destination directory `rootdir` using the template
# directory `templatedir`. the hash table `config` hosts the elements to
# feed ERB files which **should** have the extension .erb.
# The initialization is performed as follows:
# * a rsync process is initiated to duplicate the directory structure
# and the symlinks, and exclude .erb files
# * each erb files (thus with extension .erb) is interpreted, the
# corresponding file is generated without the .erb extension
# Supported options:
# :erb_exclude [array of strings]: pattern(s) to exclude from erb file
# interpretation and thus to copy 'as is'
# :no_interaction [boolean]: do not interact
def init_from_template(templatedir, rootdir, config = {},
options = {
:erb_exclude => [],
:no_interaction => false
})
error "Unable to find the template directory" unless File.directory?(templatedir)
warning "about to initialize/update the directory #{rootdir}"
really_continue? unless options[:no_interaction]
run %( mkdir -p #{rootdir} ) unless File.directory?( rootdir )
run %( rsync --exclude '*.erb' --exclude '.texinfo*' -avzu #{templatedir}/ #{rootdir}/ )
Dir["#{templatedir}/**/*.erb"].each do |erbfile|
relative_outdir = Pathname.new( File.realpath( File.dirname(erbfile) )).relative_path_from Pathname.new(templatedir)
filename = File.basename(erbfile, '.erb')
outdir = File.realpath( File.join(rootdir, relative_outdir.to_s) )
outfile = File.join(outdir, filename)
unless options[:erb_exclude].nil?
exclude_entry = false
options[:erb_exclude].each do |pattern|
exclude_entry |= erbfile =~ /#{pattern}/
end
if exclude_entry
info "copying non-interpreted ERB file"
# copy this file since it has probably been excluded from the rsync process
run %( cp #{erbfile} #{outdir}/ )
next
end
end
# Let's go
info "updating '#{relative_outdir}/#{filename}'"
puts " using ERB template '#{erbfile}'"
write_from_erb_template(erbfile, outfile, config, options)
end
end
###
# ERB generation of the file `outfile` using the source template file `erbfile`
# Supported options:
# :no_interaction [boolean]: do not interact
# :srcdir [string]: source dir for all considered ERB files
def write_from_erb_template(erbfile, outfile, config = {},
options = {
:no_interaction => false
})
erbfiles = (erbfile.is_a?(Array)) ? erbfile : [ erbfile ]
content = ""
erbfiles.each do |f|
erb = (options[:srcdir].nil?) ? f : File.join(options[:srcdir], f)
unless File.exist?(erb)
warning "Unable to find the template ERBfile '#{erb}'"
really_continue? unless options[:no_interaction]
next
end
content += ERB.new(File.read(erb.to_s), nil, '<>').result(binding)
end
# error "Unable to find the template file #{erbfile}" unless File.exists? (erbfile )
# template = File.read("#{erbfile}")
# output = ERB.new(template, nil, '<>')
# content = output.result(binding)
show_diff_and_write(content, outfile, options)
end
## Show the difference between a `content` string and a destination file (using the Diff algorithm).
# Obviously, if the outfile does not exist, no difference is shown.
# Supported options:
# :no_interaction [boolean]: do not interact
# :json_pretty_format [boolean]: write a json content, in pretty format
# :no_commit [boolean]: do not (offer to) commit the changes
# return 0 if nothing happened, 1 if a write has been done
def show_diff_and_write(content, outfile, options = {
:no_interaction => false,
:json_pretty_format => false,
:no_commit => false
})
if File.exist?( outfile )
ref = File.read( outfile )
if options[:json_pretty_format]
ref = JSON.pretty_generate(JSON.parse( IO.read( outfile ) ))
end
if ref == content
warn "Nothing to update"
return 0
end
warn "the file '#{outfile}' already exists and will be overwritten."
warn "Expected difference: \n------"
Diffy::Diff.default_format = :color
puts Diffy::Diff.new(ref, content, :context => 1)
else
watch = (options[:no_interaction]) ? 'no' : ask( cyan(" ==> Do you want to see the generated file before committing the writing (y|N)"), 'No')
puts content if watch =~ /y.*/i
end
proceed = (options[:no_interaction]) ? 'yes' : ask( cyan(" ==> proceed with the writing (Y|n)"), 'Yes')
return 0 if proceed =~ /n.*/i
info("=> writing #{outfile}")
File.open(outfile.to_s, "w+") do |f|
f.write content
end
if FalkorLib::Git.init?(File.dirname(outfile)) && !options[:no_commit]
do_commit = (options[:no_interaction]) ? 'yes' : ask( cyan(" ==> commit the changes (Y|n)"), 'Yes')
FalkorLib::Git.add(outfile, "update content of '#{File.basename(outfile)}'") if do_commit =~ /y.*/i
end
1
end
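## Usage sketch (path and content are illustrative); the return value is 1
## when the file was (re)written, 0 otherwise:
# rc = show_diff_and_write("answer: 42\n", '/tmp/demo.yaml', :no_interaction => true)
# info("file updated") if rc == 1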
## Blind copy of a source file `src` into its destination directory `dstdir`
# Supported options:
# :no_interaction [boolean]: do not interact
# :srcdir [string]: source directory, make the `src` file relative to that directory
# :outfile [string]: alter the outfile name (File.basename(src) by default)
# :no_commit [boolean]: do not (offer to) commit the changes
def write_from_template(src, dstdir, options = {
:no_interaction => false,
:no_commit => false,
:srcdir => '',
:outfile => ''
})
srcfile = (options[:srcdir].nil?) ? src : File.join(options[:srcdir], src)
error "Unable to find the source file #{srcfile}" unless File.exist?( srcfile )
error "The destination directory '#{dstdir}' do not exist" unless File.directory?( dstdir )
dstfile = (options[:outfile].nil?) ? File.basename(srcfile) : options[:outfile]
outfile = File.join(dstdir, dstfile)
content = File.read( srcfile )
show_diff_and_write(content, outfile, options)
end # write_from_template
### RVM init
def init_rvm(rootdir = Dir.pwd, gemset = '')
rvm_files = {
:version => File.join(rootdir, '.ruby-version'),
:gemset => File.join(rootdir, '.ruby-gemset')
}
unless File.exist?( (rvm_files[:version]).to_s)
v = select_from(FalkorLib.config[:rvm][:rubies],
"Select RVM ruby to configure for this directory",
3)
File.open( rvm_files[:version], 'w') do |f|
f.puts v
end
end
unless File.exist?( (rvm_files[:gemset]).to_s)
g = (gemset.empty?) ? ask("Enter RVM gemset name for this directory", File.basename(rootdir)) : gemset
File.open( rvm_files[:gemset], 'w') do |f|
f.puts g
end
end
end
###### normalize_path ######
# Normalize a path and return the absolute path foreseen
# Ex: '.' return Dir.pwd
# Supported options:
# * :relative [boolean] return relative path to the root dir
##
def normalized_path(dir = Dir.pwd, options = {})
rootdir = (FalkorLib::Git.init?(dir)) ? FalkorLib::Git.rootdir(dir) : dir
path = dir
path = Dir.pwd if dir == '.'
path = File.join(Dir.pwd, dir) unless (dir =~ /^\// || (dir == '.'))
if (options[:relative] || options[:relative_to])
root = (options[:relative_to]) ? options[:relative_to] : rootdir
relative_path_to_root = Pathname.new( File.realpath(path) ).relative_path_from Pathname.new(root)
path = relative_path_to_root.to_s
end
path
end # normalize_path
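## Usage sketch (assuming the current directory sits at the root of a git
## repository):
# normalized_path('.') #=> Dir.pwd
# normalized_path('lib', :relative => true) #=> "lib", relative to the git root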
end
|
tmtysk/swf_ruby | lib/swf_ruby/replace_target.rb | SwfRuby.SpriteReplaceTarget.build_control_tags_string | ruby | def build_control_tags_string
str = ""
valid_control_tag_codes = [0, 1, 4, 5, 12, 18, 19, 26, 28, 43, 45, 70, 72]
@control_tags.each do |t|
next unless valid_control_tag_codes.include? t.code
if @idmap[t.refer_character_id]
str << t.rawdata_with_refer_character_id(@idmap[t.refer_character_id])
else
str << t.rawdata
end
end
str
end | Extract only the control tags that will be embedded into the DefineSprite.
When a referenced character_id needs to change, remap it. | train | https://github.com/tmtysk/swf_ruby/blob/97e1e18c4e7b7a67e21378f6e13f40c7b9ea27c8/lib/swf_ruby/replace_target.rb#L106-L118 | class SpriteReplaceTarget < ReplaceTarget
attr_accessor :swf
attr_accessor :frame_count
attr_accessor :define_tags
attr_accessor :control_tags
attr_accessor :idmap
attr_accessor :target_define_tags_string
attr_accessor :target_control_tags_string
attr_reader :target_swf_dumper
def initialize(offset, swf)
@offset = offset
@swf = swf
@target_swf_dumper = SwfDumper.new.dump(@swf)
@frame_count = @target_swf_dumper.header.frame_count
@define_tags = @target_swf_dumper.tags.select { |t| t.define_tag? }
@control_tags = @target_swf_dumper.tags - @define_tags
@idmap = { 65535 => 65535 }
end
def self.build_list_by_instance_var_names(swf_dumper, var_name_to_swf)
from_character_id = (swf_dumper.tags.collect { |t| t.define_tag? ? t.character_id : nil }).compact.max + 1
repl_targets = []
var_name_to_swf.each do |var_name, swf|
repl_target, from_character_id = SwfRuby::SpriteReplaceTarget.build_by_instance_var_name(swf_dumper, var_name, swf, from_character_id)
repl_targets << repl_target
end
repl_targets
end
# Build a SpriteReplaceTarget for the given instance variable name
def self.build_by_instance_var_name(swf_dumper, var_name, swf, from_character_id = nil)
from_character_id ||= (swf_dumper.tags.collect { |t| t.define_tag? ? t.character_id : nil }).compact.max + 1
refer_character_id = nil
sprite_indices = {}
swf_dumper.tags.each_with_index do |t,i|
if t.character_id
sprite_indices[t.character_id] = i
end
if Swf::TAG_TYPE[t.code] == "DefineSprite"
sd = SwfRuby::SpriteDumper.new
sd.dump(t)
sd.tags.each do |t2|
if var_name == t2.refer_character_inst_name
refer_character_id = t2.refer_character_id
break
end
end
else
if var_name == t.refer_character_inst_name
refer_character_id = t.refer_character_id
end
end
break if refer_character_id
end
raise ReplaceTargetError unless refer_character_id
offset = swf_dumper.tags_addresses[sprite_indices[refer_character_id]]
srt = SpriteReplaceTarget.new(offset, swf)
srt.target_define_tags_string, from_character_id = srt.build_define_tags_string(from_character_id)
srt.target_control_tags_string = srt.build_control_tags_string
[srt, from_character_id]
end
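# Usage sketch (the file names and instance name are illustrative):
#   dumper = SwfRuby::SwfDumper.new.dump(File.binread('base.swf'))
#   srt, next_free_id = SpriteReplaceTarget.build_by_instance_var_name(
#     dumper, 'my_sprite_instance', File.binread('replacement.swf'))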
# Extract the Define tags from the replacement SWF, remapping their CharacterIds as we go.
# If the target SWF contains BitmapID references, remap those as well.
# At the same time, build the CharacterId correspondence map.
def build_define_tags_string(from_character_id)
str = ""
@define_tags.each do |t|
if t.character_id
from_character_id += 1
@idmap[t.character_id] = from_character_id
str << t.rawdata_with_define_character_id(@idmap, @idmap[t.character_id])
end
end
[str, from_character_id+1]
end
# Extract only the control tags that will be embedded into the DefineSprite.
# When a referenced character_id needs to change, remap it.
end
|
mongodb/mongoid | lib/mongoid/criteria.rb | Mongoid.Criteria.read | ruby | def read(value = nil)
clone.tap do |criteria|
criteria.options.merge!(read: value)
end
end | Set the read preference for the criteria.
@example Set the read preference.
criteria.read(mode: :primary_preferred)
@param [ Hash ] value The mode preference.
@return [ Criteria ] The cloned criteria.
@since 5.0.0 | train | https://github.com/mongodb/mongoid/blob/56976e32610f4c2450882b0bfe14da099f0703f4/lib/mongoid/criteria.rb#L310-L314 | class Criteria
include Enumerable
include Contextual
include Queryable
include Findable
include Inspectable
include Includable
include Marshalable
include Modifiable
include Scopable
include Clients::Options
include Clients::Sessions
include Options
# Static array used to check with method missing - we only need to ever
# instantiate once.
#
# @since 4.0.0
CHECK = []
attr_accessor :embedded, :klass, :parent_document, :association
# Returns true if the supplied +Enumerable+ or +Criteria+ is equal to the results
# of this +Criteria+ or the criteria itself.
#
# @note This will force a database load when called if an enumerable is passed.
#
# @param [ Object ] other The other +Enumerable+ or +Criteria+ to compare to.
#
# @return [ true, false ] If the objects are equal.
#
# @since 1.0.0
def ==(other)
return super if other.respond_to?(:selector)
entries == other
end
# Needed to properly get a criteria back as json
#
# @example Get the criteria as json.
# Person.where(:title => "Sir").as_json
#
# @param [ Hash ] options Options to pass through to the serializer.
#
# @return [ String ] The JSON string.
def as_json(options = nil)
entries.as_json(options)
end
# Tells the criteria that the cursor that gets returned needs to be
# cached. This is so multiple iterations don't hit the database multiple
# times; however, this is not advisable when working with large data sets
# as the entire result set will be stored in memory.
#
# @example Flag the criteria as cached.
# criteria.cache
#
# @return [ Criteria ] The cloned criteria.
def cache
crit = clone
crit.options.merge!(cache: true)
crit
end
# Will return true if the cache option has been set.
#
# @example Is the criteria cached?
# criteria.cached?
#
# @return [ true, false ] If the criteria is flagged as cached.
def cached?
options[:cache] == true
end
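# A usage sketch of the caching behavior (the model name is illustrative):
#
#   criteria = Band.where(name: "Depeche Mode").cache
#   criteria.each { |band| puts band.name } # first pass queries the database
#   criteria.each { |band| puts band.name } # second pass reuses the cached documents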
# Get the documents from the embedded criteria.
#
# @example Get the documents.
# criteria.documents
#
# @return [ Array<Document> ] The documents.
#
# @since 3.0.0
def documents
@documents ||= []
end
# Set the embedded documents on the criteria.
#
# @example Set the documents.
#   criteria.documents = docs
#
# @param [ Array<Document> ] docs The embedded documents.
#
# @return [ Array<Document> ] The embedded documents.
#
# @since 3.0.0
def documents=(docs)
@documents = docs
end
# Is the criteria for embedded documents?
#
# @example Is the criteria for embedded documents?
# criteria.embedded?
#
# @return [ true, false ] If the criteria is embedded.
#
# @since 3.0.0
def embedded?
!!@embedded
end
# Extract a single id from the provided criteria. Could be in an $and
# query or a straight _id query.
#
# @example Extract the id.
# criteria.extract_id
#
# @return [ Object ] The id.
#
# @since 2.3.0
def extract_id
selector.extract_id
end
# Adds a criterion to the +Criteria+ that specifies additional options
# to be passed to the Ruby driver, in the exact format for the driver.
#
# @example Add extra params to the criteria.
# criteria.extras(:limit => 20, :skip => 40)
#
# @param [ Hash ] extras The extra driver options.
#
# @return [ Criteria ] The cloned criteria.
#
# @since 2.0.0
def extras(extras)
crit = clone
crit.options.merge!(extras)
crit
end
# Get the list of included fields.
#
# @example Get the field list.
# criteria.field_list
#
# @return [ Array<String> ] The fields.
#
# @since 2.0.0
def field_list
if options[:fields]
options[:fields].keys.reject{ |key| key == "_type" }
else
[]
end
end
# When freezing a criteria we need to initialize the context first
# otherwise the setting of the context on attempted iteration will raise a
# runtime error.
#
# @example Freeze the criteria.
# criteria.freeze
#
# @return [ Criteria ] The frozen criteria.
#
# @since 2.0.0
def freeze
context and inclusions and super
end
# Initialize the new criteria.
#
# @example Init the new criteria.
# Criteria.new(Band)
#
# @param [ Class ] klass The model class.
#
# @since 1.0.0
def initialize(klass)
@klass = klass
@embedded = nil
@none = nil
klass ? super(klass.aliased_fields, klass.fields) : super({}, {})
end
# Merges another object with this +Criteria+ and returns a new criteria.
# The other object may be a +Criteria+ or a +Hash+. This is used to
# combine multiple scopes together, where a chained scope situation
# may be desired.
#
# @example Merge the criteria with another criteria.
# criteri.merge(other_criteria)
#
# @example Merge the criteria with a hash. The hash must contain a klass
# key and the key/value pairs correspond to method names/args.
#
# criteria.merge({
# klass: Band,
# where: { name: "Depeche Mode" },
# order_by: { name: 1 }
# })
#
# @param [ Criteria ] other The other criterion to merge with.
#
# @return [ Criteria ] A cloned self.
def merge(other)
crit = clone
crit.merge!(other)
crit
end
# Merge the other criteria into this one.
#
# @example Merge another criteria into this criteria.
# criteria.merge(Person.where(name: "bob"))
#
# @param [ Criteria ] other The criteria to merge in.
#
# @return [ Criteria ] The merged criteria.
#
# @since 3.0.0
def merge!(other)
criteria = other.to_criteria
selector.merge!(criteria.selector)
options.merge!(criteria.options)
self.documents = criteria.documents.dup unless criteria.documents.empty?
self.scoping_options = criteria.scoping_options
self.inclusions = (inclusions + criteria.inclusions).uniq
self
end
# Returns a criteria that will always contain zero results and never hits
# the database.
#
# @example Return a none criteria.
# criteria.none
#
# @return [ Criteria ] The none criteria.
#
# @since 4.0.0
def none
@none = true and self
end
# Is the criteria an empty but chainable criteria?
#
# @example Is the criteria a none criteria?
# criteria.empty_and_chainable?
#
# @return [ true, false ] If the criteria is a none.
#
# @since 4.0.0
def empty_and_chainable?
!!@none
end
# Overridden to include _type in the fields.
#
# @example Limit the fields returned from the database.
# Band.only(:name)
#
# @param [ Array<Symbol> ] args The names of the fields.
#
# @return [ Criteria ] The cloned criteria.
#
# @since 1.0.0
def only(*args)
return clone if args.flatten.empty?
args = args.flatten
if (args & Fields::IDS).empty?
args.unshift(:_id)
end
if klass.hereditary?
super(*args.push(:_type))
else
super(*args)
end
end
# Set the read preference for the criteria.
#
# @example Set the read preference.
# criteria.read(mode: :primary_preferred)
#
# @param [ Hash ] value The mode preference.
#
# @return [ Criteria ] The cloned criteria.
#
# @since 5.0.0
# Overridden to exclude _id from the fields.
#
# @example Exclude fields returned from the database.
# Band.without(:name)
#
# @param [ Array<Symbol> ] args The names of the fields.
#
# @return [ Criteria ] The cloned criteria.
#
# @since 4.0.3
def without(*args)
args -= Fields::IDS
super(*args)
end
# Returns true if criteria responds to the given method.
#
# @example Does the criteria respond to the method?
# criteria.respond_to?(:each)
#
# @param [ Symbol ] name The name of the class method on the +Document+.
# @param [ true, false ] include_private Whether to include privates.
#
# @return [ true, false ] If the criteria responds to the method.
def respond_to?(name, include_private = false)
super || klass.respond_to?(name) || CHECK.respond_to?(name, include_private)
end
alias :to_ary :to_a
# Convenience for objects that want to be merged into a criteria.
#
# @example Convert to a criteria.
# criteria.to_criteria
#
# @return [ Criteria ] self.
#
# @since 3.0.0
def to_criteria
self
end
# Convert the criteria to a proc.
#
# @example Convert the criteria to a proc.
# criteria.to_proc
#
# @return [ Proc ] The wrapped criteria.
#
# @since 3.0.0
def to_proc
->{ self }
end
# Adds a criterion to the +Criteria+ that specifies a type or an Array of
# types that must be matched.
#
# @example Match only specific models.
# criteria.type('Browser')
# criteria.type(['Firefox', 'Browser'])
#
# @param [ Array<String> ] types The types to match against.
#
# @return [ Criteria ] The cloned criteria.
def type(types)
any_in(_type: Array(types))
end
# This is the general entry point for most MongoDB queries. This either
# creates a standard field: value selection, and expanded selection with
# the use of hash methods, or a $where selection if a string is provided.
#
# @example Add a standard selection.
# criteria.where(name: "syd")
#
# @example Add a javascript selection.
# criteria.where("this.name == 'syd'")
#
# @param [ String, Hash ] expression The javascript or standard selection.
#
# @raise [ UnsupportedJavascript ] If provided a string and the criteria
# is embedded.
#
# @return [ Criteria ] The cloned selectable.
#
# @since 1.0.0
def where(expression)
if expression.is_a?(::String) && embedded?
raise Errors::UnsupportedJavascript.new(klass, expression)
end
super
end
# Get a version of this criteria without the options.
#
# @example Get the criteria without options.
# criteria.without_options
#
# @return [ Criteria ] The cloned criteria.
#
# @since 3.0.4
def without_options
crit = clone
crit.options.clear
crit
end
# Find documents by the provided javascript and scope. Uses a $where but is
# different from +Criteria#where+ in that it will pass a code object to the
# query instead of a pure string. Safe against Javascript injection
# attacks.
#
# @example Find by javascript.
# Band.for_js("this.name = param", param: "Tool")
#
# @param [ String ] javascript The javascript to execute in the $where.
# @param [ Hash ] scope The scope for the code.
#
# @return [ Criteria ] The criteria.
#
# @since 3.1.0
def for_js(javascript, scope = {})
js_query(BSON::CodeWithScope.new(javascript, scope))
end
private
# Are documents in the query missing, and are we configured to raise an
# error?
#
# @api private
#
# @example Check for missing documents.
# criteria.check_for_missing_documents!([], [ 1 ])
#
# @param [ Array<Document> ] result The result.
# @param [ Array<Object> ] ids The ids.
#
# @raise [ Errors::DocumentNotFound ] If none are found and raising an
# error.
#
# @since 3.0.0
def check_for_missing_documents!(result, ids)
if (result.size < ids.size) && Mongoid.raise_not_found_error
raise Errors::DocumentNotFound.new(klass, ids, ids - result.map(&:_id))
end
end
# Clone or dup the current +Criteria+. This will return a new criteria with
# the selector, options, klass, embedded options, etc intact.
#
# @api private
#
# @example Clone a criteria.
# criteria.clone
#
# @example Dup a criteria.
# criteria.dup
#
# @param [ Criteria ] other The criteria getting cloned.
#
# @return [ nil ] nil.
#
# @since 1.0.0
def initialize_copy(other)
@inclusions = other.inclusions.dup
@scoping_options = other.scoping_options
@documents = other.documents.dup
@context = nil
super
end
# Used for chaining +Criteria+ scopes together in the form of class methods
# on the +Document+ the criteria is for.
#
# @example Handle method missing.
# criteria.method_missing(:name)
#
# @param [ Symbol ] name The method name.
# @param [ Array ] args The arguments.
#
# @return [ Object ] The result of the method call.
#
# @since 1.0.0
def method_missing(name, *args, &block)
if klass.respond_to?(name)
klass.send(:with_scope, self) do
klass.send(name, *args, &block)
end
elsif CHECK.respond_to?(name)
return entries.send(name, *args, &block)
else
super
end
end
# For models where inheritance is at play we need to add the type
# selection.
#
# @example Add the type selection.
# criteria.merge_type_selection
#
# @return [ true, false ] If type selection was added.
#
# @since 3.0.3
def merge_type_selection
selector.merge!(type_selection) if type_selectable?
end
# Is the criteria type selectable?
#
# @api private
#
# @example If the criteria type selectable?
# criteria.type_selectable?
#
# @return [ true, false ] If type selection should be added.
#
# @since 3.0.3
def type_selectable?
klass.hereditary? &&
!selector.keys.include?("_type") &&
!selector.keys.include?(:_type)
end
# Get the selector for type selection.
#
# @api private
#
# @example Get a type selection hash.
# criteria.type_selection
#
# @return [ Hash ] The type selection.
#
# @since 3.0.3
def type_selection
klasses = klass._types
if klasses.size > 1
{ _type: { "$in" => klass._types }}
else
{ _type: klass._types[0] }
end
end
# Get a new selector with type selection in it.
#
# @api private
#
# @example Get a selector with type selection.
# criteria.selector_with_type_selection
#
# @return [ Hash ] The selector.
#
# @since 3.0.3
def selector_with_type_selection
type_selectable? ? selector.merge(type_selection) : selector
end
end
|
senchalabs/jsduck | lib/jsduck/tag/mixins.rb | JsDuck::Tag.Mixins.to_mixins_array | ruby | def to_mixins_array(ast)
v = ast.to_value
mixins = v.is_a?(Hash) ? v.values : Array(v)
mixins.all? {|mx| mx.is_a? String } ? mixins : []
end | Converts an AstNode, whether it's a string, an array or a hash, into
an array of strings (when possible). | train | https://github.com/senchalabs/jsduck/blob/febef5558ecd05da25f5c260365acc3afd0cafd8/lib/jsduck/tag/mixins.rb#L21-L25 | class Mixins < ClassListTag
def initialize
@pattern = ["mixin", "mixins"]
@tagname = :mixins
@repeatable = true
@ext_define_pattern = "mixins"
@ext_define_default = {:mixins => []}
end
# Override definition in parent class. In addition to Array
# literal, mixins can be defined with an object literal.
def parse_ext_define(cls, ast)
cls[:mixins] = to_mixins_array(ast)
end
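# For reference, both Ext.define styles normalize to the same list
# (the class names are illustrative):
#
#   mixins: ['Ext.mixin.Observable', 'My.util.Sortable']
#   mixins: {observable: 'Ext.mixin.Observable', sortable: 'My.util.Sortable'}
#
# both yield ["Ext.mixin.Observable", "My.util.Sortable"].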
# Converts an AstNode, whether it's a string, an array or a hash, into
# an array of strings (when possible).
end
|
dicom/rtp-connect | lib/rtp-connect/plan.rb | RTP.Plan.open_file | ruby | def open_file(file)
# Check if file already exists:
if File.exist?(file)
# Is (the existing file) writable?
unless File.writable?(file)
raise "The program does not have permission or resources to create this file: #{file}"
end
else
# File does not exist.
# Check if this file's path contains a folder that does not exist, and therefore needs to be created:
folders = file.split(File::SEPARATOR)
if folders.length > 1
# Remove last element (which should be the file string):
folders.pop
path = folders.join(File::SEPARATOR)
# Check if this path exists:
unless File.directory?(path)
# We need to create (parts of) this path:
require 'fileutils'
FileUtils.mkdir_p(path)
end
end
end
# It has been verified that the file can be created:
return File.new(file, 'wb:ISO8859-1')
end | Tests if the path/file is writable, creates any folders if necessary, and opens the file for writing.
@param [String] file a path/file string
@raise if the given file cannot be created | train | https://github.com/dicom/rtp-connect/blob/e23791970218a7087a0d798aa430acf36f79d758/lib/rtp-connect/plan.rb#L574-L599 | class Plan < Record
include Logging
# The Record which this instance belongs to (nil by definition).
attr_reader :parent
# An array of Prescription records (if any) that belongs to this Plan.
attr_reader :prescriptions
# The ExtendedPlan record (if any) that belongs to this Plan.
attr_reader :extended_plan
# An array of DoseTracking records (if any) that belongs to this Plan.
attr_reader :dose_trackings
attr_reader :patient_id
attr_reader :patient_last_name
attr_reader :patient_first_name
attr_reader :patient_middle_initial
attr_reader :plan_id
attr_reader :plan_date
attr_reader :plan_time
attr_reader :course_id
attr_reader :diagnosis
attr_reader :md_last_name
attr_reader :md_first_name
attr_reader :md_middle_initial
attr_reader :md_approve_last_name
attr_reader :md_approve_first_name
attr_reader :md_approve_middle_initial
attr_reader :phy_approve_last_name
attr_reader :phy_approve_first_name
attr_reader :phy_approve_middle_initial
attr_reader :author_last_name
attr_reader :author_first_name
attr_reader :author_middle_initial
attr_reader :rtp_mfg
attr_reader :rtp_model
attr_reader :rtp_version
attr_reader :rtp_if_protocol
attr_reader :rtp_if_version
# Creates a new Plan by loading a plan definition string (i.e. a single line).
#
# @note This method does not perform crc verification on the given string.
# If such verification is desired, use methods ::parse or ::read instead.
# @param [#to_s] string the plan definition record string line
# @param [Hash] options the options to use for loading the plan definition string
# @option options [Boolean] :repair if true, a record containing invalid CSV will be attempted fixed and loaded
# @return [Plan] the created Plan instance
# @raise [ArgumentError] if given a string containing an invalid number of elements
#
def self.load(string, options={})
rtp = self.new
rtp.load(string, options)
end
# Creates a Plan instance by parsing an RTPConnect string.
#
# @param [#to_s] string an RTPConnect ascii string (with single or multiple lines/records)
# @param [Hash] options the options to use for parsing the RTP string
# @option options [Boolean] :ignore_crc if true, the RTP records will be successfully loaded even if their checksums are invalid
# @option options [Boolean] :repair if true, any RTP records containing invalid CSV will be attempted fixed and loaded
# @option options [Boolean] :skip_unknown if true, unknown records will be skipped, and record instances will be built from the remaining recognized string records
# @return [Plan] the created Plan instance
# @raise [ArgumentError] if given an invalid string record
#
def self.parse(string, options={})
lines = string.to_s.split("\r\n")
# Create the Plan object:
line = lines.first
RTP.verify(line, options)
rtp = self.load(line, options)
lines[1..-1].each do |line|
# Validate, determine type, and process the line accordingly to
# build the hierarchy of records:
RTP.verify(line, options)
values = line.values(options[:repair])
keyword = values.first
method = RTP::PARSE_METHOD[keyword]
if method
rtp.send(method, line)
else
if options[:skip_unknown]
logger.warn("Skipped unknown record definition: #{keyword}")
else
raise ArgumentError, "Unknown keyword #{keyword} extracted from string."
end
end
end
return rtp
end
# Creates an Plan instance by reading and parsing an RTPConnect file.
#
# @param [String] file a string which specifies the path of the RTPConnect file to be loaded
# @param [Hash] options the options to use for reading the RTP file
# @option options [Boolean] :ignore_crc if true, the RTP records will be successfully loaded even if their checksums are invalid
# @option options [Boolean] :repair if true, any RTP records containing invalid CSV will be attempted fixed and loaded
# @option options [Boolean] :skip_unknown if true, unknown records will be skipped, and record instances will be built from the remaining recognized string records
# @return [Plan] the created Plan instance
# @raise [ArgumentError] if given an invalid file or the file given contains an invalid record
#
def self.read(file, options={})
raise ArgumentError, "Invalid argument 'file'. Expected String, got #{file.class}." unless file.is_a?(String)
# Read the file content:
str = nil
unless File.exist?(file)
logger.error("Invalid (non-existing) file: #{file}")
else
unless File.readable?(file)
logger.error("File exists but I don't have permission to read it: #{file}")
else
if File.directory?(file)
logger.error("Expected a file, got a directory: #{file}")
else
if File.size(file) < 10
logger.error("This file is too small to contain valid RTP information: #{file}.")
else
str = File.open(file, 'rb:ISO8859-1') { |f| f.read }
end
end
end
end
# Parse the file contents and create the RTP instance:
if str
rtp = self.parse(str, options)
else
raise "An RTP::Plan object could not be created from the specified file. Check the log for more details."
end
return rtp
end
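# A usage sketch (the file name is illustrative):
#
#   plan = RTP::Plan.read('plan.rtp', :skip_unknown => true)
#   puts plan.patient_id
#   puts plan.prescriptions.length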
# Creates a new Plan.
#
def initialize
super('PLAN_DEF', 10, 28)
@current_parent = self
# Child records:
@extended_plan = nil
@prescriptions = Array.new
@dose_trackings = Array.new
# No parent (by definition) for the Plan record:
@parent = nil
@attributes = [
# Required:
:keyword,
:patient_id,
:patient_last_name,
:patient_first_name,
:patient_middle_initial,
:plan_id,
:plan_date,
:plan_time,
:course_id,
# Optional:
:diagnosis,
:md_last_name,
:md_first_name,
:md_middle_initial,
:md_approve_last_name,
:md_approve_first_name,
:md_approve_middle_initial,
:phy_approve_last_name,
:phy_approve_first_name,
:phy_approve_middle_initial,
:author_last_name,
:author_first_name,
:author_middle_initial,
:rtp_mfg,
:rtp_model,
:rtp_version,
:rtp_if_protocol,
:rtp_if_version
]
end
# Checks for equality.
#
# Other and self are considered equivalent if they are
# of compatible types and their attributes are equivalent.
#
# @param other an object to be compared with self.
# @return [Boolean] true if self and other are considered equivalent
#
def ==(other)
if other.respond_to?(:to_plan)
other.send(:state) == state
end
end
alias_method :eql?, :==
# Adds a dose tracking record to this instance.
#
# @param [DoseTracking] child a DoseTracking instance which is to be associated with self
#
def add_dose_tracking(child)
@dose_trackings << child.to_dose_tracking
child.parent = self
end
# Adds an extended plan record to this instance.
#
# @param [ExtendedPlan] child an ExtendedPlan instance which is to be associated with self
#
def add_extended_plan(child)
@extended_plan = child.to_extended_plan
child.parent = self
end
# Adds a prescription site record to this instance.
#
# @param [Prescription] child a Prescription instance which is to be associated with self
#
def add_prescription(child)
@prescriptions << child.to_prescription
child.parent = self
end
# Collects the child records of this instance in a properly sorted array.
#
# @return [Array<Prescription, DoseTracking>] a sorted array of self's child records
#
def children
return [@extended_plan, @prescriptions, @dose_trackings].flatten.compact
end
# Removes the reference of the given instance from this instance.
#
# @param [ExtendedPlan, Prescription, DoseTracking] record a child record to be removed from this instance
#
def delete(record)
case record
when Prescription
delete_child(:prescriptions, record)
when DoseTracking
delete_child(:dose_trackings, record)
when ExtendedPlan
delete_extended_plan
else
logger.warn("Unknown class (record) given to Plan#delete: #{record.class}")
end
end
# Removes all dose_tracking references from this instance.
#
def delete_dose_trackings
delete_children(:dose_trackings)
end
# Removes the extended plan reference from this instance.
#
def delete_extended_plan
delete_child(:extended_plan)
end
# Removes all prescription references from this instance.
#
def delete_prescriptions
delete_children(:prescriptions)
end
# Computes a hash code for this object.
#
# @note Two objects with the same attributes will have the same hash code.
#
# @return [Fixnum] the object's hash code
#
def hash
state.hash
end
# Returns self.
#
# @return [Plan] self
#
def to_plan
self
end
# Returns self.
#
# @return [Plan] self
#
def to_rtp
self
end
# Writes the Plan object, along with its hierarchy of child objects,
# to a properly formatted RTPConnect ascii file.
#
# @param [String] file a path/file string
# @param [Hash] options an optional hash parameter
# @option options [Float] :version the Mosaiq compatibility version number (e.g. 2.4) used for the output
#
def write(file, options={})
f = open_file(file)
f.write(to_s(options))
f.close
end
# Sets the patient_id attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def patient_id=(value)
@patient_id = value && value.to_s
end
# Sets the patient_last_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def patient_last_name=(value)
@patient_last_name = value && value.to_s
end
# Sets the patient_first_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def patient_first_name=(value)
@patient_first_name = value && value.to_s
end
# Sets the patient_middle_initial attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def patient_middle_initial=(value)
@patient_middle_initial = value && value.to_s
end
# Sets the plan_id attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def plan_id=(value)
@plan_id = value && value.to_s
end
# Sets the plan_date attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def plan_date=(value)
@plan_date = value && value.to_s
end
# Sets the plan_time attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def plan_time=(value)
@plan_time = value && value.to_s
end
# Sets the course_id attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def course_id=(value)
@course_id = value && value.to_s
end
# Sets the diagnosis attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def diagnosis=(value)
@diagnosis = value && value.to_s
end
# Sets the md_last_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def md_last_name=(value)
@md_last_name = value && value.to_s
end
# Sets the md_first_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def md_first_name=(value)
@md_first_name = value && value.to_s
end
# Sets the md_middle_initial attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def md_middle_initial=(value)
@md_middle_initial = value && value.to_s
end
# Sets the md_approve_last_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def md_approve_last_name=(value)
@md_approve_last_name = value && value.to_s
end
# Sets the md_approve_first_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def md_approve_first_name=(value)
@md_approve_first_name = value && value.to_s
end
# Sets the md_approve_middle_initial attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def md_approve_middle_initial=(value)
@md_approve_middle_initial = value && value.to_s
end
# Sets the phy_approve_last_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def phy_approve_last_name=(value)
@phy_approve_last_name = value && value.to_s
end
# Sets the phy_approve_first_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def phy_approve_first_name=(value)
@phy_approve_first_name = value && value.to_s
end
# Sets the phy_approve_middle_initial attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def phy_approve_middle_initial=(value)
@phy_approve_middle_initial = value && value.to_s
end
# Sets the author_last_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def author_last_name=(value)
@author_last_name = value && value.to_s
end
# Sets the author_first_name attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def author_first_name=(value)
@author_first_name = value && value.to_s
end
# Sets the author_middle_initial attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def author_middle_initial=(value)
@author_middle_initial = value && value.to_s
end
# Sets the rtp_mfg attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def rtp_mfg=(value)
@rtp_mfg = value && value.to_s
end
# Sets the rtp_model attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def rtp_model=(value)
@rtp_model = value && value.to_s
end
# Sets the rtp_version attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def rtp_version=(value)
@rtp_version = value && value.to_s
end
# Sets the rtp_if_protocol attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def rtp_if_protocol=(value)
@rtp_if_protocol = value && value.to_s
end
# Sets the rtp_if_version attribute.
#
# @param [nil, #to_s] value the new attribute value
#
def rtp_if_version=(value)
@rtp_if_version = value && value.to_s
end
private
# Creates a control point record from the given string.
#
# @param [String] string a string line containing a control point definition
#
def control_point(string)
cp = ControlPoint.load(string, @current_parent)
@current_parent = cp
end
# Creates a dose tracking record from the given string.
#
# @param [String] string a string line containing a dose tracking definition
#
def dose_tracking(string)
dt = DoseTracking.load(string, @current_parent)
@current_parent = dt
end
# Creates an extended plan record from the given string.
#
# @param [String] string a string line containing an extended plan definition
#
def extended_plan_def(string)
ep = ExtendedPlan.load(string, @current_parent)
@current_parent = ep
end
# Creates an extended treatment field record from the given string.
#
# @param [String] string a string line containing an extended treatment field definition
#
def extended_treatment_field(string)
ef = ExtendedField.load(string, @current_parent)
@current_parent = ef
end
# Tests if the path/file is writable, creates any folders if necessary, and opens the file for writing.
#
# @param [String] file a path/file string
# @raise if the given file cannot be created
#
# Creates a prescription site record from the given string.
#
# @param [String] string a string line containing a prescription site definition
#
def prescription_site(string)
p = Prescription.load(string, @current_parent)
@current_parent = p
end
# Creates a site setup record from the given string.
#
# @param [String] string a string line containing a site setup definition
#
def site_setup(string)
s = SiteSetup.load(string, @current_parent)
@current_parent = s
end
# Collects the attributes of this instance.
#
# @note The CRC is not considered part of the attributes of interest and is excluded
# @return [Array<String>] an array of attributes
#
alias_method :state, :values
# Creates a treatment field record from the given string.
#
# @param [String] string a string line containing a treatment field definition
#
def treatment_field(string)
f = Field.load(string, @current_parent)
@current_parent = f
end
# Creates a simulation field record from the given string.
#
# @param [String] string a string line containing a simulation field definition
#
def simulation_field(string)
sf = SimulationField.load(string, @current_parent)
@current_parent = sf
end
end
|
moneta-rb/moneta | lib/moneta/mixins.rb | Moneta.CreateSupport.create | ruby | def create(key, value, options = {})
if key? key
false
else
store(key, value, options)
true
end
end | (see Defaults#create) | train | https://github.com/moneta-rb/moneta/blob/26a118c8b2c93d11257f4a5fe9334a8157f4db47/lib/moneta/mixins.rb#L452-L459 | module CreateSupport
# (see Defaults#create)
def self.included(base)
base.supports(:create) if base.respond_to?(:supports)
end
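# A usage sketch (the backend choice is illustrative):
#
#   store = Moneta.new(:Memory)
#   store.create('key', 'v1') #=> true (value stored)
#   store.create('key', 'v2') #=> false (key already exists; value left untouched)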
end
|
jdigger/git-process | lib/git-process/git_lib.rb | GitProc.GitLib.rebase | ruby | def rebase(upstream, opts = {})
args = []
if opts[:interactive]
logger.info { "Interactively rebasing #{branches.current.name} against #{upstream}" }
args << '-i'
args << upstream
elsif opts[:oldbase]
logger.info { "Doing rebase from #{opts[:oldbase]} against #{upstream} on #{branches.current.name}" }
args << '--onto' << upstream << opts[:oldbase] << branches.current.name
else
logger.info { "Rebasing #{branches.current.name} against #{upstream}" }
args << upstream
end
return command('rebase', args)
end | `git rebase`
@param [String] upstream the commit-ish to rebase against
@option opts :interactive do an interactive rebase
@option opts [String] :oldbase the old base to rebase from
@return [String] the output of 'git rebase' | train | https://github.com/jdigger/git-process/blob/5853aa94258e724ce0dbc2f1e7407775e1630964/lib/git-process/git_lib.rb#L277-L291 | class GitLib
# @param [Dir] dir the work dir
# @param [Hash] logging_opts see {log_level}
def initialize(dir, logging_opts)
self.log_level = GitLib.log_level(logging_opts)
self.workdir = dir
end
# @return [GitLogger] the logger to use
def logger
if @logger.nil?
@logger = GitLogger.new(log_level)
end
return @logger
end
#
# Decodes the [Hash] to determine what logging level to use
#
# @option opts [Fixnum] :log_level the log level from {Logger}
# @option opts :quiet {Logger::ERROR}
# @option opts :verbose {Logger::DEBUG}
#
# @return [Fixnum] the log level from Logger; defaults to {Logger::INFO}
#
def self.log_level(opts)
if opts[:log_level]
return opts[:log_level]
elsif opts[:quiet]
return Logger::ERROR
elsif opts[:verbose]
return Logger::DEBUG
else
return Logger::INFO
end
end
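# For example:
#
#   GitLib.log_level(:quiet => true)   #=> Logger::ERROR
#   GitLib.log_level(:verbose => true) #=> Logger::DEBUG
#   GitLib.log_level({})               #=> Logger::INFO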
# @return [Fixnum] the logging level to use; defaults to {Logger::WARN}
def log_level
@log_level || Logger::WARN
end
# @param [Fixnum] lvl the logging level to use. See {Logger}
# @return [void]
def log_level=(lvl)
@log_level = lvl
end
# @return [Dir] the working directory
def workdir
@workdir
end
#
# Sets the working directory to use for the (non-bare) repository.
#
# If the directory is *not* part of an existing repository, a new repository is created. (i.e., "git init")
#
# @param [Dir] dir the working directory
# @return [void]
def workdir=(dir)
workdir = GitLib.find_workdir(dir)
if workdir.nil?
@workdir = dir
logger.info { "Initializing new repository at #{dir}" }
return command(:init)
else
@workdir = workdir
logger.debug { "Opening existing repository at #{dir}" }
end
end
def self.find_workdir(dir)
if dir == File::SEPARATOR
return nil
elsif File.directory?(File.join(dir, '.git'))
return dir
else
return find_workdir(File.expand_path("#{dir}#{File::SEPARATOR}.."))
end
end
# @return [void]
def fetch_remote_changes(remote_name = nil)
if remote.exists?
fetch(remote_name || remote.name)
else
logger.debug 'Can not fetch latest changes because there is no remote defined'
end
end
#
# Executes a rebase, but translates any {GitExecuteError} to a {RebaseError}
#
# @param (see #rebase)
# @option (see #rebase)
# @raise [RebaseError] if there is a problem executing the rebase
def proc_rebase(base, opts = {})
begin
return rebase(base, opts)
rescue GitExecuteError => rebase_error
raise RebaseError.new(rebase_error.message, self)
end
end
#
# Executes a merge, but translates any {GitExecuteError} to a {MergeError}
#
# @param (see #merge)
# @option (see #merge)
# @raise [MergeError] if there is a problem executing the merge
def proc_merge(base, opts = {})
begin
return merge(base, opts)
rescue GitExecuteError => merge_error
raise MergeError.new(merge_error.message, self)
end
end
# @return [String, nil] the previous remote sha ONLY IF it is not the same as the new remote sha; otherwise nil
def previous_remote_sha(current_branch, remote_branch)
return nil unless has_a_remote?
return nil unless remote_branches.include?(remote_branch)
control_file_sha = read_sync_control_file(current_branch)
old_sha = control_file_sha || remote_branch_sha(remote_branch)
fetch_remote_changes
new_sha = remote_branch_sha(remote_branch)
if old_sha != new_sha
logger.info('The remote branch has changed since the last time')
return old_sha
else
logger.debug 'The remote branch has not changed since the last time'
return nil
end
end
def remote_branch_sha(remote_branch)
logger.debug { "getting sha for remotes/#{remote_branch}" }
return rev_parse("remotes/#{remote_branch}") rescue ''
end
# @return [Boolean] is the current branch the "_parked_" branch?
def is_parked?
mybranches = self.branches()
return mybranches.parking == mybranches.current
end
# Push the repository to the server.
#
# @param local_branch [String] the name of the local branch to push from
# @param remote_branch [String] the name of the remote branch to push to
#
# @option opts [Boolean] :local should this do nothing because it is in local-only mode?
# @option opts [Boolean] :force should it force the push even if it can not fast-forward?
# @option opts [Proc] :prepush a block to call before doing the push
# @option opts [Proc] :postpush a block to call after doing the push
#
# @return [void]
#
def push_to_server(local_branch, remote_branch, opts = {})
if opts[:local]
logger.debug('Not pushing to the server because the user selected local-only.')
elsif not has_a_remote?
logger.debug('Not pushing to the server because there is no remote.')
elsif local_branch == config.master_branch
logger.warn('Not pushing to the server because the current branch is the mainline branch.')
else
opts[:prepush].call if opts[:prepush]
push(remote.name, local_branch, remote_branch, :force => opts[:force])
opts[:postpush].call if opts[:postpush]
end
end
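# Usage sketch (the branch names and hook are illustrative):
#
#   push_to_server('feature/login', 'feature/login',
#                  :prepush => proc { logger.info('running pre-push checks') })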
# @return [GitConfig] the git configuration
def config
if @config.nil?
@config = GitConfig.new(self)
end
return @config
end
# @return [GitRemote] the git remote configuration
def remote
if @remote.nil?
@remote = GitProc::GitRemote.new(config)
end
return @remote
end
# @return [Boolean] does this have a remote defined?
def has_a_remote?
remote.exists?
end
#
# `git add`
#
# @param [String] file the name of the file to add to the index
# @return [String] the output of 'git add'
def add(file)
logger.info { "Adding #{[*file].join(', ')}" }
return command(:add, ['--', file])
end
#
# `git commit`
#
# @param [String] msg the commit message
# @return [String] the output of 'git commit'
def commit(msg = nil)
logger.info 'Committing changes'
return command(:commit, msg.nil? ? nil : ['-m', msg])
end
#
# `git rebase`
#
# @param [String] upstream the commit-ish to rebase against
# @option opts :interactive do an interactive rebase
# @option opts [String] :oldbase the old base to rebase from
#
# @return [String] the output of 'git rebase'
#
# `git merge`
#
# @return [String] the output of 'git merge'
def merge(base, opts= {})
logger.info { "Merging #{branches.current.name} with #{base}" }
args = []
args << '-s' << opts[:merge_strategy] if opts[:merge_strategy]
args << base
return command(:merge, args)
end
#
# `git fetch`
#
# @return [String] the output of 'git fetch'
def fetch(name = remote.name)
logger.info 'Fetching the latest changes from the server'
output = self.command(:fetch, ['-p', name])
log_fetch_changes(fetch_changes(output))
return output
end
# @return [Hash] with lists for each of :new_branch, :new_tag, :force_updated, :deleted, :updated
def fetch_changes(output)
changed = output.split("\n")
changes = {:new_branch => [], :new_tag => [], :force_updated => [], :deleted => [], :updated => []}
line = changed.shift
until line.nil? do
case line
when /^\s\s\s/
m = /^\s\s\s(\S+)\s+(\S+)\s/.match(line)
changes[:updated] << "#{m[2]} (#{m[1]})"
when /^\s\*\s\[new branch\]/
m = /^\s\*\s\[new branch\]\s+(\S+)\s/.match(line)
changes[:new_branch] << m[1]
when /^\s\*\s\[new tag\]/
m = /^\s\*\s\[new tag\]\s+(\S+)\s/.match(line)
changes[:new_tag] << m[1]
when /^\sx\s/
m = /^\sx\s\[deleted\]\s+\(none\)\s+->\s+[^\/]+\/(\S+)/.match(line)
changes[:deleted] << m[1]
when /^\s\+\s/
m = /^\s\+\s(\S+)\s+(\S+)\s/.match(line)
changes[:force_updated] << "#{m[2]} (#{m[1]})"
else
# ignore the line
end
line = changed.shift
end
changes
end
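# For example, a fetch output line such as
#   " * [new branch]      feature-x  -> origin/feature-x"
# is parsed into
#   {:new_branch => ["feature-x"], :new_tag => [], :force_updated => [], :deleted => [], :updated => []}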
# @return [GitBranches]
def branches
GitProc::GitBranches.new(self)
end
# @return [GitBranches]
def remote_branches
GitProc::GitBranches.new(self, :remote => true)
end
#
# Does branch manipulation.
#
# @param [String] branch_name the name of the branch
#
# @option opts [Boolean] :delete delete the remote branch
# @option opts [Boolean] :force force the update
# @option opts [Boolean] :all list all branches, local and remote
# @option opts [Boolean] :no_color force not using any ANSI color codes
# @option opts [String] :rename the new name for the branch
# @option opts [String] :upstream the new branch to track
# @option opts [String] :base_branch ('master') the branch to base the new branch off of
#
# @return [String] the output of running the git command
def branch(branch_name, opts = {})
if branch_name
if opts[:delete]
return delete_branch(branch_name, opts[:force])
elsif opts[:rename]
return rename_branch(branch_name, opts[:rename])
elsif opts[:upstream]
return set_upstream_branch(branch_name, opts[:upstream])
else
base_branch = opts[:base_branch] || 'master'
if opts[:force]
return change_branch(branch_name, base_branch)
else
return create_branch(branch_name, base_branch)
end
end
else
#list_branches(opts)
return list_branches(opts[:all], opts[:remote], opts[:no_color])
end
end
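# Usage sketch (the branch names are illustrative):
#
#   branch('feature/login', :base_branch => 'develop') # create off of develop
#   branch('old-name', :rename => 'new-name')
#   branch('feature/login', :delete => true)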
#
# Pushes the given branch to the server.
#
# @param [String] remote_name the repository name; nil -> 'origin'
# @param [String] local_branch the local branch to push; nil -> the current branch
# @param [String] remote_branch the name of the branch to push to; nil -> same as local_branch
#
# @option opts [Boolean, String] :delete delete the remote branch
# @option opts [Boolean] :force force the update, even if it is not a fast-forward
#
# @return [String] the output of the push command
#
# @raise [ArgumentError] if :delete is true, but no branch name is given
#
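# @example Pushing a branch, then deleting a remote branch (names are illustrative)
#   lib.push('origin', 'topic', 'topic')
#   lib.push('origin', nil, 'topic', :delete => true)
#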
def push(remote_name, local_branch, remote_branch, opts = {})
if opts[:delete]
return push_delete(remote_branch || local_branch, remote_name, opts)
else
return push_to_remote(local_branch, remote_branch, remote_name, opts)
end
end
#
# Pushes the given branch to the server.
#
# @param [String] remote_name the repository name; nil -> 'origin'
# @param [String] local_branch the local branch to push; nil -> the current branch
# @param [String] remote_branch the name of the branch to push to; nil -> same as local_branch
#
# @option opts [Boolean] :force force the update, even if it is not a fast-forward
#
# @return [String] the output of the push command
#
def push_to_remote(local_branch, remote_branch, remote_name, opts)
remote_name ||= 'origin'
args = [remote_name]
local_branch ||= branches.current
remote_branch ||= local_branch
args << '-f' if opts[:force]
logger.info do
if local_branch == remote_branch
"Pushing to '#{remote_branch}' on '#{remote_name}'."
else
"Pushing #{local_branch} to '#{remote_branch}' on '#{remote_name}'."
end
end
args << "#{local_branch}:#{remote_branch}"
return command(:push, args)
end
#
# Pushes the given branch to the server.
#
# @param [String] remote_name the repository name; nil -> 'origin'
# @param [String] branch_name the name of the branch to push to
#
# @option opts [Boolean, String] :delete if true, delete the remote branch; if a String, it is the name of the branch to delete
#
# @return [String] the output of the push command
#
# @raise [ArgumentError] if no branch name is given
# @raise [GitProc::GitProcessError] if trying to delete the integration branch
#
# @todo remove the opts param
#
def push_delete(branch_name, remote_name, opts)
remote_name ||= 'origin'
args = [remote_name]
if branch_name
rb = branch_name
elsif !(opts[:delete].is_a? TrueClass)
rb = opts[:delete]
else
raise ArgumentError.new('Need a branch name to delete.')
end
int_branch = config.master_branch
if rb == int_branch
raise GitProc::GitProcessError.new("Can not delete the integration branch '#{int_branch}'")
end
logger.info { "Deleting remote branch '#{rb}' on '#{remote_name}'." }
args << '--delete' << rb
return command(:push, args)
end
# `git rebase --continue`
#
# @return [String] the output of the git command
def rebase_continue
command(:rebase, '--continue')
end
# `git stash --save`
#
# @return [String] the output of the git command
def stash_save
command(:stash, %w(save))
end
# `git stash --pop`
#
# @return [String] the output of the git command
def stash_pop
command(:stash, %w(pop))
end
# `git show`
#
# @return [String] the output of the git command
def show(refspec)
command(:show, refspec)
end
# @param [String] branch_name the name of the branch to checkout/create
# @option opts [Boolean] :no_track do not track the base branch
# @option opts [String] :new_branch the name of the base branch
#
# @return [void]
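# @example Creating and checking out 'topic' based on 'master' (names are illustrative)
#   lib.checkout('topic', :new_branch => 'master')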
def checkout(branch_name, opts = {})
args = []
args << '--no-track' if opts[:no_track]
args << '-b' if opts[:new_branch]
args << branch_name
args << opts[:new_branch] if opts[:new_branch]
branches = branches()
command(:checkout, args)
branches << GitBranch.new(branch_name, opts[:new_branch] != nil, self)
if block_given?
yield
command(:checkout, branches.current.name)
branches.current
else
branches[branch_name]
end
end
# @return [Integer] the number of commits that exist in the current branch
def log_count
command(:log, '--oneline').split(/\n/).length
end
# Remove the files from the Index
#
# @param [Array<String>] files the file names to remove from the Index
#
# @option opts :force if set and not false, force the removal of the files
#
# @return [String] the output of the git command
def remove(files, opts = {})
args = []
args << '-f' if opts[:force]
args << [*files]
command(:rm, args)
end
#
# Returns the status of the git repository.
#
# @return [Status]
def status
GitStatus.new(self)
end
# @return [String] the raw porcelain status string
def porcelain_status
command(:status, '--porcelain')
end
#
# Resets the Index/Working Directory to the given revision
#
# @param [String] rev_name the revision name (commit-ish) to go back to
#
# @option opts :hard should the working directory be changed? If false or missing, only the Index is updated
#
def reset(rev_name, opts = {})
args = []
args << '--hard' if opts[:hard]
args << rev_name
logger.info { "Resetting #{opts[:hard] ? '(hard)' : ''} to #{rev_name}" }
command(:reset, args)
end
def rev_list(start_revision, end_revision, opts = {})
args = []
args << "-#{opts[:num_revs]}" if opts[:num_revs]
args << '--oneline' if opts[:oneline]
args << "#{start_revision}..#{end_revision}"
command('rev-list', args)
end
#
# Translate the commit-ish name to the SHA-1 hash value
#
# @return [String, nil] the SHA-1 value, or nil if the revision name is unknown
#
def rev_parse(name)
sha = command('rev-parse', ['--revs-only', name])
return sha.empty? ? nil : sha
end
alias :sha :rev_parse
#
# Executes the given git command
#
# @param [Symbol, String] cmd the command to run (e.g., :commit)
# @param [Array<String, Symbol>] opts the arguments to pass to the command
# @param [Boolean] chdir should the shell change to the top of the working dir before executing the command?
# @param [String] redirect extra shell redirection appended to the command line (inserted before the trailing "2>&1"; see #create_git_command)
# @yield the block to run in the context of running the command
#
# @return [String] the output of the git command
#
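# @example Running `git status --porcelain` through the wrapper (illustrative)
#   lib.command(:status, ['--porcelain'])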
def command(cmd, opts = [], chdir = true, redirect = '', &block)
ENV['GIT_INDEX_FILE'] = File.join(workdir, '.git', 'index')
ENV['GIT_DIR'] = File.join(workdir, '.git')
ENV['GIT_WORK_TREE'] = workdir
path = workdir
git_cmd = create_git_command(cmd, opts, redirect)
out = command_git_cmd(path, git_cmd, chdir, block)
if logger
logger.debug(git_cmd)
logger.debug(out)
end
handle_exitstatus($?, git_cmd, out)
end
#
# Writes the current SHA-1 for the tip of the branch to the "sync control file"
#
# @return [void]
# @see GitLib#read_sync_control_file
def write_sync_control_file(branch_name)
latest_sha = rev_parse(branch_name)
filename = sync_control_filename(branch_name)
logger.debug { "Writing sync control file, #{filename}, with #{latest_sha}" }
File.open(filename, 'w') { |f| f.puts latest_sha }
end
# @return [String, nil] the SHA-1 of the latest sync performed for the branch, or nil if none is recorded
# @see GitLib#write_sync_control_file
def read_sync_control_file(branch_name)
filename = sync_control_filename(branch_name)
    if File.exist?(filename)
sha = File.open(filename) do |file|
file.readline.chop
end
logger.debug "Read sync control file, #{filename}: #{sha}"
sha
else
logger.debug "Sync control file, #{filename}, was not found"
nil
end
end
#
# Delete the sync control file for the branch
#
# @return [void]
# @see GitLib#write_sync_control_file
def delete_sync_control_file!(branch_name)
filename = sync_control_filename(branch_name)
logger.debug { "Deleting sync control file, #{filename}" }
# on some systems, especially Windows, the file may be locked. wait for it to unlock
counter = 10
while counter > 0
begin
File.delete(filename)
counter = 0
rescue
counter = counter - 1
sleep(0.25)
end
end
end
# @return [Boolean] does the sync control file exist?
# @see GitLib#write_sync_control_file
def sync_control_file_exists?(branch_name)
filename = sync_control_filename(branch_name)
File.exist?(filename)
end
def set_upstream_branch(branch_name, upstream)
logger.info { "Setting upstream/tracking for branch '#{branch_name}' to '#{upstream}'." }
if has_a_remote?
parts = upstream.split(/\//)
if parts.length() > 1
potential_remote = parts.shift
if remote.remote_names.include?(potential_remote)
config["branch.#{branch_name}.remote"] = potential_remote
config["branch.#{branch_name}.merge"] = "refs/heads/#{parts.join('/')}"
end
else
config["branch.#{branch_name}.merge"] = "refs/heads/#{upstream}"
end
else
config["branch.#{branch_name}.merge"] = "refs/heads/#{upstream}"
end
# The preferred way assuming using git 1.8 cli
#command(:branch, ['--set-upstream-to', upstream, branch_name])
end
private
#
# Create the CLI for the git command
#
# @param [Symbol, String] cmd the command to run (e.g., :commit)
# @param [Array<String, Symbol>] opts the arguments to pass to the command
# @param [String] redirect extra shell redirection appended to the command line before the trailing "2>&1"
#
# @return [String] the command line to run
#
def create_git_command(cmd, opts, redirect)
opts = [opts].flatten.map { |s| escape(s) }.join(' ')
return "git #{cmd} #{opts} #{redirect} 2>&1"
end
#
# Executes the given git command
#
# @param [String] path the directory to run the command in
# @param [String] git_cmd the CLI command to execute
# @param [Boolean] chdir should the shell change to the top of the working dir before executing the command?
# @param [Proc] block the block to run in the context of running the command
#
# @return [String] the output of the git command
#
def command_git_cmd(path, git_cmd, chdir, block)
out = nil
if chdir and (Dir.getwd != path)
Dir.chdir(path) { out = run_command(git_cmd, &block) }
else
out = run_command(git_cmd, &block)
end
return out
end
# @return [String]
def handle_exitstatus(proc_status, git_cmd, out)
if proc_status.exitstatus > 0
unless proc_status.exitstatus == 1 && out == ''
raise GitProc::GitExecuteError.new(git_cmd + ':' + out.to_s)
end
end
return out
end
#
# Executes the given git command
#
# @param [String] git_cmd the CLI command to execute
# @yield the block to run in the context of running the command. See {IO#popen}
#
# @return [String] the output of the git command
#
def run_command(git_cmd, &block)
if block_given?
return IO.popen(git_cmd, &block)
else
return `#{git_cmd}`.chomp
end
end
# @return [String]
def escape(s)
escaped = s.to_s.gsub('\'', '\'\\\'\'')
%Q{"#{escaped}"}
end
# @return [String]
def change_branch(branch_name, base_branch)
raise ArgumentError.new('Need :base_branch when using :force for a branch.') unless base_branch
logger.info { "Changing branch '#{branch_name}' to point to '#{base_branch}'." }
command(:branch, ['-f', branch_name, base_branch])
end
# @return [String]
def create_branch(branch_name, base_branch)
logger.info { "Creating new branch '#{branch_name}' based on '#{base_branch}'." }
command(:branch, [branch_name, (base_branch || 'master')])
end
# @return [String]
def list_branches(all_branches, remote_branches, no_color)
args = []
args << '-a' if all_branches
args << '-r' if remote_branches
args << '--no-color' if no_color
command(:branch, args)
end
# @return [String]
def delete_branch(branch_name, force)
logger.info { "Deleting local branch '#{branch_name}'." } unless branch_name == '_parking_'
command(:branch, [force ? '-D' : '-d', branch_name])
end
# @return [String]
def rename_branch(branch_name, new_name)
logger.info { "Renaming branch '#{branch_name}' to '#{new_name}'." }
command(:branch, ['-m', branch_name, new_name])
end
# @return [String]
def sync_control_filename(branch_name)
normalized_branch_name = branch_name.to_s.gsub(/[\/]/, "-")
return File.join(File.join(workdir, '.git'), "gitprocess-sync-#{remote.name}--#{normalized_branch_name}")
end
# @param [Hash] changes a hash of the changes that were made
#
# @return [void]
def log_fetch_changes(changes)
changes.each do |key, v|
unless v.empty?
logger.info { " #{key.to_s.sub(/_/, ' ')}: #{v.join(', ')}" }
end
end
end
end
|
ikayzo/SDL.rb | lib/sdl4r/parser.rb | SDL4R.Parser.parse | ruby | def parse
tags = []
while tokens = @tokenizer.read_line_tokens()
if tokens.last.type == :START_BLOCK
# tag with a block
tag = construct_tag(tokens[0...-1])
add_children(tag)
tags << tag
elsif tokens.first.type == :END_BLOCK
# we found an block end token that should have been consumed by
# add_children() normally
parse_error(
"No opening block ({) for close block (}).",
tokens.first.line,
tokens.first.position)
else
# tag without block
tags << construct_tag(tokens)
end
end
@tokenizer.close()
return tags
end | Creates an SDL parser on the specified +IO+.
IO.open("path/to/sdl_file") { |io|
parser = SDL4R::Parser.new(io)
tags = parser.parse()
}
Parses the underlying +IO+ and returns an +Array+ of +Tag+.
==Errors
[IOError] If a problem is encountered with the IO
[SdlParseError] If the document is malformed | train | https://github.com/ikayzo/SDL.rb/blob/1663b9f5aa95d8d6269f060e343c2d2fd9309259/lib/sdl4r/parser.rb#L70-L96 | class Parser
  # Passed to parse_error() in order to specify an error that occurred on no specific position
# (column).
UNKNOWN_POSITION = -2
# Creates an SDL parser on the specified +IO+.
#
# IO.open("path/to/sdl_file") { |io|
# parser = SDL4R::Parser.new(io)
# tags = parser.parse()
# }
#
def initialize(io)
raise ArgumentError, "io == nil" if io.nil?
@tokenizer = Tokenizer.new(io)
end
# Parses the underlying +IO+ and returns an +Array+ of +Tag+.
#
# ==Errors
# [IOError] If a problem is encountered with the IO
# [SdlParseError] If the document is malformed
def parse
tags = []
while tokens = @tokenizer.read_line_tokens()
if tokens.last.type == :START_BLOCK
# tag with a block
tag = construct_tag(tokens[0...-1])
add_children(tag)
tags << tag
elsif tokens.first.type == :END_BLOCK
# we found an block end token that should have been consumed by
# add_children() normally
parse_error(
"No opening block ({) for close block (}).",
tokens.first.line,
tokens.first.position)
else
# tag without block
tags << construct_tag(tokens)
end
end
@tokenizer.close()
return tags
end
# Creates and returns the object representing a datetime (DateTime in the default
# implementation). Can be overriden.
#
# def new_date_time(year, month, day, hour, min, sec, time_zone_offset)
# Time.utc(year, month, day, hour, min, sec)
# end
#
def new_date_time(year, month, day, hour, min, sec, time_zone_offset)
SDL4R::new_date_time(year, month, day, hour, min, sec, time_zone_offset)
end
private
# Parses the children tags of +parent+ until an end of block is found.
def add_children(parent)
while tokens = @tokenizer.read_line_tokens()
if tokens.first.type == :END_BLOCK
return
elsif tokens.last.type == :START_BLOCK
# found a child with a block
tag = construct_tag(tokens[0...-1]);
add_children(tag)
parent.add_child(tag)
else
parent.add_child(construct_tag(tokens))
end
end
parse_error("No close block (}).", @tokenizer.line_no, UNKNOWN_POSITION)
end
# Construct a Tag (but not its children) from a string of tokens
#
# Throws SdlParseError if some bad syntax is found.
def construct_tag(tokens)
raise ArgumentError, "tokens == nil" if tokens.nil?
if tokens.empty?
parse_error("Internal Error: empty token list", @tokenizer.line_no, UNKNOWN_POSITION)
end
first_token = tokens.first
if first_token.literal?
first_token = Token.new("content")
tokens.insert(0, first_token)
elsif first_token.type != :IDENTIFIER
expecting_but_got(
"IDENTIFIER",
"#{first_token.type} (#{first_token.text})",
first_token.line,
first_token.position)
end
tag = nil
if tokens.size == 1
tag = Tag.new(first_token.text)
else
values_start_index = 1
second_token = tokens[1]
if second_token.type == :COLON
if tokens.size == 2 or tokens[2].type != :IDENTIFIER
parse_error(
"Colon (:) encountered in unexpected location.",
second_token.line,
second_token.position)
end
third_token = tokens[2];
tag = Tag.new(first_token.text, third_token.text)
values_start_index = 3
else
tag = Tag.new(first_token.text)
end
# read values
attribute_start_index = add_tag_values(tag, tokens, values_start_index)
# read attributes
if attribute_start_index < tokens.size
add_tag_attributes(tag, tokens, attribute_start_index)
end
end
return tag
end
#
# @return The position at the end of the value list
#
def add_tag_values(tag, tokens, start)
size = tokens.size()
i = start;
while i < size
token = tokens[i]
if token.literal?
# if a DATE token is followed by a TIME token combine them
next_token = ((i + 1) < size)? tokens[i + 1] : nil
if token.type == :DATE && next_token && next_token.type == :TIME
date = token.object_for_literal()
time_zone_with_zone = next_token.object_for_literal()
if time_zone_with_zone.day != 0
# as there are days specified, it can't be a full precision date
tag.add_value(date);
tag.add_value(
SdlTimeSpan.new(
time_zone_with_zone.day,
time_zone_with_zone.hour,
time_zone_with_zone.min,
time_zone_with_zone.sec))
if time_zone_with_zone.time_zone_offset
parse_error("TimeSpan cannot have a timeZone", t.line, t.position)
end
else
tag.add_value(combine(date, time_zone_with_zone))
end
i += 1
else
value = token.object_for_literal()
if value.is_a?(TimeSpanWithZone)
# the literal looks like a time zone
if value.time_zone_offset
expecting_but_got(
"TIME SPAN",
"TIME (component of date/time)",
token.line,
token.position)
end
tag.add_value(
SdlTimeSpan.new(
value.day,
value.hour,
value.min,
value.sec))
else
tag.add_value(value)
end
end
elsif token.type == :IDENTIFIER
break
else
expecting_but_got(
"LITERAL or IDENTIFIER", token.type, token.line, token.position)
end
i += 1
end
return i
end
#
# Add attributes to the given tag
#
def add_tag_attributes(tag, tokens, start)
i = start
size = tokens.size
while i < size
token = tokens[i]
if token.type != :IDENTIFIER
expecting_but_got("IDENTIFIER", token.type, token.line, token.position)
end
name_or_namespace = token.text;
if i == (size - 1)
expecting_but_got(
"\":\" or \"=\" \"LITERAL\"",
"END OF LINE.",
token.line,
token.position)
end
i += 1
token = tokens[i]
if token.type == :COLON
if i == (size - 1)
expecting_but_got(
"IDENTIFIER", "END OF LINE", token.line, token.position)
end
i += 1
token = tokens[i]
if token.type != :IDENTIFIER
expecting_but_got(
"IDENTIFIER", token.type, token.line, token.position)
end
name = token.text
if i == (size - 1)
expecting_but_got("\"=\"", "END OF LINE", token.line, token.position)
end
i += 1
token = tokens[i]
if token.type != :EQUALS
expecting_but_got("\"=\"", token.type, token.line, token.position)
end
if i == (size - 1)
expecting_but_got("LITERAL", "END OF LINE", token.line, token.position)
end
i += 1
token = tokens[i]
if !token.literal?
expecting_but_got("LITERAL", token.type, token.line, token.position)
end
if token.type == :DATE and (i + 1) < size and tokens[i + 1].type == :TIME
          date = token.object_for_literal()
          time_span_with_zone = tokens[i + 1].object_for_literal()
          if time_span_with_zone.day != 0
expecting_but_got(
"TIME (component of date/time) in attribute value",
"TIME SPAN",
token.line,
token.position)
else
tag.set_attribute(name_or_namespace, name, combine(date, time_span_with_zone))
end
i += 1
else
value = token.object_for_literal();
if value.is_a?(TimeSpanWithZone)
time_span_with_zone = value
if time_span_with_zone.time_zone_offset
expecting_but_got(
"TIME SPAN",
"TIME (component of date/time)",
token.line,
token.position)
end
time_span = SdlTimeSpan.new(
time_span_with_zone.day,
time_span_with_zone.hour,
time_span_with_zone.min,
time_span_with_zone.sec)
tag.set_attribute(name_or_namespace, name, time_span)
else
tag.set_attribute(name_or_namespace, name, value);
end
end
elsif token.type == :EQUALS
if i == (size - 1)
expecting_but_got("LITERAL", "END OF LINE", token.line, token.position)
end
i += 1
token = tokens[i]
if !token.literal?
expecting_but_got("LITERAL", token.type, token.line, token.position)
end
if token.type == :DATE and (i + 1) < size and tokens[i + 1].type == :TIME
date = token.object_for_literal()
time_span_with_zone = tokens[i + 1].object_for_literal()
if time_span_with_zone.day != 0
expecting_but_got(
"TIME (component of date/time) in attribute value",
"TIME SPAN",
token.line,
token.position)
end
tag.set_attribute(name_or_namespace, combine(date, time_span_with_zone))
i += 1
else
value = token.object_for_literal()
if value.is_a?(TimeSpanWithZone)
time_span_with_zone = value
if time_span_with_zone.time_zone_offset
expecting_but_got(
"TIME SPAN",
"TIME (component of date/time)",
token.line,
token.position)
end
time_span = SdlTimeSpan.new(
time_span_with_zone.day,
time_span_with_zone.hour,
time_span_with_zone.min,
time_span_with_zone.sec)
tag.set_attribute(name_or_namespace, time_span)
else
tag.set_attribute(name_or_namespace, value);
end
end
else
expecting_but_got(
"\":\" or \"=\"", token.type, token.line, token.position)
end
i += 1
end
end
# Combines a simple Date with a TimeSpanWithZone to create a DateTime
#
def combine(date, time_span_with_zone)
time_zone_offset = time_span_with_zone.time_zone_offset
time_zone_offset = TimeSpanWithZone.default_time_zone_offset if time_zone_offset.nil?
new_date_time(
date.year,
date.month,
date.day,
time_span_with_zone.hour,
time_span_with_zone.min,
time_span_with_zone.sec,
time_zone_offset)
end
private
############################################################################
## Parsers for types
############################################################################
def Parser.parse_string(literal)
unless literal =~ /(^`.*`$)|(^\".*\"$)/m
raise ArgumentError,
"Malformed string <#{literal}>." +
" Strings must start and end with \" or `"
end
return literal[1..-2]
end
def Parser.parse_character(literal)
unless literal =~ /(^'.*'$)/
raise ArgumentError,
"Malformed character <#{literal}>." +
" Character must start and end with single quotes"
end
return literal[1]
end
def Parser.parse_number(literal)
# we use the fact that Kernel.Integer() and Kernel.Float() raise ArgumentErrors
if literal =~ /(.*)(L)$/i
return Integer($1)
elsif literal =~ /([^BDF]*)(BD)$/i
return BigDecimal($1)
elsif literal =~ /([^BDF]*)(F|D)$/i
return Float($1)
elsif literal.count(".e") == 0
return Integer(literal)
else
return Float(literal)
end
end
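    # Illustrative conversions based on the suffix rules above (not part of the
    # original source):
    #   Parser.parse_number("42")    #=> 42
    #   Parser.parse_number("42L")   #=> 42
    #   Parser.parse_number("1.5F")  #=> 1.5
    #   Parser.parse_number("1.5BD") #=> BigDecimal("1.5")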
# Parses the given literal into a returned array
# [days, hours, minutes, seconds, time_zone_offset].
# 'days', 'hours' and 'minutes' are integers.
# 'seconds' and 'time_zone_offset' are rational numbers.
# 'days' and 'seconds' are equal to 0 if they're not specified in ((|literal|)).
# 'time_zone_offset' is equal to nil if not specified.
#
# ((|allowDays|)) indicates whether the specification of days is allowed
# in ((|literal|))
# ((|allowTimeZone|)) indicates whether the specification of the timeZone is
# allowed in ((|literal|))
#
# All components are returned disregarding the values of ((|allowDays|)) and
# ((|allowTimeZone|)).
#
# Raises an ArgumentError if ((|literal|)) has a bad format.
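    # A couple of illustrative results (formats inferred from the regexps below):
    #   parse_time_span_and_time_zone("12:14:34", false, true) #=> [0, 12, 14, 34, nil]
    #   parse_time_span_and_time_zone("2d:03:00", true, false) #=> [2, 3, 0, 0, nil]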
def Parser.parse_time_span_and_time_zone(literal, allowDays, allowTimeZone)
overall_sign = (literal =~ /^-/)? -1 : +1
if literal =~ /^(([+\-]?\d+)d:)/
if allowDays
days = Integer($2)
days_specified = true
time_part = literal[($1.length)..-1]
else
# detected a day specification in a pure time literal
raise ArgumentError, "unexpected day specification in #{literal}"
end
else
days = 0;
days_specified = false
time_part = literal
end
# We have to parse the string ourselves because AFAIK :
# - strptime() can't parse milliseconds
# - strptime() can't parse the time zone custom offset (CET+02:30)
# - strptime() accepts trailing chars
# (e.g. "12:24-xyz@" ==> "xyz@" is obviously wrong but strptime()
# won't mind)
if time_part =~ /^([+-]?\d+):(\d+)(?::(\d+)(?:\.(\d+))?)?(?:(?:-([a-zA-Z]+))?(?:([\+\-]\d+)(?::(\d+))?)?)?$/i
hours = $1.to_i
minutes = $2.to_i
# seconds and milliseconds are implemented as one rational number
# unless there are no milliseconds
millisecond_part = ($4)? $4.ljust(3, "0") : nil
if millisecond_part
seconds = Rational(($3 + millisecond_part).to_i, 10 ** millisecond_part.length)
else
seconds = ($3)? Integer($3) : 0
end
if ($5 or $6) and not allowTimeZone
raise ArgumentError, "unexpected time zone specification in #{literal}"
end
time_zone_code = $5 # might be nil
if $6
zone_custom_minute_offset = $6.to_i * 60
if $7
if zone_custom_minute_offset > 0
zone_custom_minute_offset = zone_custom_minute_offset + $7.to_i
else
zone_custom_minute_offset = zone_custom_minute_offset - $7.to_i
end
end
end
time_zone_offset = get_time_zone_offset(time_zone_code, zone_custom_minute_offset)
if not allowDays and $1 =~ /^[+-]/
# unexpected timeSpan syntax
raise ArgumentError, "unexpected sign on hours : #{literal}"
end
# take the sign into account
hours *= overall_sign if days_specified # otherwise the sign is already applied to the hours
minutes *= overall_sign
seconds *= overall_sign
return [ days, hours, minutes, seconds, time_zone_offset ]
else
raise ArgumentError, "bad time component : #{literal}"
end
end
# Parses the given literal (String) into a returned DateTime object.
#
# Raises an ArgumentError if ((|literal|)) has a bad format.
def Parser.parse_date_time(literal)
raise ArgumentError("date literal is nil") if literal.nil?
begin
parts = literal.split(" ")
if parts.length == 1
return parse_date(literal)
else
date = parse_date(parts[0]);
time_part = parts[1]
days, hours, minutes, seconds, time_zone_offset =
parse_time_span_and_time_zone(time_part, false, true)
return new_date_time(
date.year, date.month, date.day, hours, minutes, seconds, time_zone_offset)
end
rescue ArgumentError
raise ArgumentError, "Bad date/time #{literal} : #{$!.message}"
end
end
##
# Returns the time zone offset (Rational) corresponding to the provided parameters as a fraction
# of a day. This method adds the two offsets if they are both provided.
#
# +time_zone_code+: can be nil
# +custom_minute_offset+: can be nil
#
def Parser.get_time_zone_offset(time_zone_code, custom_minute_offset)
return nil unless time_zone_code or custom_minute_offset
time_zone_offset = custom_minute_offset ? Rational(custom_minute_offset, 60 * 24) : 0
return time_zone_offset unless time_zone_code
# we have to provide some bogus year/month/day in order to parse our time zone code
d = DateTime.strptime("1999/01/01 #{time_zone_code}", "%Y/%m/%d %Z")
# the offset is a fraction of a day
return d.offset() + time_zone_offset
end
# Parses the +literal+ into a returned Date object.
#
# Raises an ArgumentError if +literal+ has a bad format.
def Parser.parse_date(literal)
# here, we're being stricter than strptime() alone as we forbid trailing chars
if literal =~ /^(\d+)\/(\d+)\/(\d+)$/
begin
return Date.strptime(literal, "%Y/%m/%d")
rescue ArgumentError
raise ArgumentError, "Malformed Date <#{literal}> : #{$!.message}"
end
end
raise ArgumentError, "Malformed Date <#{literal}>"
end
# Returns a String that contains the binary content corresponding to ((|literal|)).
#
# ((|literal|)) : a base-64 encoded literal (e.g.
# "[V2hvIHdhbnRzIHRvIGxpdmUgZm9yZXZlcj8=]")
def Parser.parse_binary(literal)
clean_literal = literal[1..-2] # remove square brackets
return SdlBinary.decode64(clean_literal)
end
    # Parses +literal+ (String) into the corresponding SdlTimeSpan, which is then
# returned.
#
# Raises an ArgumentError if the literal is not a correct timeSpan literal.
def Parser.parse_time_span(literal)
days, hours, minutes, seconds, time_zone_offset =
parse_time_span_and_time_zone(literal, true, false)
milliseconds = ((seconds - seconds.to_i) * 1000).to_i
seconds = seconds.to_i
      return SdlTimeSpan.new(days, hours, minutes, seconds, milliseconds)
end
# Close the reader and throw a SdlParseError using the format
# Was expecting X but got Y.
#
def expecting_but_got(expecting, got, line, position)
@tokenizer.expecting_but_got(expecting, got, line, position)
end
end
|
xing/beetle | lib/beetle/publisher.rb | Beetle.Publisher.rpc | ruby | def rpc(message_name, data, opts={}) #:nodoc:
opts = @client.messages[message_name].merge(opts.symbolize_keys)
exchange_name = opts.delete(:exchange)
opts.delete(:queue)
recycle_dead_servers unless @dead_servers.empty?
tries = @servers.size
logger.debug "Beetle: performing rpc with message #{message_name}"
result = nil
status = "TIMEOUT"
begin
select_next_server
bind_queues_for_exchange(exchange_name)
        # create a non-durable, auto-deleted temporary queue with a server-assigned name
queue = bunny.queue
opts = Message.publishing_options(opts.merge :reply_to => queue.name)
logger.debug "Beetle: trying to send #{message_name}:#{opts[:message_id]} to #{@server}"
exchange(exchange_name).publish(data, opts)
logger.debug "Beetle: message sent!"
logger.debug "Beetle: listening on reply queue #{queue.name}"
queue.subscribe(:message_max => 1, :timeout => opts[:timeout] || RPC_DEFAULT_TIMEOUT) do |msg|
logger.debug "Beetle: received reply!"
result = msg[:payload]
status = msg[:header].properties[:headers][:status]
end
logger.debug "Beetle: rpc complete!"
rescue *bunny_exceptions => e
stop!(e)
mark_server_dead
tries -= 1
retry if tries > 0
logger.error "Beetle: message could not be delivered: #{message_name}"
end
[status, result]
end | :nodoc: | train | https://github.com/xing/beetle/blob/42322edc78e6e181b3b9ee284c3b00bddfc89108/lib/beetle/publisher.rb#L103-L136 | class Publisher < Base
attr_reader :dead_servers
def initialize(client, options = {}) #:nodoc:
super
@exchanges_with_bound_queues = {}
@dead_servers = {}
@bunnies = {}
at_exit { stop }
end
# list of exceptions potentially raised by bunny
# these need to be lazy, because qrack exceptions are only defined after a connection has been established
def bunny_exceptions
[
Bunny::ConnectionError, Bunny::ForcedChannelCloseError, Bunny::ForcedConnectionCloseError,
Bunny::MessageError, Bunny::ProtocolError, Bunny::ServerDownError, Bunny::UnsubscribeError,
Bunny::AcknowledgementError, Qrack::BufferOverflowError, Qrack::InvalidTypeError,
Errno::EHOSTUNREACH, Errno::ECONNRESET, Timeout::Error
]
end
def publish(message_name, data, opts={}) #:nodoc:
ActiveSupport::Notifications.instrument('publish.beetle') do
opts = @client.messages[message_name].merge(opts.symbolize_keys)
exchange_name = opts.delete(:exchange)
opts.delete(:queue)
recycle_dead_servers unless @dead_servers.empty?
if opts[:redundant]
publish_with_redundancy(exchange_name, message_name, data, opts)
else
publish_with_failover(exchange_name, message_name, data, opts)
end
end
end
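  # Illustrative call (assumes a client configured with a message named
  # :order_created; the name and payload are made up):
  #   publisher.publish(:order_created, order.to_json, :redundant => true)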
def publish_with_failover(exchange_name, message_name, data, opts) #:nodoc:
tries = @servers.size * 2
logger.debug "Beetle: sending #{message_name}"
published = 0
opts = Message.publishing_options(opts)
begin
select_next_server if tries.even?
bind_queues_for_exchange(exchange_name)
logger.debug "Beetle: trying to send message #{message_name}:#{opts[:message_id]} to #{@server}"
exchange(exchange_name).publish(data, opts)
logger.debug "Beetle: message sent!"
published = 1
rescue *bunny_exceptions => e
stop!(e)
tries -= 1
# retry same server on receiving the first exception for it (might have been a normal restart)
# in this case you'll see either a broken pipe or a forced connection shutdown error
retry if tries.odd?
mark_server_dead
retry if tries > 0
logger.error "Beetle: message could not be delivered: #{message_name}"
raise NoMessageSent.new
end
published
end
def publish_with_redundancy(exchange_name, message_name, data, opts) #:nodoc:
if @servers.size < 2
logger.warn "Beetle: at least two active servers are required for redundant publishing" if @dead_servers.size > 0
return publish_with_failover(exchange_name, message_name, data, opts)
end
published = []
opts = Message.publishing_options(opts)
loop do
break if published.size == 2 || @servers.empty? || published == @servers
tries = 0
select_next_server
begin
next if published.include? @server
bind_queues_for_exchange(exchange_name)
logger.debug "Beetle: trying to send #{message_name}:#{opts[:message_id]} to #{@server}"
exchange(exchange_name).publish(data, opts)
published << @server
logger.debug "Beetle: message sent (#{published})!"
rescue *bunny_exceptions => e
stop!(e)
retry if (tries += 1) == 1
mark_server_dead
end
end
case published.size
when 0
logger.error "Beetle: message could not be delivered: #{message_name}"
raise NoMessageSent.new
when 1
logger.warn "Beetle: failed to send message redundantly"
end
published.size
end
RPC_DEFAULT_TIMEOUT = 10 #:nodoc:
def purge(queue_names) #:nodoc:
each_server do
queue_names.each do |name|
queue(name).purge rescue nil
end
end
end
def setup_queues_and_policies(queue_names) #:nodoc:
each_server do
queue_names.each do |name|
queue(name, create_policies: true)
end
end
end
def stop #:nodoc:
each_server { stop! }
end
private
def bunny
@bunnies[@server] ||= new_bunny
end
def bunny?
@bunnies[@server]
end
def new_bunny
b = Bunny.new(
:host => current_host,
:port => current_port,
:logging => !!@options[:logging],
:user => @client.config.user,
:pass => @client.config.password,
:vhost => @client.config.vhost,
:frame_max => @client.config.frame_max,
:channel_max => @client.config.channel_max,
:socket_timeout => @client.config.publishing_timeout,
:connect_timeout => @client.config.publisher_connect_timeout,
:spec => '09')
b.start
b
end
# retry dead servers after ignoring them for 10.seconds
# if all servers are dead, retry the one which has been dead for the longest time
def recycle_dead_servers
recycle = []
@dead_servers.each do |s, dead_since|
recycle << s if dead_since < 10.seconds.ago
end
if recycle.empty? && @servers.empty?
recycle << @dead_servers.keys.sort_by{|k| @dead_servers[k]}.first
end
@servers.concat recycle
recycle.each {|s| @dead_servers.delete(s)}
end
def mark_server_dead
logger.info "Beetle: server #{@server} down: #{$!}"
@dead_servers[@server] = Time.now
@servers.delete @server
@server = @servers[rand @servers.size]
end
def select_next_server
if @servers.empty?
logger.error("Beetle: no server available")
else
set_current_server(@servers[((@servers.index(@server) || 0)+1) % @servers.size])
end
end
def create_exchange!(name, opts)
bunny.exchange(name, opts)
end
def bind_queues_for_exchange(exchange_name)
return if @exchanges_with_bound_queues.include?(exchange_name)
@client.exchanges[exchange_name][:queues].each {|q| queue(q) }
@exchanges_with_bound_queues[exchange_name] = true
end
# TODO: Refactor, fetch the keys and stuff itself
def bind_queue!(queue_name, creation_keys, exchange_name, binding_keys, create_policies: false)
logger.debug("Beetle: creating queue with opts: #{creation_keys.inspect}")
queue = bunny.queue(queue_name, creation_keys)
if create_policies
@dead_lettering.bind_dead_letter_queues!(bunny, @client.servers, queue_name, creation_keys)
end
logger.debug("Beetle: binding queue #{queue_name} to #{exchange_name} with opts: #{binding_keys.inspect}")
queue.bind(exchange(exchange_name), binding_keys)
queue
end
def stop!(exception=nil)
return unless bunny?
timeout = @client.config.publishing_timeout + @client.config.publisher_connect_timeout + 1
Beetle::Timer.timeout(timeout) do
logger.debug "Beetle: closing connection from publisher to #{server}"
if exception
bunny.__send__ :close_socket
else
bunny.stop
end
end
rescue Exception => e
logger.warn "Beetle: error closing down bunny: #{e}"
Beetle::reraise_expectation_errors!
ensure
@bunnies[@server] = nil
@exchanges[@server] = {}
@queues[@server] = {}
end
end
|
nylas/nylas-ruby | lib/nylas/collection.rb | Nylas.Collection.where | ruby | def where(filters)
raise ModelNotFilterableError, model unless model.filterable?
self.class.new(model: model, api: api, constraints: constraints.merge(where: filters))
end | Merges in additional filters when querying the collection
@return [Collection<Model>] | train | https://github.com/nylas/nylas-ruby/blob/5453cf9b2e9d80ee55e38ff5a6c8b19b8d5c262d/lib/nylas/collection.rb#L30-L34 | class Collection
attr_accessor :model, :api, :constraints
extend Forwardable
def_delegators :each, :map, :select, :reject, :to_a, :take
def_delegators :to_a, :first, :last, :[]
def initialize(model:, api:, constraints: nil)
self.constraints = Constraints.from_constraints(constraints)
self.model = model
self.api = api
end
# Instantiates a new model
def new(**attributes)
model.new(attributes.merge(api: api))
end
def create(**attributes)
instance = model.new(attributes.merge(api: api))
instance.save
instance
end
# Merges in additional filters when querying the collection
# @return [Collection<Model>]
def search(query)
raise ModelNotSearchableError, model unless model.searchable?
SearchCollection.new(model: model, api: api, constraints: constraints.merge(where: { q: query }))
end
# The collection now returns a string representation of the model in a particular mime type instead of
# Model objects
# @return [Collection<String>]
def raw
raise ModelNotAvailableAsRawError, model unless model.exposable_as_raw?
self.class.new(model: model, api: api, constraints: constraints.merge(accept: model.raw_mime_type))
end
# @return [Integer]
def count
self.class.new(model: model, api: api, constraints: constraints.merge(view: "count")).execute[:count]
end
# @return [Collection<Model>]
def expanded
self.class.new(model: model, api: api, constraints: constraints.merge(view: "expanded"))
end
# @return [Array<String>]
def ids
self.class.new(model: model, api: api, constraints: constraints.merge(view: "ids")).execute
end
# Iterates over a single page of results based upon current pagination settings
def each
return enum_for(:each) unless block_given?
execute.each do |result|
yield(model.new(result.merge(api: api)))
end
end
def limit(quantity)
self.class.new(model: model, api: api, constraints: constraints.merge(limit: quantity))
end
def offset(start)
self.class.new(model: model, api: api, constraints: constraints.merge(offset: start))
end
# Iterates over every result that meets the filters, retrieving a page at a time
def find_each
return enum_for(:find_each) unless block_given?
query = self
accumulated = 0
while query
results = query.each do |instance|
yield(instance)
end
accumulated += results.length
query = query.next_page(accumulated: accumulated, current_page: results)
end
end
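  # Illustrative usage (the filter is hypothetical):
  #   collection.where(unread: true).find_each { |model| puts model.id }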
def next_page(accumulated:, current_page:)
return nil unless more_pages?(accumulated, current_page)
self.class.new(model: model, api: api, constraints: constraints.next_page)
end
def more_pages?(accumulated, current_page)
return false if current_page.empty?
return false if constraints.limit && accumulated >= constraints.limit
return false if constraints.per_page && current_page.length < constraints.per_page
true
end
# Retrieves a record. Nylas doesn't support where filters on GET so this will not take into
# consideration other query constraints, such as where clauses.
def find(id)
constraints.accept == "application/json" ? find_model(id) : find_raw(id)
end
def find_raw(id)
api.execute(to_be_executed.merge(path: "#{resources_path}/#{id}")).to_s
end
def resources_path
model.resources_path(api: api)
end
def find_model(id)
instance = model.from_hash({ id: id }, api: api)
instance.reload
instance
end
# @return [Hash] Specification for request to be passed to {API#execute}
def to_be_executed
{ method: :get, path: resources_path, query: constraints.to_query,
headers: constraints.to_headers }
end
# Retrieves the data from the API for the particular constraints
# @return [Hash,Array]
def execute
api.execute(to_be_executed)
end
end
|
wvanbergen/request-log-analyzer | lib/request_log_analyzer/file_format.rb | RequestLogAnalyzer::FileFormat.CommonRegularExpressions.add_blank_option | ruby | def add_blank_option(regexp, blank)
case blank
when String then Regexp.union(regexp, Regexp.new(Regexp.quote(blank)))
when true then Regexp.union(regexp, //)
else regexp
end
end | Allow the field to be blank if this option is given. This can be true to
allow an empty string or a string alternative for the nil value. | train | https://github.com/wvanbergen/request-log-analyzer/blob/b83865d440278583ac8e4901bb33878244fd7c75/lib/request_log_analyzer/file_format.rb#L184-L190 | module CommonRegularExpressions
TIMESTAMP_PARTS = {
'a' => '(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun)',
'b' => '(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)',
'y' => '\d{2}', 'Y' => '\d{4}', 'm' => '\d{2}', 'd' => '\d{2}',
'H' => '\d{2}', 'M' => '\d{2}', 'S' => '\d{2}', 'k' => '(?:\d| )\d',
'z' => '(?:[+-]\d{4}|[A-Z]{3,4})',
'Z' => '(?:[+-]\d{4}|[A-Z]{3,4})',
'%' => '%'
}
# Creates a regular expression to match a hostname
def hostname(blank = false)
regexp = /(?:(?:[a-zA-Z]|[a-zA-Z][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*(?:[A-Za-z]|[A-Za-z][A-Za-z0-9\-]*[A-Za-z0-9])/
add_blank_option(regexp, blank)
end
# Creates a regular expression to match a hostname or ip address
def hostname_or_ip_address(blank = false)
regexp = Regexp.union(hostname, ip_address)
add_blank_option(regexp, blank)
end
# Create a regular expression for a timestamp, generated by a strftime call.
# Provide the format string to construct a matching regular expression.
    # Set blank to true to allow an empty string, or set blank to a string to set
# a substitute for the nil value.
def timestamp(format_string, blank = false)
regexp = ''
format_string.scan(/([^%]*)(?:%([A-Za-z%]))?/) do |literal, variable|
regexp << Regexp.quote(literal)
if variable
if TIMESTAMP_PARTS.key?(variable)
regexp << TIMESTAMP_PARTS[variable]
else
fail "Unknown variable: %#{variable}"
end
end
end
add_blank_option(Regexp.new(regexp), blank)
end
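    # For instance (illustrative), timestamp('%Y-%m-%d %H:%M:%S') builds a regexp
    # that matches strings like "2009-03-14 06:45:01".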
# Construct a regular expression to parse IPv4 and IPv6 addresses.
#
# Allow nil values if the blank option is given. This can be true to
    # allow an empty string or a string substitute for the nil value.
def ip_address(blank = false)
# IP address regexp copied from Resolv::IPv4 and Resolv::IPv6,
# but adjusted to work for the purpose of request-log-analyzer.
ipv4_regexp = /\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/
ipv6_regex_8_hex = /(?:[0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4}/
ipv6_regex_compressed_hex = /(?:(?:[0-9A-Fa-f]{1,4}(?::[0-9A-Fa-f]{1,4})*)?)::(?:(?:[0-9A-Fa-f]{1,4}(?::[0-9A-Fa-f]{1,4})*)?)/
ipv6_regex_6_hex_4_dec = /(?:(?:[0-9A-Fa-f]{1,4}:){6})#{ipv4_regexp}/
ipv6_regex_compressed_hex_4_dec = /(?:(?:[0-9A-Fa-f]{1,4}(?::[0-9A-Fa-f]{1,4})*)?)::(?:(?:[0-9A-Fa-f]{1,4}:)*)#{ipv4_regexp}/
ipv6_regexp = Regexp.union(ipv6_regex_8_hex, ipv6_regex_compressed_hex, ipv6_regex_6_hex_4_dec, ipv6_regex_compressed_hex_4_dec)
add_blank_option(Regexp.union(ipv4_regexp, ipv6_regexp), blank)
end
def anchored(regexp)
/^#{regexp}$/
end
protected
# Allow the field to be blank if this option is given. This can be true to
# allow an empty string or a string alternative for the nil value.
end
|
berkshelf/solve | lib/solve/ruby_solver.rb | Solve.RubySolver.resolve | ruby | def resolve(options = {})
@ui = options[:ui] if options[:ui]
solved_graph = resolve_with_error_wrapping
solution = solved_graph.map(&:payload)
unsorted_solution = solution.inject({}) do |stringified_soln, artifact|
stringified_soln[artifact.name] = artifact.version.to_s
stringified_soln
end
if options[:sorted]
build_sorted_solution(unsorted_solution)
else
unsorted_solution
end
end | @option options [Boolean] :sorted
return the solution as a sorted list instead of a Hash
@return [Hash, List] Returns a hash like { "Artifact Name" => "Version",... }
  unless the :sorted option is true, then it returns a list like [["Artifact Name", "Version"],...]
@raise [Errors::NoSolutionError] when the demands cannot be met for the
given graph.
@raise [Errors::UnsortableSolutionError] when the :sorted option is true
and the demands have a solution, but the solution contains a cyclic
dependency | train | https://github.com/berkshelf/solve/blob/a0e03ede13e2f66b8dd6d0d34c9c9db70fba94d2/lib/solve/ruby_solver.rb#L73-L90 | class RubySolver
class << self
    # The timeout (in seconds) to use when resolving graphs. Default is 30. This can be
# configured by setting the SOLVE_TIMEOUT environment variable.
#
# @return [Integer]
def timeout
seconds = 30 unless ( seconds = ENV["SOLVE_TIMEOUT"] )
seconds.to_i * 1_000
end
    # For optional solver engines, this attempts to load dependencies. The
# RubySolver is a non-optional component, so this is a no-op
def activate
true
end
end
# Graph object with references to all known artifacts and dependency
# constraints.
#
# @return [Solve::Graph]
attr_reader :graph
# @example Demands are Arrays of Arrays with an artifact name and optional constraint:
# [['nginx', '= 1.0.0'], ['mysql']]
# @return [Array<String>, Array<Array<String, String>>] demands
attr_reader :demands_array
# @example Basic use:
# graph = Solve::Graph.new
# graph.artifacts("mysql", "1.2.0")
# demands = [["mysql"]]
# RubySolver.new(graph, demands)
# @param [Solve::Graph] graph
# @param [Array<String>, Array<Array<String, String>>] demands
def initialize(graph, demands, options = {})
@graph = graph
@demands_array = demands
@timeout_ms = self.class.timeout
@ui = options[:ui] # could be nil, but that's okay
@dependency_source = options[:dependency_source] || "user-specified dependency"
@molinillo_graph = Molinillo::DependencyGraph.new
@resolver = Molinillo::Resolver.new(self, self)
end
# The problem demands given as Demand model objects
# @return [Array<Solve::Demand>]
def demands
demands_array.map do |name, constraint|
Demand.new(self, name, constraint)
end
end
# @option options [Boolean] :sorted
# return the solution as a sorted list instead of a Hash
#
# @return [Hash, List] Returns a hash like { "Artifact Name" => "Version",... }
  #   unless the :sorted option is true, then it returns a list like [["Artifact Name", "Version"],...]
# @raise [Errors::NoSolutionError] when the demands cannot be met for the
# given graph.
# @raise [Errors::UnsortableSolutionError] when the :sorted option is true
# and the demands have a solution, but the solution contains a cyclic
# dependency
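  # @example Resolving a demand set (artifact names and versions are illustrative)
  #   solver = RubySolver.new(graph, [["mysql", ">= 1.0.0"]])
  #   solver.resolve #=> { "mysql" => "1.2.0" }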
###
# Molinillo Callbacks
#
# Molinillo calls back to this class to get information about our
# dependency model objects. An abstract implementation is provided at
# https://github.com/CocoaPods/Molinillo/blob/master/lib/molinillo/modules/specification_provider.rb
#
###
# Callback required by Molinillo, called when the solve starts
# @return [Integer]
def progress_rate
1
end
# Callback required by Molinillo, called when the solve starts
# @return nil
def before_resolution
@ui.say("Starting dependency resolution") if @ui
end
# Callback required by Molinillo, called when the solve is complete.
# @return nil
def after_resolution
@ui.say("Finished dependency resolution") if @ui
end
# Callback required by Molinillo, called when resolving every progress_rate
# @return nil
def indicate_progress
nil
end
# Callback required by Molinillo, gives debug information about the solution
# @return nil
def debug(current_resolver_depth = 0)
# debug info will be returned if you call yield here, but it seems to be
# broken in current Molinillo
@ui.say(yield) if @ui
end
include Molinillo::SpecificationProvider
# Callback required by Molinillo
# Search for the specifications that match the given dependency.
# The specifications in the returned array will be considered in reverse
# order, so the latest version ought to be last.
# @note This method should be 'pure', i.e. the return value should depend
# only on the `dependency` parameter.
#
# @param [Object] dependency
# @return [Array<Solve::Artifact>] the artifacts that match the dependency.
def search_for(dependency)
# This array gets mutated by Molinillo; it's okay because sort returns a
# new array.
graph.versions(dependency.name, dependency.constraint).sort
end
# Callback required by Molinillo
# Returns the dependencies of `specification`.
# @note This method should be 'pure', i.e. the return value should depend
# only on the `specification` parameter.
#
# @param [Object] specification
# @return [Array<Solve::Dependency>] the dependencies of the given artifact
def dependencies_for(specification)
specification.dependencies
end
# Callback required by Molinillo
# Determines whether the given `requirement` is satisfied by the given
# `spec`, in the context of the current `activated` dependency graph.
#
# @param [Object] requirement
# @param [DependencyGraph] activated the current dependency graph in the
# resolution process.
# @param [Object] spec
# @return [Boolean] whether `requirement` is satisfied by `spec` in the
# context of the current `activated` dependency graph.
def requirement_satisfied_by?(requirement, activated, spec)
version = spec.version
return false unless requirement.constraint.satisfies?(version)
shared_possibility_versions = possibility_versions(requirement, activated)
return false if !shared_possibility_versions.empty? && !shared_possibility_versions.include?(version)
true
end
# Searches the current dependency graph to find previously activated
# requirements for the current artifact.
#
# @param [Object] requirement
# @param [DependencyGraph] activated the current dependency graph in the
# resolution process.
# @return [Array<Semverse::Version> the list of currently activated versions
# of this requirement
def possibility_versions(requirement, activated)
activated.vertices.values.flat_map do |vertex|
next unless vertex.payload
next unless vertex.name == requirement.name
if vertex.payload.respond_to?(:possibilities)
vertex.payload.possibilities.map(&:version)
else
vertex.payload.version
end
end.compact
end
private :possibility_versions
# Callback required by Molinillo
# Returns the name for the given `dependency`.
# @note This method should be 'pure', i.e. the return value should depend
# only on the `dependency` parameter.
#
# @param [Object] dependency
# @return [String] the name for the given `dependency`.
def name_for(dependency)
dependency.name
end
# Callback required by Molinillo
# @return [String] the name of the source of explicit dependencies, i.e.
# those passed to {Resolver#resolve} directly.
def name_for_explicit_dependency_source
@dependency_source
end
# Callback required by Molinillo
# Sort dependencies so that the ones that are easiest to resolve are first.
# Easiest to resolve is (usually) defined by:
# 1) Is this dependency already activated?
# 2) How relaxed are the requirements?
# 3) Are there any conflicts for this dependency?
# 4) How many possibilities are there to satisfy this dependency?
#
# @param [Array<Object>] dependencies
# @param [DependencyGraph] activated the current dependency graph in the
# resolution process.
# @param [{String => Array<Conflict>}] conflicts
# @return [Array<Solve::Dependency>] the dependencies sorted by preference.
def sort_dependencies(dependencies, activated, conflicts)
dependencies.sort_by do |dependency|
name = name_for(dependency)
[
activated.vertex_named(name).payload ? 0 : 1,
conflicts[name] ? 0 : 1,
search_for(dependency).count,
]
end
end
# Callback required by Molinillo
# Returns whether this dependency, which has no possible matching
# specifications, can safely be ignored.
#
# @param [Object] dependency
# @return [Boolean] whether this dependency can safely be skipped.
def allow_missing?(dependency)
false
end
private
def resolve_with_error_wrapping
@resolver.resolve(demands, @molinillo_graph)
rescue Molinillo::VersionConflict, Molinillo::CircularDependencyError => e
raise Solve::Errors::NoSolutionError.new(e.message)
end
def build_sorted_solution(unsorted_solution)
nodes = Hash.new
unsorted_solution.each do |name, version|
nodes[name] = @graph.artifact(name, version).dependencies.map(&:name)
end
# Modified from http://ruby-doc.org/stdlib-1.9.3/libdoc/tsort/rdoc/TSort.html
class << nodes
include TSort
alias tsort_each_node each_key
def tsort_each_child(node, &block)
fetch(node).each(&block)
end
end
begin
sorted_names = nodes.tsort
rescue TSort::Cyclic => e
raise Solve::Errors::UnsortableSolutionError.new(e, unsorted_solution)
end
sorted_names.map do |artifact|
[artifact, unsorted_solution[artifact]]
end
end
end
|
bottleneckco/playoverwatch-scraper | lib/playoverwatch-scraper/scraper.rb | PlayOverwatch.Scraper.main_qp | ruby | def main_qp
hero_img = hidden_mains_style.content.scan(/\.quickplay {.+?url\((.+?)\);/mis).flatten.first
hero_img.scan(/\/hero\/(.+?)\/career/i).flatten.first
end | Retrieve player's main Quick Play hero, in lowercase form. | train | https://github.com/bottleneckco/playoverwatch-scraper/blob/7909bdda3cefe15a5f4718e122943146365a01e1/lib/playoverwatch-scraper/scraper.rb#L49-L52 | class Scraper
##
# Creates a scraper with a specified battle tag.
    # The +battle_tag+ can be in the hash (#) or hyphenated (-) format. It IS case sensitive.
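    # Illustrative use (the tag is made up):
    #   scraper = PlayOverwatch::Scraper.new('SomePlayer#12345')
    #   scraper.sr #=> 2500, or -1 when placements are incomplete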
def initialize(battle_tag)
@player_page = Nokogiri::HTML(open("https://playoverwatch.com/en-us/career/pc/#{battle_tag.gsub(/#/, '-')}", "User-Agent" => CHROME_USER_AGENT))
@player_data = JSON.parse open("https://playoverwatch.com/en-us/search/account-by-name/#{battle_tag.gsub(/#/, '-').gsub(/-/, '%23')}", "User-Agent" => CHROME_USER_AGENT).read
end
##
# Retrieve the player's player icon. Returns an image URL.
def player_icon
@player_page.css('img.player-portrait').first["src"]
end
##
# Retrieve a player's level
def player_level
@player_data.first['level'].to_i
end
##
# Retrieve a player's endorsement level
def endorsement_level
@player_page.css('.endorsement-level .u-center').first.content.to_i
end
##
# Retrieve a player's current competitive season ranking.
# Returns -1 if player did not complete placements.
def sr
comp_div = @player_page.css('.competitive-rank > .h5')
return -1 if comp_div.empty?
content = comp_div.first.content
content.to_i if Integer(content) rescue -1
end
##
# Retrieve player's main Quick Play hero, in lowercase form.
##
# Retrieve player's main Competitive hero, in lowercase form.
# You should check if the sr is -1 before attempting to call this.
def main_comp
hero_img = hidden_mains_style.content.scan(/\.competitive {.+?url\((.+?)\);/mis).flatten.first
hero_img.scan(/\/hero\/(.+?)\/career/i).flatten.first
end
private
def rank_map
JSON.parse File.read(File.expand_path('./ranks.json', __dir__))
end
def hidden_mains_style
@player_page.css('style').first
end
end
|
projectcypress/health-data-standards | lib/hqmf-parser/2.0/types.rb | HQMF2.TemporalReference.to_model | ruby | def to_model
rm = range ? range.to_model : nil
HQMF::TemporalReference.new(type, reference.to_model, rm)
  end | Generates this class's hqmf-model equivalent | train | https://github.com/projectcypress/health-data-standards/blob/252d4f0927c513eacde6b9ea41b76faa1423c34b/lib/hqmf-parser/2.0/types.rb#L363-L366 | class TemporalReference
include HQMF2::Utilities
attr_reader :type, :reference, :range
# Use updated mappings to HDS temporal reference types (as used in SimpleXML Parser)
# https://github.com/projecttacoma/simplexml_parser/blob/fa0f589d98059b88d77dc3cb465b62184df31671/lib/model/types.rb#L167
UPDATED_TYPES = {
'EAOCW' => 'EACW',
'EAEORECW' => 'EACW',
'EAOCWSO' => 'EACWS',
'EASORECWS' => 'EACWS',
'EBOCW' => 'EBCW',
'EBEORECW' => 'EBCW',
'EBOCWSO' => 'EBCWS',
'EBSORECWS' => 'EBCWS',
'ECWSO' => 'ECWS',
'SAOCWEO' => 'SACWE',
'SAEORSCWE' => 'SACWE',
'SAOCW' => 'SACW',
'SASORSCW' => 'SACW',
'SBOCWEO' => 'SBCWE',
'SBEORSCWE' => 'SBCWE',
'SBOCW' => 'SBCW',
'SBSORSCW' => 'SBCW',
'SCWEO' => 'SCWE',
'OVERLAPS' => 'OVERLAP'
}
def initialize(entry)
@entry = entry
@type = UPDATED_TYPES[attr_val('./@typeCode')] || attr_val('./@typeCode')
@reference = Reference.new(@entry.at_xpath('./*/cda:id', HQMF2::Document::NAMESPACES))
range_def = @entry.at_xpath('./qdm:temporalInformation/qdm:delta', HQMF2::Document::NAMESPACES)
@range = HQMF2::Range.new(range_def, 'IVL_PQ') if range_def
end
    # Generates this class's hqmf-model equivalent
end
|
sup-heliotrope/sup | lib/sup/poll.rb | Redwood.PollManager.poll_from | ruby | def poll_from source, opts={}
debug "trying to acquire poll lock for: #{source}..."
if source.try_lock
begin
source.poll do |sym, args|
case sym
when :add
m = Message.build_from_source source, args[:info]
old_m = Index.build_message m.id
m.labels += args[:labels]
m.labels.delete :inbox if source.archived?
m.labels.delete :unread if source.read?
m.labels.delete :unread if m.source_marked_read? # preserve read status if possible
m.labels.each { |l| LabelManager << l }
m.labels = old_m.labels + (m.labels - [:unread, :inbox]) if old_m
m.locations = old_m.locations + m.locations if old_m
HookManager.run "before-add-message", :message => m
yield :add, m, old_m, args[:progress] if block_given?
Index.sync_message m, true
if Index.message_joining_killed? m
m.labels += [:killed]
Index.sync_message m, true
end
## We need to add or unhide the message when it either did not exist
## before at all or when it was updated. We do *not* add/unhide when
## the same message was found at a different location
if old_m
UpdateManager.relay self, :updated, m
elsif !old_m or not old_m.locations.member? m.location
UpdateManager.relay self, :added, m
end
when :delete
Index.each_message({:location => [source.id, args[:info]]}, false) do |m|
m.locations.delete Location.new(source, args[:info])
Index.sync_message m, false
if m.locations.size == 0
yield :delete, m, [source,args[:info]], args[:progress] if block_given?
Index.delete m.id
UpdateManager.relay self, :location_deleted, m
end
end
when :update
Index.each_message({:location => [source.id, args[:old_info]]}, false) do |m|
old_m = Index.build_message m.id
m.locations.delete Location.new(source, args[:old_info])
m.locations.push Location.new(source, args[:new_info])
## Update labels that might have been modified remotely
m.labels -= source.supported_labels?
m.labels += args[:labels]
yield :update, m, old_m if block_given?
Index.sync_message m, true
UpdateManager.relay self, :updated, m
end
end
end
rescue SourceError => e
warn "problem getting messages from #{source}: #{e.message}"
ensure
source.go_idle
source.unlock
end
else
debug "source #{source} is already being polled."
end
end | like Source#poll, but yields successive Message objects, which have their
labels and locations set correctly. The Messages are saved to or removed
from the index after being yielded. | train | https://github.com/sup-heliotrope/sup/blob/36f95462e3014c354c577d63a78ba030c4b84474/lib/sup/poll.rb#L197-L265 | class PollManager
include Redwood::Singleton
HookManager.register "before-add-message", <<EOS
Executes immediately before a message is added to the index.
Variables:
message: the new message
EOS
HookManager.register "before-poll", <<EOS
Executes immediately before a poll for new messages commences.
No variables.
EOS
HookManager.register "after-poll", <<EOS
Executes immediately after a poll for new messages completes.
Variables:
num: the total number of new messages added in this poll
num_inbox: the number of new messages added in this poll which
appear in the inbox (i.e. were not auto-archived).
num_total: the total number of messages
num_inbox_total: the total number of new messages in the inbox.
num_inbox_total_unread: the total number of unread messages in the inbox
num_updated: the total number of updated messages
num_deleted: the total number of deleted messages
labels: the labels that were applied
from_and_subj: an array of (from email address, subject) pairs
from_and_subj_inbox: an array of (from email address, subject) pairs for
only those messages appearing in the inbox
EOS
def initialize
@delay = $config[:poll_interval] || 300
@mutex = Mutex.new
@thread = nil
@last_poll = nil
@polling = Mutex.new
@poll_sources = nil
@mode = nil
@should_clear_running_totals = false
clear_running_totals # defines @running_totals
UpdateManager.register self
end
def poll_with_sources
@mode ||= PollMode.new
if HookManager.enabled? "before-poll"
HookManager.run("before-poll")
else
BufferManager.flash "Polling for new messages..."
end
num, numi, numu, numd, from_and_subj, from_and_subj_inbox, loaded_labels = @mode.poll
clear_running_totals if @should_clear_running_totals
@running_totals[:num] += num
@running_totals[:numi] += numi
@running_totals[:numu] += numu
@running_totals[:numd] += numd
@running_totals[:loaded_labels] += loaded_labels || []
if HookManager.enabled? "after-poll"
hook_args = { :num => num, :num_inbox => numi,
:num_total => @running_totals[:num], :num_inbox_total => @running_totals[:numi],
:num_updated => @running_totals[:numu],
:num_deleted => @running_totals[:numd],
:labels => @running_totals[:loaded_labels],
:from_and_subj => from_and_subj, :from_and_subj_inbox => from_and_subj_inbox,
:num_inbox_total_unread => lambda { Index.num_results_for :labels => [:inbox, :unread] } }
HookManager.run("after-poll", hook_args)
else
if @running_totals[:num] > 0
flash_msg = "Loaded #{@running_totals[:num].pluralize 'new message'}, #{@running_totals[:numi]} to inbox. " if @running_totals[:num] > 0
flash_msg += "Updated #{@running_totals[:numu].pluralize 'message'}. " if @running_totals[:numu] > 0
flash_msg += "Deleted #{@running_totals[:numd].pluralize 'message'}. " if @running_totals[:numd] > 0
flash_msg += "Labels: #{@running_totals[:loaded_labels].map{|l| l.to_s}.join(', ')}." if @running_totals[:loaded_labels].size > 0
BufferManager.flash flash_msg
else
BufferManager.flash "No new messages."
end
end
end
def poll
if @polling.try_lock
@poll_sources = SourceManager.usual_sources
num, numi = poll_with_sources
@polling.unlock
[num, numi]
else
debug "poll already in progress."
return
end
end
def poll_unusual
if @polling.try_lock
@poll_sources = SourceManager.unusual_sources
num, numi = poll_with_sources
@polling.unlock
[num, numi]
else
debug "poll_unusual already in progress."
return
end
end
def start
@thread = Redwood::reporting_thread("periodic poll") do
while true
sleep @delay / 2
poll if @last_poll.nil? || (Time.now - @last_poll) >= @delay
end
end
end
def stop
@thread.kill if @thread
@thread = nil
end
def do_poll
total_num = total_numi = total_numu = total_numd = 0
from_and_subj = []
from_and_subj_inbox = []
loaded_labels = Set.new
@mutex.synchronize do
@poll_sources.each do |source|
begin
yield "Loading from #{source}... "
rescue SourceError => e
warn "problem getting messages from #{source}: #{e.message}"
next
end
msg = ""
num = numi = numu = numd = 0
poll_from source do |action,m,old_m,progress|
if action == :delete
yield "Deleting #{m.id}"
loaded_labels.merge m.labels
numd += 1
elsif action == :update
yield "Message at #{m.source_info} is an update of an old message. Updating labels from #{old_m.labels.to_a * ','} => #{m.labels.to_a * ','}"
loaded_labels.merge m.labels
numu += 1
elsif action == :add
if old_m
new_locations = (m.locations - old_m.locations)
if not new_locations.empty?
yield "Message at #{new_locations[0].info} has changed its source location. Updating labels from #{old_m.labels.to_a * ','} => #{m.labels.to_a * ','}"
numu += 1
else
yield "Skipping already-imported message at #{m.locations[-1].info}"
end
else
yield "Found new message at #{m.source_info} with labels #{m.labels.to_a * ','}"
loaded_labels.merge m.labels
num += 1
from_and_subj << [m.from && m.from.longname, m.subj]
if (m.labels & [:inbox, :spam, :deleted, :killed]) == Set.new([:inbox])
from_and_subj_inbox << [m.from && m.from.longname, m.subj]
numi += 1
end
end
else fail
end
end
msg += "Found #{num} messages, #{numi} to inbox. " unless num == 0
msg += "Updated #{numu} messages. " unless numu == 0
msg += "Deleted #{numd} messages." unless numd == 0
yield msg unless msg == ""
total_num += num
total_numi += numi
total_numu += numu
total_numd += numd
end
loaded_labels = loaded_labels - LabelManager::HIDDEN_RESERVED_LABELS - [:inbox, :killed]
yield "Done polling; loaded #{total_num} new messages total"
@last_poll = Time.now
end
[total_num, total_numi, total_numu, total_numd, from_and_subj, from_and_subj_inbox, loaded_labels]
end
## like Source#poll, but yields successive Message objects, which have their
## labels and locations set correctly. The Messages are saved to or removed
## from the index after being yielded.
def handle_idle_update sender, idle_since; @should_clear_running_totals = false; end
def handle_unidle_update sender, idle_since; @should_clear_running_totals = true; clear_running_totals; end
def clear_running_totals; @running_totals = {:num => 0, :numi => 0, :numu => 0, :numd => 0, :loaded_labels => Set.new}; end
end
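# Hypothetical sketch of consuming poll_from's yields; `source` stands in
# for a configured Redwood source, and Redwood::Singleton is assumed to
# delegate class-level calls to the instance, as elsewhere in sup.
Redwood::PollManager.poll_from(source) do |action, m, old_m, progress|
  case action
  when :add    then puts "added #{m.id}#{old_m ? ' (already indexed)' : ''}"
  when :update then puts "updated #{m.id}"
  when :delete then puts "removed a location of #{m.id}"
  end
end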
|
SamSaffron/message_bus | lib/message_bus/http_client.rb | MessageBus.HTTPClient.unsubscribe | ruby | def unsubscribe(channel, &callback)
if callback
@channels[channel].callbacks.delete(callback)
remove_channel(channel) if @channels[channel].callbacks.empty?
else
remove_channel(channel)
end
stop if @channels.empty?
@status
end | unsubscribes from a channel
@example Unsubscribing from a channel
client = MessageBus::HTTPClient.new('http://some.test.com')
callback = ->(payload) { puts payload }
client.subscribe("/test", &callback)
client.unsubscribe("/test")
If a callback is given, only the specific callback will be unsubscribed.
@example Unsubscribing a callback from a channel
client.unsubscribe("/test", &callback)
When the client does not have any channels left, it will stop polling and
wait until a new subscription is started.
@param channel [String] channel to unsubscribe
@yield [data, global_id, message_id] specific callback to unsubscribe
@return [Integer] the current status of the client | train | https://github.com/SamSaffron/message_bus/blob/90fba639eb5d332ca8e87fd35f1d603a5743076d/lib/message_bus/http_client.rb#L201-L211 | class HTTPClient
class InvalidChannel < StandardError; end
class MissingBlock < StandardError; end
attr_reader :channels,
:stats
attr_accessor :enable_long_polling,
:status,
:enable_chunked_encoding,
:min_poll_interval,
:max_poll_interval,
:background_callback_interval
CHUNK_SEPARATOR = "\r\n|\r\n".freeze
private_constant :CHUNK_SEPARATOR
STATUS_CHANNEL = "/__status".freeze
private_constant :STATUS_CHANNEL
STOPPED = 0
STARTED = 1
Stats = Struct.new(:failed, :success)
private_constant :Stats
# @param base_url [String] Base URL of the message_bus server to connect to
# @param enable_long_polling [Boolean] Enable long polling
# @param enable_chunked_encoding [Boolean] Enable chunk encoding
# @param min_poll_interval [Float, Integer] Min poll interval when long polling in seconds
# @param max_poll_interval [Float, Integer] Max poll interval when long polling in seconds.
# When requests fail, the client will backoff and this is the upper limit.
# @param background_callback_interval [Float, Integer] Interval to poll when
# when polling in seconds.
# @param headers [Hash] extra HTTP headers to be set on the polling requests.
#
# @return [Object] Instance of MessageBus::HTTPClient
def initialize(base_url, enable_long_polling: true,
enable_chunked_encoding: true,
min_poll_interval: 0.1,
max_poll_interval: 180,
background_callback_interval: 60,
headers: {})
@uri = URI(base_url)
@enable_long_polling = enable_long_polling
@enable_chunked_encoding = enable_chunked_encoding
@min_poll_interval = min_poll_interval
@max_poll_interval = max_poll_interval
@background_callback_interval = background_callback_interval
@headers = headers
@client_id = SecureRandom.hex
@channels = {}
@status = STOPPED
@mutex = Mutex.new
@stats = Stats.new(0, 0)
end
# Starts a background thread that polls the message bus endpoint
# for the given base_url.
#
# Intervals for long polling can be configured via min_poll_interval and
# max_poll_interval.
#
# Intervals for polling can be configured via background_callback_interval.
#
# @return [Object] Instance of MessageBus::HTTPClient
def start
@mutex.synchronize do
return if started?
@status = STARTED
thread = Thread.new do
begin
while started?
unless @channels.empty?
poll
@stats.success += 1
@stats.failed = 0
end
sleep interval
end
rescue StandardError => e
@stats.failed += 1
warn("#{e.class} #{e.message}: #{e.backtrace.join("\n")}")
sleep interval
retry
ensure
stop
end
end
thread.abort_on_exception = true
end
self
end
# Stops the client from polling the message bus endpoint.
#
# @return [Integer] the current status of the client
def stop
@status = STOPPED
end
# Subscribes to a channel which executes the given callback when a message
# is published to the channel
#
# @example Subscribing to a channel for message
# client = MessageBus::HTTPClient.new('http://some.test.com')
#
# client.subscribe("/test") do |payload, _message_id, _global_id|
# puts payload
# end
#
# A last_message_id may be provided.
# * -1 will subscribe to all new messages
# * -2 will receive last message + all new messages
# * -3 will receive last 2 messages + all new messages
#
# @example Subscribing to a channel with `last_message_id`
# client.subscribe("/test", last_message_id: -2) do |payload|
# puts payload
# end
#
# @param channel [String] channel to listen for messages on
# @param last_message_id [Integer] last message id to start polling on.
#
# @yield [data, message_id, global_id]
# callback to be executed whenever a message is received
#
# @yieldparam data [Hash] data payload of the message received on the channel
# @yieldparam message_id [Integer] id of the message in the channel
# @yieldparam global_id [Integer] id of the message in the global backlog
# @yieldreturn [void]
#
# @return [Integer] the current status of the client
def subscribe(channel, last_message_id: nil, &callback)
raise InvalidChannel unless channel.to_s.start_with?("/")
raise MissingBlock unless block_given?
last_message_id = -1 if last_message_id && !last_message_id.is_a?(Integer)
@channels[channel] ||= Channel.new
channel = @channels[channel]
channel.last_message_id = last_message_id if last_message_id
channel.callbacks.push(callback)
start if stopped?
end
# unsubscribes from a channel
#
# @example Unsubscribing from a channel
# client = MessageBus::HTTPClient.new('http://some.test.com')
# callback = ->(payload) { puts payload }
# client.subscribe("/test", &callback)
# client.unsubscribe("/test")
#
# If a callback is given, only the specific callback will be unsubscribed.
#
# @example Unsubscribing a callback from a channel
# client.unsubscribe("/test", &callback)
#
# When the client does not have any channels left, it will stop polling and
# wait until a new subscription is started.
#
# @param channel [String] channel to unsubscribe
# @yield [data, global_id, message_id] specific callback to unsubscribe
#
# @return [Integer] the current status of the client
private
def stopped?
@status == STOPPED
end
def started?
@status == STARTED
end
def remove_channel(channel)
@channels.delete(channel)
end
def interval
if @enable_long_polling
if (failed_count = @stats.failed) > 2
(@min_poll_interval * 2**failed_count).clamp(
@min_poll_interval, @max_poll_interval
)
else
@min_poll_interval
end
else
@background_callback_interval
end
end
def poll
http = Net::HTTP.new(@uri.host, @uri.port)
http.use_ssl = true if @uri.scheme == 'https'
request = Net::HTTP::Post.new(request_path, headers)
request.body = poll_payload
if @enable_long_polling
buffer = ''
http.request(request) do |response|
response.read_body do |chunk|
unless chunk.empty?
buffer << chunk
process_buffer(buffer)
end
end
end
else
response = http.request(request)
notify_channels(JSON.parse(response.body))
end
end
def is_chunked?
!headers["Dont-Chunk"]
end
def process_buffer(buffer)
index = buffer.index(CHUNK_SEPARATOR)
if is_chunked?
return unless index
messages = buffer[0..(index - 1)]
buffer.slice!("#{messages}#{CHUNK_SEPARATOR}")
else
messages = buffer[0..-1]
buffer.slice!(messages)
end
notify_channels(JSON.parse(messages))
end
def notify_channels(messages)
messages.each do |message|
current_channel = message['channel']
if current_channel == STATUS_CHANNEL
message["data"].each do |channel_name, last_message_id|
if (channel = @channels[channel_name])
channel.last_message_id = last_message_id
end
end
else
@channels.each do |channel_name, channel|
next unless channel_name == current_channel
channel.last_message_id = message['message_id']
channel.callbacks.each do |callback|
callback.call(
message['data'],
channel.last_message_id,
message['global_id']
)
end
end
end
end
end
def poll_payload
payload = {}
@channels.each do |channel_name, channel|
payload[channel_name] = channel.last_message_id
end
payload.to_json
end
def request_path
"/message-bus/#{@client_id}/poll"
end
def headers
headers = {}
headers['Content-Type'] = 'application/json'
headers['X-Silence-logger'] = 'true'
if !@enable_long_polling || !@enable_chunked_encoding
headers['Dont-Chunk'] = 'true'
end
headers.merge!(@headers)
end
end
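# Illustrative sketch (plain arithmetic, no network) of the exponential
# backoff #interval applies once more than two consecutive polls have
# failed: the wait doubles per failure, clamped to the configured bounds.
min, max = 0.1, 180
(3..12).each do |failures|
  puts format("%2d failures -> %gs", failures, (min * 2**failures).clamp(min, max))
end
# 3 failures -> 0.8s, 4 -> 1.6s, ... capped at 180s from 11 failures on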
|
sup-heliotrope/sup | lib/sup/modes/thread_index_mode.rb | Redwood.ThreadIndexMode.actually_toggle_deleted | ruby | def actually_toggle_deleted t
if t.has_label? :deleted
t.remove_label :deleted
add_or_unhide t.first
UpdateManager.relay self, :undeleted, t.first
lambda do
t.apply_label :deleted
hide_thread t
UpdateManager.relay self, :deleted, t.first
end
else
t.apply_label :deleted
hide_thread t
UpdateManager.relay self, :deleted, t.first
lambda do
t.remove_label :deleted
add_or_unhide t.first
UpdateManager.relay self, :undeleted, t.first
end
end
end | returns an undo lambda | train | https://github.com/sup-heliotrope/sup/blob/36f95462e3014c354c577d63a78ba030c4b84474/lib/sup/modes/thread_index_mode.rb#L377-L397 | class ThreadIndexMode < LineCursorMode
DATE_WIDTH = Time::TO_NICE_S_MAX_LEN
MIN_FROM_WIDTH = 15
LOAD_MORE_THREAD_NUM = 20
HookManager.register "index-mode-size-widget", <<EOS
Generates the per-thread size widget for each thread.
Variables:
thread: The message thread to be formatted.
EOS
HookManager.register "index-mode-date-widget", <<EOS
Generates the per-thread date widget for each thread.
Variables:
thread: The message thread to be formatted.
EOS
HookManager.register "mark-as-spam", <<EOS
This hook is run when a thread is marked as spam
Variables:
thread: The message thread being marked as spam.
EOS
register_keymap do |k|
k.add :load_threads, "Load #{LOAD_MORE_THREAD_NUM} more threads", 'M'
k.add_multi "Load all threads (! to confirm) :", '!' do |kk|
kk.add :load_all_threads, "Load all threads (may list a _lot_ of threads)", '!'
end
k.add :read_and_archive, "Archive thread (remove from inbox) and mark read", 'A'
k.add :cancel_search, "Cancel current search", :ctrl_g
k.add :reload, "Refresh view", '@'
k.add :toggle_archived, "Toggle archived status", 'a'
k.add :toggle_starred, "Star or unstar all messages in thread", '*'
k.add :toggle_new, "Toggle new/read status of all messages in thread", 'N'
k.add :edit_labels, "Edit or add labels for a thread", 'l'
k.add :edit_message, "Edit message (drafts only)", 'e'
k.add :toggle_spam, "Mark/unmark thread as spam", 'S'
k.add :toggle_deleted, "Delete/undelete thread", 'd'
k.add :kill, "Kill thread (never to be seen in inbox again)", '&'
k.add :flush_index, "Flush all changes now", '$'
k.add :jump_to_next_new, "Jump to next new thread", :tab
k.add :reply, "Reply to latest message in a thread", 'r'
k.add :reply_all, "Reply to all participants of the latest message in a thread", 'G'
k.add :forward, "Forward latest message in a thread", 'f'
k.add :toggle_tagged, "Tag/untag selected thread", 't'
k.add :toggle_tagged_all, "Tag/untag all threads", 'T'
k.add :tag_matching, "Tag matching threads", 'g'
k.add :apply_to_tagged, "Apply next command to all tagged threads", '+', '='
k.add :join_threads, "Force tagged threads to be joined into the same thread", '#'
k.add :undo, "Undo the previous action", 'u'
end
def initialize hidden_labels=[], load_thread_opts={}
super()
@mutex = Mutex.new # covers the following variables:
@threads = []
@hidden_threads = {}
@size_widget_width = nil
@size_widgets = []
@date_widget_width = nil
@date_widgets = []
@tags = Tagger.new self
## these guys, and @text and @lines, are not covered
@load_thread = nil
@load_thread_opts = load_thread_opts
@hidden_labels = hidden_labels + LabelManager::HIDDEN_RESERVED_LABELS
@date_width = DATE_WIDTH
@interrupt_search = false
initialize_threads # defines @ts and @ts_mutex
update # defines @text and @lines
UpdateManager.register self
@save_thread_mutex = Mutex.new
@last_load_more_size = nil
to_load_more do |size|
next if @last_load_more_size == 0
load_threads :num => size,
:when_done => lambda { |num| @last_load_more_size = num }
end
end
def unsaved?; dirty? end
def lines; @text.length; end
def [] i; @text[i]; end
def contains_thread? t; @threads.include?(t) end
def reload
drop_all_threads
UndoManager.clear
BufferManager.draw_screen
load_threads :num => buffer.content_height
end
## open up a thread view window
def select t=nil, when_done=nil
t ||= cursor_thread or return
Redwood::reporting_thread("load messages for thread-view-mode") do
num = t.size
message = "Loading #{num.pluralize 'message body'}..."
BufferManager.say(message) do |sid|
t.each_with_index do |(m, *_), i|
next unless m
BufferManager.say "#{message} (#{i}/#{num})", sid if t.size > 1
m.load_from_source!
end
end
mode = ThreadViewMode.new t, @hidden_labels, self
BufferManager.spawn t.subj, mode
BufferManager.draw_screen
mode.jump_to_first_open if $config[:jump_to_open_message]
BufferManager.draw_screen # lame TODO: make this unnecessary
## the first draw_screen is needed before topline and botline
## are set, and the second to show the cursor having moved
t.remove_label :unread
Index.save_thread t
update_text_for_line curpos
UpdateManager.relay self, :read, t.first
when_done.call if when_done
end
end
def multi_select threads
threads.each { |t| select t }
end
## these two methods are called by thread-view-modes when the user
## wants to view the previous/next thread without going back to
## index-mode. we update the cursor as a convenience.
def launch_next_thread_after thread, &b
launch_another_thread thread, 1, &b
end
def launch_prev_thread_before thread, &b
launch_another_thread thread, -1, &b
end
def launch_another_thread thread, direction, &b
l = @lines[thread] or return
target_l = l + direction
t = @mutex.synchronize do
if target_l >= 0 && target_l < @threads.length
@threads[target_l]
end
end
if t # there's a next thread
set_cursor_pos target_l # move out of mutex?
select t, b
elsif b # no next thread. call the block anyways
b.call
end
end
def handle_single_message_labeled_update sender, m
## no need to do anything different here; we don't differentiate
## messages from their containing threads
handle_labeled_update sender, m
end
def handle_labeled_update sender, m
if(t = thread_containing(m))
l = @lines[t] or return
update_text_for_line l
elsif is_relevant?(m)
add_or_unhide m
end
end
def handle_simple_update sender, m
t = thread_containing(m) or return
l = @lines[t] or return
update_text_for_line l
end
%w(read unread archived starred unstarred).each do |state|
define_method "handle_#{state}_update" do |*a|
handle_simple_update(*a)
end
end
## overwrite me!
def is_relevant? m; false; end
def handle_added_update sender, m
add_or_unhide m
BufferManager.draw_screen
end
def handle_updated_update sender, m
t = thread_containing(m) or return
l = @lines[t] or return
@ts_mutex.synchronize do
@ts.delete_message m
@ts.add_message m
end
Index.save_thread t, sync_back = false
update_text_for_line l
end
def handle_location_deleted_update sender, m
t = thread_containing(m)
delete_thread t if t and t.first.id == m.id
@ts_mutex.synchronize do
@ts.delete_message m if t
end
update
end
def handle_single_message_deleted_update sender, m
@ts_mutex.synchronize do
return unless @ts.contains? m
@ts.remove_id m.id
end
update
end
def handle_deleted_update sender, m
t = @ts_mutex.synchronize { @ts.thread_for m }
return unless t
hide_thread t
update
end
def handle_killed_update sender, m
t = @ts_mutex.synchronize { @ts.thread_for m }
return unless t
hide_thread t
update
end
def handle_spammed_update sender, m
t = @ts_mutex.synchronize { @ts.thread_for m }
return unless t
hide_thread t
update
end
def handle_undeleted_update sender, m
add_or_unhide m
end
def handle_unkilled_update sender, m
add_or_unhide m
end
def undo
UndoManager.undo
end
def update
old_cursor_thread = cursor_thread
@mutex.synchronize do
## let's see you do THIS in python
@threads = @ts.threads.select { |t| !@hidden_threads.member?(t) }.select(&:has_message?).sort_by(&:sort_key)
@size_widgets = @threads.map { |t| size_widget_for_thread t }
@size_widget_width = @size_widgets.max_of { |w| w.display_length }
@date_widgets = @threads.map { |t| date_widget_for_thread t }
@date_widget_width = @date_widgets.max_of { |w| w.display_length }
end
set_cursor_pos @threads.index(old_cursor_thread)||curpos
regen_text
end
def edit_message
return unless(t = cursor_thread)
message, *_ = t.find { |m, *o| m.has_label? :draft }
if message
mode = ResumeMode.new message
BufferManager.spawn "Edit message", mode
else
BufferManager.flash "Not a draft message!"
end
end
## returns an undo lambda
def actually_toggle_starred t
if t.has_label? :starred # if ANY message has a star
t.remove_label :starred # remove from all
UpdateManager.relay self, :unstarred, t.first
lambda do
t.first.add_label :starred
UpdateManager.relay self, :starred, t.first
regen_text
end
else
t.first.add_label :starred # add only to first
UpdateManager.relay self, :starred, t.first
lambda do
t.remove_label :starred
UpdateManager.relay self, :unstarred, t.first
regen_text
end
end
end
def toggle_starred
t = cursor_thread or return
undo = actually_toggle_starred t
UndoManager.register "toggling thread starred status", undo, lambda { Index.save_thread t }
update_text_for_line curpos
cursor_down
Index.save_thread t
end
def multi_toggle_starred threads
UndoManager.register "toggling #{threads.size.pluralize 'thread'} starred status",
threads.map { |t| actually_toggle_starred t },
lambda { threads.each { |t| Index.save_thread t } }
regen_text
threads.each { |t| Index.save_thread t }
end
## returns an undo lambda
def actually_toggle_archived t
thread = t
pos = curpos
if t.has_label? :inbox
t.remove_label :inbox
UpdateManager.relay self, :archived, t.first
lambda do
thread.apply_label :inbox
update_text_for_line pos
UpdateManager.relay self,:unarchived, thread.first
end
else
t.apply_label :inbox
UpdateManager.relay self, :unarchived, t.first
lambda do
thread.remove_label :inbox
update_text_for_line pos
UpdateManager.relay self, :unarchived, thread.first
end
end
end
## returns an undo lambda
def actually_toggle_spammed t
thread = t
if t.has_label? :spam
t.remove_label :spam
add_or_unhide t.first
UpdateManager.relay self, :unspammed, t.first
lambda do
thread.apply_label :spam
self.hide_thread thread
UpdateManager.relay self,:spammed, thread.first
end
else
t.apply_label :spam
hide_thread t
UpdateManager.relay self, :spammed, t.first
lambda do
thread.remove_label :spam
add_or_unhide thread.first
UpdateManager.relay self,:unspammed, thread.first
end
end
end
## returns an undo lambda
def toggle_archived
t = cursor_thread or return
undo = actually_toggle_archived t
UndoManager.register "deleting/undeleting thread #{t.first.id}", undo, lambda { update_text_for_line curpos },
lambda { Index.save_thread t }
update_text_for_line curpos
Index.save_thread t
end
def multi_toggle_archived threads
undos = threads.map { |t| actually_toggle_archived t }
UndoManager.register "deleting/undeleting #{threads.size.pluralize 'thread'}", undos, lambda { regen_text },
lambda { threads.each { |t| Index.save_thread t } }
regen_text
threads.each { |t| Index.save_thread t }
end
def toggle_new
t = cursor_thread or return
t.toggle_label :unread
update_text_for_line curpos
cursor_down
Index.save_thread t
end
def multi_toggle_new threads
threads.each { |t| t.toggle_label :unread }
regen_text
threads.each { |t| Index.save_thread t }
end
def multi_toggle_tagged threads
@mutex.synchronize { @tags.drop_all_tags }
regen_text
end
def join_threads
## this command has no non-tagged form. as a convenience, allow this
## command to be applied to tagged threads without hitting ';'.
@tags.apply_to_tagged :join_threads
end
def multi_join_threads threads
@ts.join_threads threads or return
threads.each { |t| Index.save_thread t }
@tags.drop_all_tags # otherwise we have tag pointers to invalid threads!
update
end
def jump_to_next_new
n = @mutex.synchronize do
((curpos + 1) ... lines).find { |i| @threads[i].has_label? :unread } ||
(0 ... curpos).find { |i| @threads[i].has_label? :unread }
end
if n
## jump there if necessary
jump_to_line n unless n >= topline && n < botline
set_cursor_pos n
else
BufferManager.flash "No new messages."
end
end
def toggle_spam
t = cursor_thread or return
multi_toggle_spam [t]
end
## both spam and deleted have the curious characteristic that you
## always want to hide the thread after either applying or removing
## that label. in all thread-index-views except for
## label-search-results-mode, when you mark a message as spam or
## deleted, you want it to disappear immediately; in LSRM, you only
## see deleted or spam emails, and when you undelete or unspam them
## you also want them to disappear immediately.
def multi_toggle_spam threads
undos = threads.map { |t| actually_toggle_spammed t }
threads.each { |t| HookManager.run("mark-as-spam", :thread => t) }
UndoManager.register "marking/unmarking #{threads.size.pluralize 'thread'} as spam",
undos, lambda { regen_text }, lambda { threads.each { |t| Index.save_thread t } }
regen_text
threads.each { |t| Index.save_thread t }
end
def toggle_deleted
t = cursor_thread or return
multi_toggle_deleted [t]
end
## see comment for multi_toggle_spam
def multi_toggle_deleted threads
undos = threads.map { |t| actually_toggle_deleted t }
UndoManager.register "deleting/undeleting #{threads.size.pluralize 'thread'}",
undos, lambda { regen_text }, lambda { threads.each { |t| Index.save_thread t } }
regen_text
threads.each { |t| Index.save_thread t }
end
def kill
t = cursor_thread or return
multi_kill [t]
end
def flush_index
@flush_id = BufferManager.say "Flushing index..."
Index.save_index
BufferManager.clear @flush_id
end
## m-m-m-m-MULTI-KILL
def multi_kill threads
UndoManager.register "killing/unkilling #{threads.size.pluralize 'thread'}" do
threads.each do |t|
if t.toggle_label :killed
add_or_unhide t.first
else
hide_thread t
end
end.each do |t|
UpdateManager.relay self, :labeled, t.first
Index.save_thread t
end
regen_text
end
threads.each do |t|
if t.toggle_label :killed
hide_thread t
else
add_or_unhide t.first
end
end.each do |t|
# send 'labeled'... this might be more specific
UpdateManager.relay self, :labeled, t.first
Index.save_thread t
end
killed, unkilled = threads.partition { |t| t.has_label? :killed }.map(&:size)
BufferManager.flash "#{killed.pluralize 'thread'} killed, #{unkilled} unkilled"
regen_text
end
def cleanup
UpdateManager.unregister self
if @load_thread
@load_thread.kill
BufferManager.clear @mbid if @mbid
sleep 0.1 # TODO: necessary?
BufferManager.erase_flash
end
dirty_threads = @mutex.synchronize { (@threads + @hidden_threads.keys).select { |t| t.dirty? } }
fail "dirty threads remain" unless dirty_threads.empty?
super
end
def toggle_tagged
t = cursor_thread or return
@mutex.synchronize { @tags.toggle_tag_for t }
update_text_for_line curpos
cursor_down
end
def toggle_tagged_all
@mutex.synchronize { @threads.each { |t| @tags.toggle_tag_for t } }
regen_text
end
def tag_matching
query = BufferManager.ask :search, "tag threads matching (regex): "
return if query.nil? || query.empty?
query = begin
/#{query}/i
rescue RegexpError => e
BufferManager.flash "error interpreting '#{query}': #{e.message}"
return
end
@mutex.synchronize { @threads.each { |t| @tags.tag t if thread_matches?(t, query) } }
regen_text
end
def apply_to_tagged; @tags.apply_to_tagged; end
def edit_labels
thread = cursor_thread or return
speciall = (@hidden_labels + LabelManager::RESERVED_LABELS).uniq
old_labels = thread.labels
pos = curpos
keepl, modifyl = thread.labels.partition { |t| speciall.member? t }
user_labels = BufferManager.ask_for_labels :label, "Labels for thread: ", modifyl.sort_by {|x| x.to_s}, @hidden_labels
return unless user_labels
thread.labels = Set.new(keepl) + user_labels
user_labels.each { |l| LabelManager << l }
update_text_for_line curpos
UndoManager.register "labeling thread" do
thread.labels = old_labels
update_text_for_line pos
UpdateManager.relay self, :labeled, thread.first
Index.save_thread thread
end
UpdateManager.relay self, :labeled, thread.first
Index.save_thread thread
end
def multi_edit_labels threads
user_labels = BufferManager.ask_for_labels :labels, "Add/remove labels (use -label to remove): ", [], @hidden_labels
return unless user_labels
user_labels.map! { |l| (l.to_s =~ /^-/)? [l.to_s.gsub(/^-?/, '').to_sym, true] : [l, false] }
hl = user_labels.select { |(l,_)| @hidden_labels.member? l }
unless hl.empty?
BufferManager.flash "'#{hl}' is a reserved label!"
return
end
old_labels = threads.map { |t| t.labels.dup }
threads.each do |t|
user_labels.each do |(l, to_remove)|
if to_remove
t.remove_label l
else
t.apply_label l
LabelManager << l
end
end
UpdateManager.relay self, :labeled, t.first
end
regen_text
UndoManager.register "labeling #{threads.size.pluralize 'thread'}" do
threads.zip(old_labels).map do |t, old_labels|
t.labels = old_labels
UpdateManager.relay self, :labeled, t.first
Index.save_thread t
end
regen_text
end
threads.each { |t| Index.save_thread t }
end
def reply type_arg=nil
t = cursor_thread or return
m = t.latest_message
return if m.nil? # probably won't happen
m.load_from_source!
mode = ReplyMode.new m, type_arg
BufferManager.spawn "Reply to #{m.subj}", mode
end
def reply_all; reply :all; end
def forward
t = cursor_thread or return
m = t.latest_message
return if m.nil? # probably won't happen
m.load_from_source!
ForwardMode.spawn_nicely :message => m
end
def load_n_threads_background n=LOAD_MORE_THREAD_NUM, opts={}
return if @load_thread # todo: wrap in mutex
@load_thread = Redwood::reporting_thread("load threads for thread-index-mode") do
num = load_n_threads n, opts
opts[:when_done].call(num) if opts[:when_done]
@load_thread = nil
end
end
## TODO: figure out @ts_mutex in this method
def load_n_threads n=LOAD_MORE_THREAD_NUM, opts={}
@interrupt_search = false
@mbid = BufferManager.say "Searching for threads..."
ts_to_load = n
ts_to_load = ts_to_load + @ts.size unless n == -1 # -1 means all threads
orig_size = @ts.size
last_update = Time.now
@ts.load_n_threads(ts_to_load, opts) do |i|
if (Time.now - last_update) >= 0.25
BufferManager.say "Loaded #{i.pluralize 'thread'}...", @mbid
update
BufferManager.draw_screen
last_update = Time.now
end
::Thread.pass
break if @interrupt_search
end
@ts.threads.each { |th| th.labels.each { |l| LabelManager << l } }
update
BufferManager.clear @mbid if @mbid
@mbid = nil
BufferManager.draw_screen
@ts.size - orig_size
end
ignore_concurrent_calls :load_n_threads
def status
if (l = lines) == 0
"line 0 of 0"
else
"line #{curpos + 1} of #{l}"
end
end
def cancel_search
@interrupt_search = true
end
def load_all_threads
load_threads :num => -1
end
def load_threads opts={}
if opts[:num].nil?
n = ThreadIndexMode::LOAD_MORE_THREAD_NUM
else
n = opts[:num]
end
myopts = @load_thread_opts.merge({ :when_done => (lambda do |num|
opts[:when_done].call(num) if opts[:when_done]
if num > 0
BufferManager.flash "Found #{num.pluralize 'thread'}."
else
BufferManager.flash "No matches."
end
end)})
if opts[:background] || opts[:background].nil?
load_n_threads_background n, myopts
else
load_n_threads n, myopts
end
end
ignore_concurrent_calls :load_threads
def read_and_archive
return unless cursor_thread
thread = cursor_thread # to make sure lambda only knows about 'old' cursor_thread
was_unread = thread.labels.member? :unread
UndoManager.register "reading and archiving thread" do
thread.apply_label :inbox
thread.apply_label :unread if was_unread
add_or_unhide thread.first
Index.save_thread thread
end
cursor_thread.remove_label :unread
cursor_thread.remove_label :inbox
hide_thread cursor_thread
regen_text
Index.save_thread thread
end
def multi_read_and_archive threads
old_labels = threads.map { |t| t.labels.dup }
threads.each do |t|
t.remove_label :unread
t.remove_label :inbox
hide_thread t
end
regen_text
UndoManager.register "reading and archiving #{threads.size.pluralize 'thread'}" do
threads.zip(old_labels).each do |t, l|
t.labels = l
add_or_unhide t.first
Index.save_thread t
end
regen_text
end
threads.each { |t| Index.save_thread t }
end
def resize rows, cols
regen_text
super
end
protected
def add_or_unhide m
@ts_mutex.synchronize do
if (is_relevant?(m) || @ts.is_relevant?(m)) && [email protected]?(m)
@ts.load_thread_for_message m, @load_thread_opts
end
@hidden_threads.delete @ts.thread_for(m)
end
update
end
def thread_containing m; @ts_mutex.synchronize { @ts.thread_for m } end
## used to tag threads by query. this can be made a lot more sophisticated,
## but for right now we'll do the obvious thing.
def thread_matches? t, query
t.subj =~ query || t.snippet =~ query || t.participants.any? { |x| x.longname =~ query }
end
def size_widget_for_thread t
HookManager.run("index-mode-size-widget", :thread => t) || default_size_widget_for(t)
end
def date_widget_for_thread t
HookManager.run("index-mode-date-widget", :thread => t) || default_date_widget_for(t)
end
def cursor_thread; @mutex.synchronize { @threads[curpos] }; end
def drop_all_threads
@tags.drop_all_tags
initialize_threads
update
end
def delete_thread t
@mutex.synchronize do
i = @threads.index(t) or return
@threads.delete_at i
@size_widgets.delete_at i
@date_widgets.delete_at i
@tags.drop_tag_for t
end
end
def hide_thread t
@mutex.synchronize do
i = @threads.index(t) or return
raise "already hidden" if @hidden_threads[t]
@hidden_threads[t] = true
@threads.delete_at i
@size_widgets.delete_at i
@date_widgets.delete_at i
@tags.drop_tag_for t
end
end
def update_text_for_line l
return unless l # not sure why this happens, but it does, occasionally
need_update = false
@mutex.synchronize do
# and certainly not sure why this happens..
#
# probably a race condition between thread modification and updating
# going on.
return if @threads[l].empty?
@size_widgets[l] = size_widget_for_thread @threads[l]
@date_widgets[l] = date_widget_for_thread @threads[l]
## if a widget size has increased, we need to redraw everyone
need_update =
(@size_widgets[l].size > @size_widget_width) or
(@date_widgets[l].size > @date_widget_width)
end
if need_update
update
else
@text[l] = text_for_thread_at l
buffer.mark_dirty if buffer
end
end
def regen_text
threads = @mutex.synchronize { @threads }
@text = threads.map_with_index { |t, i| text_for_thread_at i }
@lines = threads.map_with_index { |t, i| [t, i] }.to_h
buffer.mark_dirty if buffer
end
def authors; map { |m, *o| m.from if m }.compact.uniq; end
## preserve author order from the thread
def author_names_and_newness_for_thread t, limit=nil
new = {}
seen = {}
authors = t.map do |m, *o|
next unless m && m.from
new[m.from] ||= m.has_label?(:unread)
next if seen[m.from]
seen[m.from] = true
m.from
end.compact
result = []
authors.each do |a|
break if limit && result.size >= limit
name = if AccountManager.is_account?(a)
"me"
elsif t.authors.size == 1
a.mediumname
else
a.shortname
end
result << [name, new[a]]
end
if result.size == 1 && (author_and_newness = result.assoc("me"))
unless (recipients = t.participants - t.authors).empty?
result = recipients.collect do |r|
break if limit && result.size >= limit
name = (recipients.size == 1) ? r.mediumname : r.shortname
["(#{name})", author_and_newness[1]]
end
end
end
result
end
AUTHOR_LIMIT = 5
def text_for_thread_at line
t, size_widget, date_widget = @mutex.synchronize do
[@threads[line], @size_widgets[line], @date_widgets[line]]
end
starred = t.has_label? :starred
## format the from column
cur_width = 0
ann = author_names_and_newness_for_thread t, AUTHOR_LIMIT
from = []
ann.each_with_index do |(name, newness), i|
break if cur_width >= from_width
last = i == ann.length - 1
abbrev =
if cur_width + name.display_length > from_width
name.slice_by_display_length(from_width - cur_width - 1) + "."
elsif cur_width + name.display_length == from_width
name.slice_by_display_length(from_width - cur_width)
else
if last
name.slice_by_display_length(from_width - cur_width)
else
name.slice_by_display_length(from_width - cur_width - 1) + ","
end
end
cur_width += abbrev.display_length
if last && from_width > cur_width
abbrev += " " * (from_width - cur_width)
end
from << [(newness ? :index_new_color : (starred ? :index_starred_color : :index_old_color)), abbrev]
end
is_me = AccountManager.method(:is_account?)
directly_participated = t.direct_participants.any?(&is_me)
participated = directly_participated || t.participants.any?(&is_me)
subj_color =
if t.has_label?(:draft)
:index_draft_color
elsif t.has_label?(:unread)
:index_new_color
elsif starred
:index_starred_color
elsif Colormap.sym_is_defined(:index_subject_color)
:index_subject_color
else
:index_old_color
end
size_padding = @size_widget_width - size_widget.display_length
size_widget_text = sprintf "%#{size_padding}s%s", "", size_widget
date_padding = @date_widget_width - date_widget.display_length
date_widget_text = sprintf "%#{date_padding}s%s", "", date_widget
[
[:tagged_color, @tags.tagged?(t) ? ">" : " "],
[:date_color, date_widget_text],
[:starred_color, (starred ? "*" : " ")],
] +
from +
[
[:size_widget_color, size_widget_text],
[:with_attachment_color , t.labels.member?(:attachment) ? "@" : " "],
[:to_me_color, directly_participated ? ">" : (participated ? '+' : " ")],
] +
(t.labels - @hidden_labels).sort_by {|x| x.to_s}.map {
|label| [Colormap.sym_is_defined("label_#{label}_color".to_sym) || :label_color, "#{label} "]
} +
[
[subj_color, t.subj + (t.subj.empty? ? "" : " ")],
[:snippet_color, t.snippet],
]
end
def dirty?; @mutex.synchronize { (@hidden_threads.keys + @threads).any? { |t| t.dirty? } } end
private
def default_size_widget_for t
case t.size
when 1
""
else
"(#{t.size})"
end
end
def default_date_widget_for t
t.date.getlocal.to_nice_s
end
def from_width
if buffer
[(buffer.content_width.to_f * 0.2).to_i, MIN_FROM_WIDTH].max
else
MIN_FROM_WIDTH # not sure why the buffer is gone
end
end
def initialize_threads
@ts = ThreadSet.new Index.instance, $config[:thread_by_subject]
@ts_mutex = Mutex.new
@hidden_threads = {}
end
end
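# Standalone sketch of the "returns an undo lambda" pattern the
# actually_toggle_* methods share: apply a change and hand back a closure
# that reverses it, for UndoManager to replay later. Set stands in for a
# thread's label set; nothing here touches sup's real classes.
require 'set'
labels = Set.new([:inbox])
toggle_deleted = lambda do
  if labels.include?(:deleted)
    labels.delete(:deleted)
    -> { labels.add(:deleted) }    # undo re-deletes
  else
    labels.add(:deleted)
    -> { labels.delete(:deleted) } # undo restores
  end
end
undo = toggle_deleted.call # labels now include :deleted
undo.call                  # labels back to just :inbox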
|
trishume/pro | lib/pro/indexer.rb | Pro.Indexer.run_index_process | ruby | def run_index_process
readme, writeme = IO.pipe
p1 = fork {
# Stop cd function from blocking on fork
STDOUT.reopen(writeme)
readme.close
index_process unless File.exists?(INDEXER_LOCK_PATH)
}
Process.detach(p1)
end | spins off a background process to update the cache file | train | https://github.com/trishume/pro/blob/646098c7514eb5346dd2942f90b888c0de30b6ba/lib/pro/indexer.rb#L37-L47 | class Indexer
CACHE_PATH = File.expand_path("~/.proCache")
INDEXER_LOCK_PATH = File.expand_path("~/.proCacheLock")
def initialize
@base_dirs = find_base_dirs
@low_cpu = false
end
def index
# most of the time the cache should exist
if res = read_cache
# index in the background for next time.
run_index_process
else
STDERR.puts "Indexing... This should only happen after updating.".red
res = build_index
end
res
end
# unserializes the cache file and returns
# the index object
def read_cache
return nil unless File.readable_real?(CACHE_PATH)
index = YAML::load_file(CACHE_PATH)
return nil unless index.created_version == Pro::VERSION
return nil unless index.base_dirs == @base_dirs
index
end
# spins off a background process to update the cache file
def index_process
@low_cpu = true
# create lock so no work duplicated
begin
File.open(INDEXER_LOCK_PATH, "w") {}
build_index
ensure
File.delete(INDEXER_LOCK_PATH)
end
end
# scan the base directories for git repos
# and build an index then cache it
# returns an index
def build_index
index = scan_into_index
cache_index(index)
index
end
# serialize the index to a cache file
def cache_index(index)
# TODO: atomic rename. Right now we just hope.
File.open(CACHE_PATH, 'w' ) do |out|
YAML::dump( index, out )
end
end
# compile base directories and scan them
# use this info to create an index object
# and return it
def scan_into_index
repos = scan_bases
Index.new(repos,@base_dirs)
end
# add all git repos in all bases to the index
def scan_bases
bases = {}
@base_dirs.each do |base|
bases[base] = index_repos(base)
end
bases
end
# find all repos in a certain base directory
# returns an array of Repo objects
def index_repos(base)
if system("which find > /dev/null")
index_repos_fast(base)
else
index_repos_slow(base)
end
end
def index_repos_fast(base)
Dir.chdir(base)
git_paths = `find . -name .git`.lines
# additionally, index repos symlinked directly from a base root
dirs = `find -L . -maxdepth 1 -type d`.lines
symlinks = `find . -maxdepth 1 -type l`.lines
# intersect those two results
dir_sl = dirs & symlinks
dir_sl_git_paths = dir_sl.
map {|path| path.chomp + '/.git'}.
select {|path| File.exists?(path)}
# turn the command outputs into a list of repos
repos = []
(git_paths + dir_sl_git_paths).each do |git_path|
next if git_path.empty?
git_path = File.expand_path(git_path.chomp)
path = File.dirname(git_path)
repo_name = File.basename(path)
repos << Repo.new(repo_name,path)
end
repos
end
# recursive walk in ruby
def index_repos_slow(base)
STDERR.puts "WARNING: pro is indexing slowly, please install the 'find' command."
repos = []
Find.find(base) do |path|
target = path
# additionally, index repos symlinked directly from a base root
if FileTest.symlink?(path)
next if File.dirname(path) != base
target = File.readlink(path)
end
# dir must exist and be a git repo
if FileTest.directory?(target) && File.exists?(path+"/.git")
base_name = File.basename(path)
repos << Repo.new(base_name,path)
Find.prune
end
end
repos
end
# Finds the base directory where repos are kept
# Checks the environment variable PRO_BASE and the
# file .proBase
def find_base_dirs()
bases = []
# check environment first
base = ENV['PRO_BASE']
bases << base if base
# next check proBase file
path = ENV['HOME'] + "/.proBase"
if File.exists?(path)
# read lines of the pro base file
bases += IO.read(path).split("\n").map {|p| File.expand_path(p.strip)}
end
# strip bases that do not exist
# I know about select! but it doesn't exist in 1.8
bases = bases.select {|b| File.exists?(b)}
# if no bases then return home
bases << ENV['HOME'] if bases.empty?
bases
end
end
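# Minimal sketch of the detached-fork-plus-lockfile pattern that
# run_index_process and index_process combine: skip the work when another
# indexer holds the lock, and always release it afterwards. The constant
# matches the one defined above; the indexing work itself is elided.
pid = fork do
  unless File.exist?(Pro::Indexer::INDEXER_LOCK_PATH)
    File.open(Pro::Indexer::INDEXER_LOCK_PATH, "w") {}
    begin
      # ... expensive index build ...
    ensure
      File.delete(Pro::Indexer::INDEXER_LOCK_PATH)
    end
  end
end
Process.detach(pid)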
|
robertwahler/repo_manager | lib/repo_manager/actions/base_action.rb | RepoManager.BaseAction.help | ruby | def help(help_options={})
comment_starting_with = help_options[:comment_starting_with] || ""
located_in_file = help_options[:located_in_file] || __FILE__
text = File.read(located_in_file)
result = text.match(/(^\s*#\s*#{comment_starting_with}.*)^\s*class .* AppAction/m)
result = $1
result = result.gsub(/ @example/, '')
result = result.gsub(/ @return \[Number\]/, ' Exit code:')
result = result.gsub(/ @return .*/, '')
result = result.gsub(/ @see .*$/, '')
# strip the leading whitespace, the '#' and space
result = result.gsub(/^\s*# ?/, '')
# strip surrounding whitespace
result.strip
end | Convert a method comment block to help text
@return [String] suitable for displaying on STDOUT | train | https://github.com/robertwahler/repo_manager/blob/d945f1cb6ac48b5689b633fcc029fd77c6a02d09/lib/repo_manager/actions/base_action.rb#L243-L260 | class BaseAction
# main configuration hash
attr_reader :configuration
# options hash, read from configuration hash
attr_reader :options
# args as passed on command line
attr_reader :args
# filename to template for rendering
attr_accessor :template
# filename to write output
attr_accessor :output
# numeric exit code set from return of process method
attr_reader :exit_code
# bin wrapper option parser object
attr_accessor :option_parser
def initialize(args=[], config={})
@configuration = config.deep_clone
@options = @configuration[:options] || {}
@args = args.dup
logger.debug "initialize with args: #{@args.inspect}"
end
# Parse generic action options for all decendant actions
#
# @return [OptionParser] for use by decendant actions
def parse_options(parser_configuration = {})
raise_on_invalid_option = parser_configuration.has_key?(:raise_on_invalid_option) ? parser_configuration[:raise_on_invalid_option] : true
parse_base_options = parser_configuration.has_key?(:parse_base_options) ? parser_configuration[:parse_base_options] : true
logger.debug "parsing args: #{@args.inspect}, raise_on_invalid_option: #{raise_on_invalid_option}, parse_base_options: #{parse_base_options}"
@option_parser ||= OptionParser.new
option_parser.banner = help + "\n\nOptions:"
if parse_base_options
option_parser.on("--template [NAME]", "Use a template to render output. (default=default.slim)") do |t|
options[:template] = t.nil? ? "default.slim" : t
@template = options[:template]
end
option_parser.on("--output FILENAME", "Render output directly to a file") do |f|
options[:output] = f
@output = options[:output]
end
option_parser.on("--force", "Overwrite file output without prompting") do |f|
options[:force] = f
end
option_parser.on("-r", "--repos a1,a2,a3", "--asset a1,a2,a3", "--filter a1,a2,a3", Array, "List of regex asset name filters") do |list|
options[:filter] = list
end
# NOTE: OptionParser will add short options, there is no way to stop '-m' from being the same as '--match'
option_parser.on("--match [MODE]", "Asset filter match mode. MODE=ALL (default), FIRST, EXACT, or ONE (fails if more than 1 match)") do |m|
options[:match] = m || "ALL"
options[:match].upcase!
unless ["ALL", "FIRST", "EXACT", "ONE"].include?(options[:match])
puts "invalid match mode option: #{options[:match]}"
exit 1
end
end
end
# allow decendants to add options
yield option_parser if block_given?
# reprocess args for known options, see binary wrapper for first pass
# (first pass doesn't know about action specific options), find all
# action options that may come after the action/subcommand (options
# before subcommand have already been processed) and its args
logger.debug "args before reprocessing: #{@args.inspect}"
begin
option_parser.order!(@args)
rescue OptionParser::InvalidOption => e
if raise_on_invalid_option
puts "option error: #{e}"
puts option_parser
exit 1
else
# parse and consume until we hit an unknown option (not arg), put it back so it
# can be shifted into the new array
e.recover(@args)
end
end
logger.debug "args before unknown collection: #{@args.inspect}"
unknown_args = []
while unknown_arg = @args.shift
logger.debug "unknown_arg: #{unknown_arg.inspect}"
unknown_args << unknown_arg
begin
# consume options and stop at an arg
option_parser.order!(@args)
rescue OptionParser::InvalidOption => e
if raise_on_invalid_option
puts "option error: #{e}"
puts option_parser
exit 1
else
# parse and consume until we hit an unknown option (not arg), put it back so it
# can be shifted into the new array
e.recover(@args)
end
end
end
logger.debug "args after unknown collection: #{@args.inspect}"
@args = unknown_args.dup
logger.debug "args after reprocessing: #{@args.inspect}"
logger.debug "configuration after reprocessing: #{@configuration.inspect}"
logger.debug "options after reprocessing: #{@options.inspect}"
option_parser
end
def execute
before_execute
parse_options
@exit_code = process
after_execute
@exit_code
end
# handle "assets to items" transformations, if any, and write to output
def process
write_to_output(render)
end
# TODO: add exception handler and pass return values
def write_to_output(content)
if output
logger.debug "write_to_output called with output : #{output}"
if overwrite_output?
logger.debug "writing output to : #{output}"
File.open(output, 'wb') {|f| f.write(content) }
else
logger.info "existing file not overwritten. To overwrite automatically, use the '--force' option."
end
else
logger.debug "writing to STDOUT"
print content
end
return 0
end
# TODO: create items/app_item class with at least the 'name' accessor
#
# assets: raw configuration handling system for items
def assets
return @assets if @assets
@assets = AssetManager.new.assets(asset_options)
end
# Used by asset factory to create assets. Override in app_action.rb or a
# descendant to set the class to be instantiated by by the AssetManager.
#
# @return [Symbol] asset type
def asset_type
:app_asset
end
# asset options separated from assets to make it easier to override assets
def asset_options
# include all base action options
result = options.deep_clone
# anything left on the command line should be filters as all options have
# been consumed, for pass through options, filters must be ignored by overwritting them
filters = args.dup
filters += result[:filter] if result[:filter]
result = result.merge(:filter => filters) unless filters.empty?
# asset type to create
type = result[:type] || asset_type
result = result.merge(:type => type)
# optional key: :assets_folder, absolute path or relative to config file if :base_folder is specified
result = result.merge(:assets_folder => configuration[:folders][:assets]) if configuration[:folders]
# optional key: :base_folder is the folder that contains the main config file
result = result.merge(:base_folder => File.dirname(configuration[:configuration_filename])) if configuration[:configuration_filename]
result
end
# items to be rendered, defaults to assets, override to suit
#
# @return [Array] of items to be rendered
def items
assets
end
# Render items result to a string
#
# @return [String] suitable for displaying on STDOUT or writing to a file
def render(view_options=configuration)
logger.debug "rendering"
result = ""
if template
logger.debug "rendering with template : #{template}"
view = AppView.new(items, view_options)
view.template = template
result = view.render
else
items.each_with_index do |item, index|
result += "\n" unless index == 0
result += item.name.green + ":\n"
if item.respond_to?(:attributes)
attributes = item.attributes.deep_clone
result += attributes.recursively_stringify_keys!.to_conf.gsub(/\s+$/, '') # strip trailing whitespace from YAML
result += "\n"
end
end
end
result
end
# Convert a method comment block to help text
#
# @return [String] suitable for displaying on STDOUT
# @return [Boolean] true if output doesn't exist or it is OK to overwrite
def overwrite_output?
return true unless File.exists?(output)
if options[:force]
logger.debug "overwriting output with --force option"
return true
end
unless STDOUT.isatty
logger.debug "TTY not detected, skipping overwrite prompt"
return false
end
result = false
print "File '#{output}' exists. Would you like overwrite? [y/n]: "
case gets.strip
when 'Y', 'y', 'yes'
logger.debug "user answered yes to overwrite prompt"
result = true
else
logger.debug "user answered no to overwrite prompt"
end
result
end
# callbacks
def before_execute
logger.debug "callback: before_execute"
end
def after_execute
logger.debug "callback: after_execute"
end
end
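# Hypothetical sketch of the comment-block extraction help performs:
# capture everything from a matching leading comment down to the
# `class ... AppAction` line, then strip the comment markers. The source
# text is inlined instead of read from a file, and the @example/@return
# rewrites are omitted for brevity.
text = <<~SRC
  # List all assets.
  #
  # Exits with 0 on success.
  class ListAction < AppAction
SRC
text =~ /(^\s*#\s*List.*)^\s*class .* AppAction/m
puts $1.gsub(/^\s*# ?/, '').strip
# => "List all assets.\n\nExits with 0 on success."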
|
DigitPaint/roger | lib/roger/resolver.rb | Roger.Resolver.split_path | ruby | def split_path(path)
path = path.to_s
extension = File.extname(path)[1..-1] || ""
path_without_extension = path.sub(/\.#{Regexp.escape(extension)}\Z/, "")
[extension, path_without_extension]
end | Split path into extension and path without extension | train | https://github.com/DigitPaint/roger/blob/1153119f170d1b0289b659a52fcbf054df2d9633/lib/roger/resolver.rb#L191-L196 | class Resolver
# Maps output extensions to template extensions to find
# source files.
EXTENSION_MAP = {
"html" => %w(
rhtml
markdown
mkd
md
ad
adoc
asciidoc
rdoc
textile
),
"csv" => %w(
rcsv
),
# These are generic template languages
nil => %w(
erb
erubis
str
)
}.freeze
attr_reader :load_paths
def initialize(paths)
raise ArgumentError, "Resolver base path can't be nil" if paths.nil?
# Convert to paths
@load_paths = [paths].flatten.map { |p| Pathname.new(p) }
end
# @param [String] url The url to resolve to a path
# @param [Hash] options Options
#
# @option options [String] :prefer The preferred template extension. When searching for
# templates, the preferred template extension defines what file type we're requesting
# when we ask for a file without an extension
def find_template(url, options = {})
options = {
prefer: "html"
}.update(options)
orig_path, _qs, _anch = strip_query_string_and_anchor(url.to_s)
output = nil
load_paths.find do |load_path|
path = File.join(load_path, orig_path)
output = find_template_path(path, options)
end
output
end
alias url_to_path find_template
# Convert a disk path on file to an url
def path_to_url(path, relative_to = nil)
# Find the parent path we're in
path = Pathname.new(path).realpath
base = load_paths.find { |lp| path.to_s =~ /\A#{Regexp.escape(lp.realpath.to_s)}/ }
path = path.relative_path_from(base).cleanpath
if relative_to
relative_path_to_url(path, relative_to, base).to_s
else
"/#{path}"
end
end
def url_to_relative_url(url, relative_to_path)
# Skip if the url doesn't start with a / (but not with //)
return false unless url =~ %r{\A/[^/]}
path, qs, anch = strip_query_string_and_anchor(url)
# Get disk path
if true_path = url_to_path(path, exact_match: true)
path = path_to_url(true_path, relative_to_path)
path += qs if qs
path += anch if anch
path
else
false
end
end
def strip_query_string_and_anchor(url)
url = url.dup
# Strip off anchors
anchor = nil
url.gsub!(/(#.+)\Z/) do |r|
anchor = r
""
end
# Strip off query strings
query = nil
url.gsub!(/(\?.+)\Z/) do |r|
query = r
""
end
[url, query, anchor]
end
protected
# Finds the template path for "name"
def find_template_path(name, options = {})
options = {
prefer: "html", # Prefer a template with extension
}.update(options)
path = sanitize_name(name, options[:prefer])
# Exact match
return Pathname.new(path) if File.exist?(path)
# Split extension and path
path_extension, path_without_extension = split_path(path)
# Get possible output extensions for path_extension
template_extensions = template_extensions_for_output(path_extension, options[:prefer])
# Let's look at the disk to see what files we've got
files = Dir.glob(path_without_extension + ".*")
results = filter_files(files, path, path_without_extension, template_extensions)
if !results[0]
# No results found, but maybe there is a directory
# with the same name and it contains an index.XYZ
find_template_path(File.join(name, "index")) if File.directory?(name)
else
Pathname.new(results[0])
end
end
# Filter a list of files to see wether or not we can process them.
# Will take into account that the longest match with path will
# be the first result.
def filter_files(files, path, path_without_extension, template_extensions)
results = []
files.each do |file|
match = if file.start_with?(path)
path
else
path_without_extension
end
processable_extensions = file[(match.length + 1)..-1].split(".")
# All processable_extensions must be processable
# by a template_extension
next unless (processable_extensions - template_extensions).empty?
if file.start_with?(path)
# The whole path is found in the filename, not just
# the path without the extension.
# it must have priority over all else
results.unshift(file)
else
results.push(file)
end
end
results
end
# Append the preferred extension (or "html") if the name
# doesn't have an extension yet
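# e.g. (illustrative): sanitize_name("blog/post")           #=> "blog/post.html"
#                      sanitize_name("blog/post.txt", "md") #=> "blog/post.txt"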
def sanitize_name(name, prefer = nil)
path = name.to_s
# Check if we haven't got an extension
# we'll assume you're looking for prefer or "html" otherwise
path += ".#{prefer || 'html'}" unless File.basename(path).include?(".")
path
end
# Build the list of template extensions that could produce output for the given extension
def template_extensions_for_output(ext, prefer = nil)
template_extensions = []
# The preferred template_extension is first
template_extensions += prefer.to_s.split(".") if prefer
# Any exact template matches for extension
template_extensions += EXTENSION_MAP[ext] if EXTENSION_MAP[ext]
# Any generic templates
template_extensions += EXTENSION_MAP[nil]
# Include the extension itself so it passes extension matching later on
template_extensions += [ext]
template_extensions
end
def relative_path_to_url(path, relative_to, base)
relative_to = Pathname.new(File.dirname(relative_to.to_s))
# If relative_to is an absolute path
if relative_to.absolute?
relative_to = relative_to.relative_path_from(base).cleanpath
end
Pathname.new("/" + path.to_s).relative_path_from(Pathname.new("/" + relative_to.to_s))
end
end
|
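A minimal usage sketch of the template resolver above; the "Resolver" constant, the load path, and the on-disk file layout are illustrative assumptions, not taken from the source:
resolver = Resolver.new("site/templates")                  # hypothetical load path
resolver.find_template("blog/post")                        # may resolve site/templates/blog/post.html.erb
resolver.find_template("styles/main", prefer: "css")       # prefer "css" output when no extension is given
resolver.path_to_url("site/templates/blog/post.html.erb") # => "/blog/post.html.erb" (if the file exists)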
NullVoxPopuli/authorizable | lib/authorizable/cache.rb | Authorizable.Cache.set_for_role | ruby | def set_for_role(name: "", role: nil, value: nil)
if role
store[role] ||= {}
store[role][name] = value
else
store[name] = value
end
end | calculating the value of a permission is costly.
there are several database lookups and lots of merging
of hashes.
once a permission is calculated, we'll store it here, so we don't
have to re-calculate/query/merge everything all over again
for both object access and page access, check if we've
already calculated the permission
the structure of this cache is the following:
{
role_1: {
permission1: true,
permission2: false
},
authorization_permission_name: true
}
@param [String] name name of the permission
@param [Number] role role of the user
@param [Boolean] value | train | https://github.com/NullVoxPopuli/authorizable/blob/6a4ef94848861bb79b0ab1454264366aed4e2db8/lib/authorizable/cache.rb#L36-L43 | class Cache
def store
@permission_cache ||= {}
end
# calculating the value of a permission is costly.
# there are several database lookups and lots of merging
# of hashes.
# once a permission is calculated, we'll store it here, so we don't
# have to re-calculate/query/merge everything all over again
#
# for both object access and page access, check if we've
# already calculated the permission
#
# the structure of this cache is the following:
# {
# role_1: {
# permission1: true,
# permission2: false
# },
# authorization_permission_name: true
# }
#
# @param [String] name name of the permission
# @param [Number] role role of the user
# @param [Boolean] value
# @param [String] permission_name name of the permission
# @param [Number] role role of the user
# @return [Boolean] value of the previously stored permission
def get_for_role(permission_name, role = nil)
if role
store[role] ||= {}
store[role][permission_name]
else
store[permission_name]
end
end
end
|
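A minimal sketch of the cache above in use; the role id and permission names are illustrative assumptions:
cache = Authorizable::Cache.new
cache.set_for_role(name: :can_edit_event, role: 1, value: true)
cache.get_for_role(:can_edit_event, 1)    # => true
cache.set_for_role(name: :can_view_reports, value: false)  # no role: stored at the top level
cache.get_for_role(:can_view_reports)     # => false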
robertwahler/repo_manager | lib/repo_manager/views/base_view.rb | RepoManager.BaseView.render | ruby | def render
raise "unable to find template file: #{template}" unless File.exists?(template)
extension = File.extname(template)
extension = extension.downcase if extension
case extension
when '.erb'
contents = File.open(template, "r") {|f| f.read}
ERB.new(contents, nil, '-').result(self.get_binding)
when '.slim'
Slim::Template.new(template, {:pretty => true}).render(self)
else
raise "unsupported template type based on file extension #{extension}"
end
end | Render the template based on its file extension: ERB for .erb templates, Slim for .slim templates. | train | https://github.com/robertwahler/repo_manager/blob/d945f1cb6ac48b5689b633fcc029fd77c6a02d09/lib/repo_manager/views/base_view.rb#L97-L112 | class BaseView
def initialize(items, configuration={})
@configuration = configuration.deep_clone
@items = items
@template = File.expand_path('../templates/default.slim', __FILE__)
end
def configuration
@configuration
end
def items
@items
end
def template
return @template if @template.nil? || Pathname.new(@template).absolute?
# try relative to PWD
fullpath = File.expand_path(File.join(FileUtils.pwd, @template))
return fullpath if File.exist?(fullpath)
# try built in template folder
fullpath = File.expand_path(File.join('../templates', @template), __FILE__)
end
def template=(value)
@template = value
end
def title
@title || configuration[:title] || "Default Title"
end
def title=(value)
@title = value
end
def date
return @date if @date
if configuration[:date]
@date = Chronic.parse(configuration[:date])
return @date if @date
end
@date = Date.today
end
def date=(value)
@date = value
end
# ERB binding
def get_binding
binding
end
# render a partial
#
# filename: unless absolute, it will be relative to the main template
#
# @example slim escapes HTML, use '=='
#
# head
# == render 'mystyle.css'
#
# @return [String] of non-escaped textual content
def partial(filename)
filename = partial_path(filename)
raise "unable to find partial file: #{filename}" unless File.exists?(filename)
contents = File.open(filename, "rb") {|f| f.read}
# TODO: detect template EOL and match it to the partial's EOL
# force unix eol
contents.gsub!(/\r\n/, "\n") if contents.match("\r\n")
contents
end
# Render the template based on its file extension (.erb via ERB, .slim via Slim)
private
# full expanded path to the given partial
#
def partial_path(filename)
return filename if filename.nil? || Pathname.new(filename).absolute?
# try relative to template
if template
base_folder = File.dirname(template)
filename = File.expand_path(File.join(base_folder, filename))
return filename if File.exist?(filename)
end
# try relative to PWD
filename = File.expand_path(File.join(FileUtils.pwd, filename))
return filename if File.exist?(filename)
# try built in template folder
filename = File.expand_path(File.join('../templates', filename), __FILE__)
end
end
|
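A small rendering sketch for the view class above, assuming an ERB template exists on disk; the template path, its contents, and the items are illustrative assumptions:
# status.html.erb (hypothetical contents):
#   <h1><%= title %></h1>
#   <%- items.each do |item| -%>
#   <li><%= item %></li>
#   <%- end -%>
view = RepoManager::BaseView.new(%w[repo_a repo_b], title: "Repo Status")
view.template = "status.html.erb"  # resolved against PWD, then the built-in template folder
puts view.render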
projectcypress/health-data-standards | lib/hqmf-parser/2.0/data_criteria.rb | HQMF2.DataCriteria.basic_setup | ruby | def basic_setup
@status = attr_val('./*/cda:statusCode/@code')
@id_xpath = './*/cda:id/@extension'
@id = "#{attr_val('./*/cda:id/@extension')}_#{attr_val('./*/cda:id/@root')}"
@comments = @entry.xpath("./#{CRITERIA_GLOB}/cda:text/cda:xml/cda:qdmUserComments/cda:item/text()",
HQMF2::Document::NAMESPACES).map(&:content)
@code_list_xpath = './*/cda:code'
@value_xpath = './*/cda:value'
@is_derived_specific_occurrence_variable = false
simple_extractions = DataCriteriaBaseExtractions.new(@entry)
@template_ids = simple_extractions.extract_template_ids
@local_variable_name = simple_extractions.extract_local_variable_name
@temporal_references = simple_extractions.extract_temporal_references
@derivation_operator = simple_extractions.extract_derivation_operator
@children_criteria = simple_extractions.extract_child_criteria
@subset_operators = simple_extractions.extract_subset_operators
@negation, @negation_code_list_id = simple_extractions.extract_negation
end | Handles elements that can be extracted directly from the xml. Utilises the "BaseExtractions" class. | train | https://github.com/projectcypress/health-data-standards/blob/252d4f0927c513eacde6b9ea41b76faa1423c34b/lib/hqmf-parser/2.0/data_criteria.rb#L184-L201 | class DataCriteria
include HQMF2::Utilities, HQMF2::DataCriteriaTypeAndDefinitionExtraction, HQMF2::DataCriteriaPostProcessing
attr_accessor :id
attr_accessor :original_id
attr_reader :property, :type, :status, :value, :effective_time, :section
attr_reader :temporal_references, :subset_operators, :children_criteria
attr_reader :derivation_operator, :negation, :negation_code_list_id, :description
attr_reader :field_values, :source_data_criteria, :specific_occurrence_const
attr_reader :specific_occurrence, :comments, :is_derived_specific_occurrence_variable
attr_reader :entry, :definition, :variable, :local_variable_name
CRITERIA_GLOB = "*[substring(name(),string-length(name())-7) = \'Criteria\']"
# Create a new instance based on the supplied HQMF entry
# @param [Nokogiri::XML::Element] entry the parsed HQMF entry
def initialize(entry, data_criteria_references = {}, occurrences_map = {})
@entry = entry
@data_criteria_references = data_criteria_references
@occurrences_map = occurrences_map
basic_setup
@variable = DataCriteriaMethods.extract_variable(@local_variable_name, @id)
@field_values = DataCriteriaMethods.extract_field_values(@entry, @negation)
@description = extract_description
obtain_specific_and_source = SpecificOccurrenceAndSource.new(@entry, @id, @local_variable_name,
@data_criteria_references, @occurrences_map)
# Pulling these 5 variables out via destructing
@source_data_criteria,
@source_data_criteria_root,
@source_data_criteria_extension,
@specific_occurrence,
@specific_occurrence_const = obtain_specific_and_source.extract_specific_occurrences_and_source_data_criteria
extract_definition_from_template_or_type
post_processing
end
def to_s
props = {
property: property,
type: type,
status: status,
section: section
}
"DataCriteria#{props}"
end
# TODO: Remove id method if id attribute is sufficient
# Get the identifier of the criteria, used elsewhere within the document for referencing
# @return [String] the identifier of this data criteria
# def id
# attr_val(@id_xpath)
# end
# Get the title of the criteria, provides a human readable description
# @return [String] the title of this data criteria
def title
disp_value = attr_val("#{@code_list_xpath}/cda:displayName/@value")
@title || disp_value || @description || id # allow defined titles to take precedence
end
# Get the code list OID of the criteria, used as an index to the code list database
# @return [String] the code list identifier of this data criteria
def code_list_id
@code_list_id || attr_val("#{@code_list_xpath}/@valueSet")
end
# Generates this classes hqmf-model equivalent
def to_model
mv = value.try(:to_model)
met = effective_time.try(:to_model)
mtr = temporal_references.collect(&:to_model)
mso = subset_operators.collect(&:to_model)
field_values = retrieve_field_values_model_for_model
retrieve_title_and_description_for_model unless @variable || @derivation_operator
@code_list_id = nil if @derivation_operator
# prevent json model generation of empty children and comments
cc = children_criteria.present? ? children_criteria : nil
comments = @comments.present? ? @comments : nil
HQMF::DataCriteria.new(id, title, nil, description, @code_list_id, cc, derivation_operator, @definition, status,
mv, field_values, met, retrieve_code_system_for_model, @negation, @negation_code_list_id,
mtr, mso, @specific_occurrence, @specific_occurrence_const, @source_data_criteria,
comments, @variable)
end
# Return a new DataCriteria instance with only grouper attributes set.
# A grouper criteria allows multiple data criteria events to be contained in a single
# logical set (a union or intersection of these multiple events - i.e. A during (B or C or D)).
# Grouper criteria also provide a way to combine multiple criteria that reference a specific
# occurrence of an event.
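# (Illustratively: a variable criteria with id "X" is renamed to "GROUP_X" and becomes the
# holding element; the grouper built via #extract_as_grouper below then points back at it
# through children_criteria = ["GROUP_X"].)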
def extract_variable_grouper
return unless @variable
@variable = false
@id = "GROUP_#{@id}"
if @children_criteria.length == 1 && @children_criteria[0] =~ /GROUP_/
reference_criteria = @data_criteria_references[@children_criteria.first]
return if reference_criteria.nil?
duplicate_child_info(reference_criteria)
@definition = reference_criteria.definition
@status = reference_criteria.status
@children_criteria = []
end
@specific_occurrence = nil
@specific_occurrence_const = nil
# set the source data criteria id to the id for variables
@source_data_criteria = @id
DataCriteria.new(@entry, @data_criteria_references, @occurrences_map).extract_as_grouper
end
# Extract this data criteria as a grouper data criteria
# SHOULD only be called on a variable data criteria instance
def extract_as_grouper
@field_values = {}
@temporal_references = []
@subset_operators = []
@derivation_operator = HQMF::DataCriteria::UNION
@definition = 'derived'
@status = nil
@children_criteria = ["GROUP_#{@id}"]
@source_data_criteria = @id
self
end
# Handle elements that are marked as variable groupers that should not be turned into a "holding element"
# (defined as a data criteria that encapsulates the calculation material for other data criteria elements,
# where the other data criteria elements reference the holding element as a child element)
def handle_derived_specific_occurrence_variable
# If the first child is all that exists, and it has been marked as a "group" element, switch this over to map to
# the new element.
if !@data_criteria_references["GROUP_#{@children_criteria.first}"].nil? && @children_criteria.length == 1
@children_criteria[0] = "GROUP_#{@children_criteria.first}"
# If the group element is not found, extract the information from the child and force it into the variable.
elsif @children_criteria.length == 1 && @children_criteria.first.present?
reference_criteria = @data_criteria_references[@children_criteria.first]
return if reference_criteria.nil?
duplicate_child_info(reference_criteria)
@children_criteria = reference_criteria.children_criteria
end
end
# clone method. This is needed because we need to extract a new source data criteria for variables.
# Typically "cloning" is done by re-parsing the xml entry; however, with post processing that does
# not give us the correct SDC data when we are trying to recreate, since we are looping back through
# the same data criteria before it has finished processing: See: DocUtilities.extract_source_data_criteria
def clone
other = DataCriteria.new(@entry, @data_criteria_references, @occurrences_map)
other.instance_variable_set(:@id, @id)
other.instance_variable_set(:@original_id, @original_id)
other.instance_variable_set(:@property, @property)
other.instance_variable_set(:@type, @type)
other.instance_variable_set(:@status, @status)
other.instance_variable_set(:@code_list_id, @code_list_id)
other.instance_variable_set(:@value, @value)
other.instance_variable_set(:@effective_time, @effective_time)
other.instance_variable_set(:@section, @section)
other.instance_variable_set(:@temporal_references, @temporal_references)
other.instance_variable_set(:@subset_operators, @subset_operators)
other.instance_variable_set(:@children_criteria, @children_criteria)
other.instance_variable_set(:@derivation_operator, @derivation_operator)
other.instance_variable_set(:@negation, @negation)
other.instance_variable_set(:@negation_code_list_id, @negation_code_list_id)
other.instance_variable_set(:@description, @description)
other.instance_variable_set(:@field_values, @field_values)
other.instance_variable_set(:@source_data_criteria, @source_data_criteria)
other.instance_variable_set(:@specific_occurrence_const, @specific_occurrence_const)
other.instance_variable_set(:@specific_occurrence, @specific_occurrence)
other.instance_variable_set(:@comments, @comments)
other.instance_variable_set(:@is_derived_specific_occurrence_variable, @is_derived_specific_occurrence_variable)
other.instance_variable_set(:@entry, @entry)
other.instance_variable_set(:@definition, @definition)
other.instance_variable_set(:@variable, @variable)
other.instance_variable_set(:@local_variable_name, @local_variable_name)
other
end
private
# Handles elements that can be extracted directly from the xml. Utilises the "BaseExtractions" class.
# Extract the description (with some special handling if this is a variable). The MAT has added an encoded
# form of the variable name in the localVariableName field which is used if available. If not, fall back
# to the extension.
def extract_description
if @variable
encoded_name = attr_val('./cda:localVariableName/@value')
encoded_name = DataCriteriaMethods.extract_description_for_variable(encoded_name) if encoded_name
return encoded_name if encoded_name.present?
attr_val("./#{CRITERIA_GLOB}/cda:id/@extension")
else
attr_val("./#{CRITERIA_GLOB}/cda:text/@value") ||
attr_val("./#{CRITERIA_GLOB}/cda:title/@value") ||
attr_val("./#{CRITERIA_GLOB}/cda:id/@extension")
end
end
# Extract the code system from the xml that the document should use
def retrieve_code_system_for_model
code_system = attr_val("#{@code_list_xpath}/@codeSystem")
if code_system
code_system_name = HealthDataStandards::Util::CodeSystemHelper.code_system_for(code_system)
else
code_system_name = attr_val("#{@code_list_xpath}/@codeSystemName")
end
code_value = attr_val("#{@code_list_xpath}/@code")
{ code_system_name => [code_value] } if code_system_name && code_value
end
# Duplicates information from a child element to this data criteria if none exists.
# If the duplication requires that some values be overwritten, do so only in the function calling this.
def duplicate_child_info(child_ref)
@title ||= child_ref.title
@type ||= child_ref.subset_operators
@definition ||= child_ref.definition
@status ||= child_ref.status
@code_list_id ||= child_ref.code_list_id
@temporal_references = child_ref.temporal_references if @temporal_references.empty?
@subset_operators ||= child_ref.subset_operators
@variable ||= child_ref.variable
@value ||= child_ref.value
end
# Generate the models of the field values
def retrieve_field_values_model_for_model
field_values = {}
@field_values.each_pair do |id, val|
field_values[id] = val.to_model
end
@code_list_id ||= code_list_id
# Model transfers as a field
if %w(transfer_to transfer_from).include? @definition
field_code_list_id = @code_list_id
@code_list_id = nil
unless field_code_list_id
field_code_list_id = attr_val("./#{CRITERIA_GLOB}/cda:outboundRelationship/#{CRITERIA_GLOB}/cda:value/@valueSet")
end
field_values[@definition.upcase] = HQMF::Coded.for_code_list(field_code_list_id, title)
end
return field_values unless field_values.empty?
end
# Generate the title and description used when producing the model
def retrieve_title_and_description_for_model
# drop "* Value Set" from titles
exact_desc = title.split(' ')[0...-3].join(' ')
# don't drop anything for patient characteristic titles
exact_desc = title if @definition.start_with?('patient_characteristic') && !title.end_with?('Value Set')
# remove * Value Set from title
title_match = title.match(/(.*) \w+ [Vv]alue [Ss]et/)
@title = title_match[1] if title_match && title_match.length > 1
@description = "#{@description}: #{exact_desc}"
end
end
|
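A construction sketch for the data criteria class above; a real HQMF R2 document is required, so the file name and xpath below are placeholders:
require 'nokogiri'
doc = Nokogiri::XML(File.read("measure_hqmf.xml"))                # placeholder document
entry = doc.at_xpath("//cda:entry", HQMF2::Document::NAMESPACES)  # placeholder xpath
criteria = HQMF2::DataCriteria.new(entry)  # reference and occurrence maps default to {}
model = criteria.to_model                  # the hqmf-model equivalent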
jeremytregunna/ruby-trello | lib/trello/card.rb | Trello.Card.remove_upvote | ruby | def remove_upvote
begin
client.delete("/cards/#{id}/membersVoted/#{me.id}")
rescue Trello::Error => e
fail e unless e.message =~ /has not voted/i
end
self
end | Rescind upvote. No-op if the authenticated user hasn't previously voted | train | https://github.com/jeremytregunna/ruby-trello/blob/ad79c9d8152ad5395b3b61c43170908f1912bfb2/lib/trello/card.rb#L392-L400 | class Card < BasicData
register_attributes :id, :short_id, :name, :desc, :due, :due_complete, :closed, :url, :short_url,
:board_id, :member_ids, :list_id, :pos, :last_activity_date, :labels, :card_labels,
:cover_image_id, :badges, :card_members, :source_card_id, :source_card_properties,
readonly: [ :id, :short_id, :url, :short_url, :last_activity_date, :badges, :card_members ]
validates_presence_of :id, :name, :list_id
validates_length_of :name, in: 1..16384
validates_length_of :desc, in: 0..16384
include HasActions
SYMBOL_TO_STRING = {
id: 'id',
short_id: 'idShort',
name: 'name',
desc: 'desc',
due: 'due',
due_complete: 'dueComplete',
closed: 'closed',
url: 'url',
short_url: 'shortUrl',
board_id: 'idBoard',
member_ids: 'idMembers',
cover_image_id: 'idAttachmentCover',
list_id: 'idList',
pos: 'pos',
last_activity_date: 'dateLastActivity',
card_labels: 'idLabels',
labels: 'labels',
badges: 'badges',
card_members: 'members',
source_card_id: "idCardSource",
source_card_properties: "keepFromSource"
}
class << self
# Find a specific card by its id.
#
# @raise [Trello::Error] if the card could not be found.
#
# @return [Trello::Card]
def find(id, params = {})
client.find(:card, id, params)
end
# Create a new card and save it on Trello.
#
# If using source_card_id to duplicate a card, make sure to save
# the source card to Trello before calling this method to ensure
# the correct data is used in the duplication.
#
# @param [Hash] options
# @option options [String] :name The name of the new card.
# @option options [String] :list_id ID of the list that the card should
# be added to.
# @option options [String] :desc A string with a
# length from 0 to 16384.
# @option options [String] :member_ids A comma-separated list of
# objectIds (24-character hex strings).
# @option options [String] :card_labels A comma-separated list of
# objectIds (24-character hex strings).
# @option options [Date] :due A date, or `nil`.
# @option options [String] :pos A position. `"top"`, `"bottom"`, or a
# positive number. Defaults to `"bottom"`.
# @option options [String] :source_card_id ID of the card to copy
# @option options [String] :source_card_properties A single, or array of,
# string properties to copy from source card.
# `"all"`, `"checklists"`, `"due"`, `"members"`, or `nil`.
# Defaults to `"all"`.
#
# @raise [Trello::Error] if the card could not be created.
#
# @return [Trello::Card]
def create(options)
client.create(:card,
'name' => options[:name],
'idList' => options[:list_id],
'desc' => options[:desc],
'idMembers' => options[:member_ids],
'idLabels' => options[:card_labels],
'due' => options[:due],
'due_complete' => options[:due_complete] || false,
'pos' => options[:pos],
'idCardSource' => options[:source_card_id],
'keepFromSource' => options.key?(:source_card_properties) ? options[:source_card_properties] : 'all'
)
end
end
# Update the fields of a card.
#
# Supply a hash of string keyed data retrieved from the Trello API representing
# a card.
#
# Note that this method does not save anything new to the Trello API,
# it just assigns the input attributes to your local object. If you use
# this method to assign attributes, call `save` or `update!` afterwards if
# you want to persist your changes to Trello.
#
# @param [Hash] fields
# @option fields [String] :id
# @option fields [String] :short_id
# @option fields [String] :name The new name of the card.
# @option fields [String] :desc A string with a length from 0 to
# 16384.
# @option fields [Date] :due A date, or `nil`.
# @option fields [Boolean] :due_complete
# @option fields [Boolean] :closed
# @option fields [String] :url
# @option fields [String] :short_url
# @option fields [String] :board_id
# @option fields [String] :member_ids A comma-separated list of objectIds
# (24-character hex strings).
# @option fields [String] :pos A position. `"top"`, `"bottom"`, or a
# positive number. Defaults to `"bottom"`.
# @option fields [Array] :labels An Array of Trello::Label objects
# derived from the JSON response
# @option fields [String] :card_labels A comma-separated list of
# objectIds (24-character hex strings).
# @option fields [Object] :cover_image_id
# @option fields [Object] :badges
# @option fields [Object] :card_members
# @option fields [String] :source_card_id
# @option fields [Array] :source_card_properties
#
# @return [Trello::Card] self
def update_fields(fields)
attributes[:id] = fields[SYMBOL_TO_STRING[:id]] || attributes[:id]
attributes[:short_id] = fields[SYMBOL_TO_STRING[:short_id]] || attributes[:short_id]
attributes[:name] = fields[SYMBOL_TO_STRING[:name]] || fields[:name] || attributes[:name]
attributes[:desc] = fields[SYMBOL_TO_STRING[:desc]] || fields[:desc] || attributes[:desc]
attributes[:due] = Time.iso8601(fields[SYMBOL_TO_STRING[:due]]) rescue nil if fields.has_key?(SYMBOL_TO_STRING[:due])
attributes[:due] = fields[:due] if fields.has_key?(:due)
attributes[:due_complete] = fields[SYMBOL_TO_STRING[:due_complete]] if fields.has_key?(SYMBOL_TO_STRING[:due_complete])
attributes[:due_complete] ||= false
attributes[:closed] = fields[SYMBOL_TO_STRING[:closed]] if fields.has_key?(SYMBOL_TO_STRING[:closed])
attributes[:url] = fields[SYMBOL_TO_STRING[:url]] || attributes[:url]
attributes[:short_url] = fields[SYMBOL_TO_STRING[:short_url]] || attributes[:short_url]
attributes[:board_id] = fields[SYMBOL_TO_STRING[:board_id]] || attributes[:board_id]
attributes[:member_ids] = fields[SYMBOL_TO_STRING[:member_ids]] || fields[:member_ids] || attributes[:member_ids]
attributes[:list_id] = fields[SYMBOL_TO_STRING[:list_id]] || fields[:list_id] || attributes[:list_id]
attributes[:pos] = fields[SYMBOL_TO_STRING[:pos]] || fields[:pos] || attributes[:pos]
attributes[:labels] = (fields[SYMBOL_TO_STRING[:labels]] || []).map { |lbl| Trello::Label.new(lbl) }.presence || attributes[:labels].presence || []
attributes[:card_labels] = fields[SYMBOL_TO_STRING[:card_labels]] || fields[:card_labels] || attributes[:card_labels]
attributes[:last_activity_date] = Time.iso8601(fields[SYMBOL_TO_STRING[:last_activity_date]]) rescue nil if fields.has_key?(SYMBOL_TO_STRING[:last_activity_date])
attributes[:cover_image_id] = fields[SYMBOL_TO_STRING[:cover_image_id]] || attributes[:cover_image_id]
attributes[:badges] = fields[SYMBOL_TO_STRING[:badges]] || attributes[:badges]
attributes[:card_members] = fields[SYMBOL_TO_STRING[:card_members]] || attributes[:card_members]
attributes[:source_card_id] = fields[SYMBOL_TO_STRING[:source_card_id]] || fields[:source_card_id] || attributes[:source_card_id]
attributes[:source_card_properties] = fields[SYMBOL_TO_STRING[:source_card_properties]] || fields[:source_card_properties] || attributes[:source_card_properties]
self
end
# Returns a reference to the board this card is part of.
one :board, path: :boards, using: :board_id
# Returns a reference to the cover image attachment
one :cover_image, path: :attachments, using: :cover_image_id
# Returns a list of checklists associated with the card.
#
# The options hash may have a filter key which can have its value set as any
# of the following values:
# :filter => [ :none, :all ] # default :all
many :checklists, filter: :all
# Returns a list of plugins associated with the card
many :plugin_data, path: "pluginData"
# List of custom field values on the card, only the ones that have been set
many :custom_field_items, path: 'customFieldItems'
def check_item_states
states = CheckItemState.from_response client.get("/cards/#{self.id}/checkItemStates")
MultiAssociation.new(self, states).proxy
end
# Returns a reference to the list this card is currently in.
one :list, path: :lists, using: :list_id
# Returns a list of members who are assigned to this card.
#
# @return [Array<Trello::Member>]
def members
members = Member.from_response client.get("/cards/#{self.id}/members")
MultiAssociation.new(self, members).proxy
end
# Returns a list of members who have upvoted this card
# NOTE: this fetches a fresh list each time it's called to avoid the case where
# the card is voted on (or a vote is removed) after the card is fetched, optimizing
# accuracy over network performance
#
# @return [Array<Trello::Member>]
def voters
Member.from_response client.get("/cards/#{id}/membersVoted")
end
# Saves a record.
#
# @raise [Trello::Error] if the card could not be saved
#
# @return [String] The JSON representation of the saved card returned by
# the Trello API.
def save
# If we have an id, just update our fields.
return update! if id
from_response client.post("/cards", {
name: name,
desc: desc,
idList: list_id,
idMembers: member_ids,
idLabels: card_labels,
pos: pos,
due: due,
dueComplete: due_complete,
idCardSource: source_card_id,
keepFromSource: source_card_properties
})
end
# Update an existing record.
#
# Warning: this updates all fields using values already in memory. If
# an external resource has updated these fields, you should refresh!
# this object before making your changes, and before updating the record.
#
# @raise [Trello::Error] if the card could not be updated.
#
# @return [String] The JSON representation of the updated card returned by
# the Trello API.
def update!
@previously_changed = changes
# extract only new values to build payload
payload = Hash[changes.map { |key, values| [SYMBOL_TO_STRING[key.to_sym].to_sym, values[1]] }]
@changed_attributes.clear
client.put("/cards/#{id}", payload)
end
# Delete this card
#
# @return [String] the JSON response from the Trello API
def delete
client.delete("/cards/#{id}")
end
# Check if the card is not active anymore.
def closed?
closed
end
# Close the card.
#
# This only marks your local copy card as closed. Use `close!` if you
# want to close the card and persist the change to the Trello API.
#
# @return [Boolean] always returns true
#
# @return [String] The JSON representation of the closed card returned by
# the Trello API.
def close
self.closed = true
end
def close!
close
save
end
# Is the record valid?
def valid?
name && list_id
end
# Add a comment with the supplied text.
def add_comment(text)
client.post("/cards/#{id}/actions/comments", text: text)
end
# Add a checklist to this card
def add_checklist(checklist)
client.post("/cards/#{id}/checklists", {
value: checklist.id
})
end
# create a new checklist and add it to this card
def create_new_checklist(name)
client.post("/cards/#{id}/checklists", { name: name })
end
# Move this card to the given list
def move_to_list(list)
list_number = list.is_a?(String) ? list : list.id
unless list_id == list_number
client.put("/cards/#{id}/idList", {
value: list_number
})
end
end
# Moves this card to the given list no matter which board it is on
def move_to_list_on_any_board(list_id)
list = List.find(list_id)
if board.id == list.board_id
move_to_list(list_id)
else
move_to_board(Board.find(list.board_id), list)
end
end
# Move this card to the given board (and optional list on this board)
def move_to_board(new_board, new_list = nil)
unless board_id == new_board.id
payload = { value: new_board.id }
payload[:idList] = new_list.id if new_list
client.put("/cards/#{id}/idBoard", payload)
end
end
# Add a member to this card
def add_member(member)
client.post("/cards/#{id}/members", {
value: member.id
})
end
# Remove a member from this card
def remove_member(member)
client.delete("/cards/#{id}/members/#{member.id}")
end
# Current authenticated user upvotes a card
def upvote
begin
client.post("/cards/#{id}/membersVoted", {
value: me.id
})
rescue Trello::Error => e
fail e unless e.message =~ /has already voted/i
end
self
end
# Rescind upvote. No-op if the authenticated user hasn't previously voted
# Add a label
def add_label(label)
unless label.valid?
errors.add(:label, "is not valid.")
return Trello.logger.warn "Label is not valid."
end
client.post("/cards/#{id}/idLabels", {value: label.id})
end
# Remove a label
def remove_label(label)
unless label.valid?
errors.add(:label, "is not valid.")
return Trello.logger.warn "Label is not valid."
end
client.delete("/cards/#{id}/idLabels/#{label.id}")
end
# Add an attachment to this card
def add_attachment(attachment, name = '')
# Is it a file object or a string (url)?
if attachment.respond_to?(:path) && attachment.respond_to?(:read)
client.post("/cards/#{id}/attachments", {
file: attachment,
name: name
})
else
client.post("/cards/#{id}/attachments", {
url: attachment,
name: name
})
end
end
# Retrieve a list of attachments
def attachments
attachments = Attachment.from_response client.get("/cards/#{id}/attachments")
MultiAssociation.new(self, attachments).proxy
end
# Remove an attachment from this card
def remove_attachment(attachment)
client.delete("/cards/#{id}/attachments/#{attachment.id}")
end
# :nodoc:
def request_prefix
"/cards/#{id}"
end
# Retrieve a list of comments
def comments
Comment.from_response client.get("/cards/#{id}/actions", filter: "commentCard")
end
# Find the creation date
def created_at
@created_at ||= Time.at(id[0..7].to_i(16)) rescue nil
end
private
def me
@me ||= Member.find(:me)
end
end
|
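A short sketch of the voting helpers above; the card id is a placeholder and a configured Trello client is assumed:
card = Trello::Card.find("CARD_ID")  # placeholder id
card.upvote         # no-op if the current member has already voted
card.voters         # fresh Member list on every call
card.remove_upvote  # no-op if the current member hasn't voted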