hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
1dee32b4ce50cf9f550000ab6af023c58d537277 | 852 | # Copyright 2011-2015, The Trustees of Indiana University and Northwestern
# University. Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# --- END LICENSE_HEADER BLOCK ---
# Factory for Playlist records used across the test suite.
# NOTE(review): FactoryGirl has since been renamed FactoryBot — worth
# migrating when the suite's dependencies are next updated.
FactoryGirl.define do
  factory :playlist do
    user                                # association to the :user factory
    title { Faker::Lorem.word }         # random single-word title
    comment { Faker::Lorem.sentence }   # random one-sentence comment
    visibility { Playlist::PRIVATE }    # playlists are private by default
  end
end
| 37.043478 | 82 | 0.733568 |
26cb0fe7966a5e957d65cae49cb4435e6963ee7b | 356 | require 'travis/services/base'
module Travis
  module Services
    # Service that looks up a single repository from request params.
    class FindRepo < Base
      register :find_repo

      # Returns the matching repository or nil.
      # NOTE(review): the options argument is accepted but never used —
      # the lookup is driven entirely by `params` (supplied by Base).
      def run(options = {})
        result
      end

      # Timestamp of the found repository, or nil when nothing matched.
      def updated_at
        result.try(:updated_at)
      end

      private

        # Memoized lookup against the :repository scope; find_by returns
        # nil rather than raising when no repository matches params.
        def result
          @result ||= scope(:repository).find_by(params)
        end
    end
  end
end
| 14.833333 | 56 | 0.573034 |
0830f835bf45076080ab3a16319b7ac975df8437 | 648 | # frozen_string_literal: true
# This file was auto-generated by lib/tasks/web.rake
module Slack
  module Web
    module Api
      module Endpoints
        # Auto-generated wrapper for the team.profile.* Web API methods
        # (regenerate via lib/tasks/web.rake rather than editing by hand).
        module TeamProfile
          #
          # Retrieve a team's profile.
          #
          # @option options [Object] :visibility
          #   Filter by visibility.
          # @see https://api.slack.com/methods/team.profile.get
          # @see https://github.com/slack-ruby/slack-api-ref/blob/master/methods/team.profile/team.profile.get.json
          #
          # POSTs to the team.profile.get endpoint; the response shape is
          # defined by the Slack Web API (see links above).
          def team_profile_get(options = {})
            post('team.profile.get', options)
          end
        end
      end
    end
  end
end
| 27 | 115 | 0.597222 |
edeba355bd30dc1c976ecdff549ab74c9ed4e519 | 801 | require "bundler/setup"
# Coverage must be started BEFORE the code under test is required,
# otherwise the files loaded by the requires below are never tracked
# by SimpleCov (previously SimpleCov.start ran inside RSpec.configure,
# long after the library was loaded).
require 'simplecov'
require 'codacy-coverage'
SimpleCov.start
Codacy::Reporter.start

require "rails_matching"
require 'pp'
require 'faker'
require "active_record"
require 'factory_girl_rails'
require_relative "support/factory_girl"

RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"

  config.expect_with :rspec do |c|
    c.syntax = :expect
  end

  # In-memory SQLite keeps the suite self-contained and fast; schema and
  # models are loaded fresh from local fixtures.
  ActiveRecord::Base.establish_connection adapter: "sqlite3", database: ":memory:"
  load File.dirname(__FILE__) + '/schema.rb'
  require File.dirname(__FILE__) + '/models.rb'

  Faker::Name.unique.clear # Clears used values for Faker::Name
  Faker::UniqueGenerator.clear # Clears used values for all generators
end
| 22.885714 | 82 | 0.756554 |
abda0b6c686d3a8ff3b171b38f05285d7166d1b4 | 160 | # frozen_string_literal: true
# Adds an integer product_id column to reviews so each review can
# reference the product it belongs to.
class AddProductIdToReview < ActiveRecord::Migration[5.2]
  def change
    add_column :reviews, :product_id, :integer
  end
end
| 20 | 57 | 0.7625 |
871211c9adae9242cffc7b2660ebcf28a27bb7d6 | 978 | # frozen_string_literal: true
class Shrine
  module Plugins
    # Documentation can be found on https://shrinerb.com/docs/plugins/upload_options
    #
    # Allows per-storage upload options to be configured either statically
    # (a hash) or dynamically (a callable receiving the IO and options).
    module UploadOptions
      # Stores per-storage upload options on the uploader; loading the
      # plugin again merges into (rather than replaces) prior config.
      def self.configure(uploader, options = {})
        uploader.opts[:upload_options] ||= {}
        uploader.opts[:upload_options].merge!(options)
      end

      module InstanceMethods
        private

        # Resolves the upload options for this upload and threads them
        # through to the underlying storage via super.
        def _upload(io, **options)
          upload_options = get_upload_options(io, options)
          super(io, **options, upload_options: upload_options)
        end

        # Precedence (lowest to highest): plugin-configured options for
        # the current storage (invoked when callable), then any
        # :upload_options passed directly to this upload call.
        def get_upload_options(io, options)
          upload_options = opts[:upload_options][storage_key] || {}
          upload_options = upload_options.call(io, options) if upload_options.respond_to?(:call)
          upload_options = upload_options.merge(options[:upload_options]) if options[:upload_options]
          upload_options
        end
      end
    end

    register_plugin(:upload_options, UploadOptions)
  end
end
| 29.636364 | 101 | 0.671779 |
ed096c10863118149a5cc0c7ff1671907f2f247e | 125 | json.extract! event, :id, :title, :date, :location, :body, :created_at, :updated_at
# Absolute URL to this event's JSON representation.
json.url event_url(event, format: :json)
| 41.666667 | 83 | 0.72 |
21a4fdf97fe4e2a3025d7525e5bcf62d73fe913b | 1,358 | #
# Be sure to run `pod lib lint BCLKeyValueObservation.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name             = "BCLKeyValueObservation"
  s.version          = "0.1.0"
  s.summary          = "BCLKeyValueObservation is a thin abstraction on top of Apple's KVO system."
  s.description      = <<-DESC
                       BCLKeyValueObservation is a thin abstraction on top of Apple's KVO system. The goals of BCLKeyValueObservation are:

                       - Less boiler plate code (good bye observeValueForKeyPath:ofObject:change:context:)
                       - Improve clarity of functionality (imperiative method names)
                       - **Not** an excuse to have fun with the runtime
                       DESC
  s.homepage         = "https://github.com/benedictc/BCLKeyValueObservation"
  s.license          = 'MIT'
  s.author           = { "Benedict Cohen" => "[email protected]" }
  # Derive the tag from s.version so bumping the version cannot drift out
  # of sync with the tagged release (it was previously hard-coded "0.1.0").
  s.source           = { :git => "https://github.com/benedictc/BCLKeyValueObservation.git", :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/benedictc'

  s.platform     = :ios, '5.0'
  s.requires_arc = true
  s.source_files = "BCLKeyValueObservation", "BCLKeyValueObservation/**/*.{h,m}"
end
| 43.806452 | 115 | 0.679676 |
f8cff785cdbf23641d651a0411f32c172872c454 | 4,396 | require File.join(File.dirname(__FILE__), 'spec_helper')
# Specs for the class-level DSL on SData::Resource::Base:
# .initial_scope, .registered_resources and .has_sdata_options.
describe SData::Resource::Base do
  describe ".initial_scope" do
    context "given a resource with a baze class" do
      before :all do
        class TestModel < ActiveRecord::Base; end
        class TestResource < SData::Resource::Base
          self.baze_class = TestModel
        end
      end

      context "when .initial_scope is called within class body" do
        before :all do
          TestResource.class_eval do
            initial_scope do |user|
              { :conditions => { :created_by_id => user.id } }
            end
          end
        end

        it "should define a scope named :sdata_scope_for_context in a baze class" do
          TestModel.should respond_to(:sdata_scope_for_context)
        end

        it "should pass given block to a scope" do
          fake_user = stub('user', :id => 1)
          scope = TestModel.sdata_scope_for_context(fake_user)
          # Inspects old-Rails scope internals to prove the DSL block
          # became the scope's :find conditions.
          scope.current_scoped_methods[:find].should == { :conditions => { :created_by_id => 1 } }
        end
      end
    end
  end

  describe "#registered_resources" do
    context "when I inherit couple of classes" do
      before :each do
        class TradingAccount < SData::Resource::Base; end
        class SalesInvoice < SData::Resource::Base; end
      end

      it "should give access to children by symbol" do
        SData::Resource::Base.registered_resources[:trading_account].should == TradingAccount
        SData::Resource::Base.registered_resources[:sales_invoice].should == SalesInvoice
      end

      context "when child classes are in namespaces" do
        before :each do
          module SData
            module Contracts
              module CrmErp
                class PostalAddress < SData::Resource::Base; end
              end
            end
          end
        end

        it "should give access to these children by keys without namespace" do
          # Registry keys are demodulized: the namespace is stripped.
          SData::Resource::Base.registered_resources[:postal_address].should == SData::Contracts::CrmErp::PostalAddress
        end
      end

      describe "derived class" do
        subject { TradingAccount }

        it "should respond to SData-related class methods" do
          subject.should respond_to(:sdata_resource_kind_url)
        end
      end
    end

    context "when I derive from Resource::Base incorrectly" do
      it "should raise a reasonable error" do
        # Anonymous subclassing (Class.new) gives the registry no class
        # name to key on, so it must be rejected with this message.
        lambda { ThisWillNotWork = Class.new(SData::Resource::Base) }.should raise_error('You should derive from SData::Resource::base explicitly in order to provide child class name')
      end
    end
  end

  describe ".has_sdata_options" do
    context "when I derive from SData::Resource::Base" do
      before :all do
        class TradingAccount < SData::Resource::Base; end
      end

      it "should respond to .has_sdata_options" do
        TradingAccount.should respond_to(:has_sdata_options)
      end

      context "when I call with a hash" do
        before :all do
          TradingAccount.class_eval { has_sdata_options :instance_id => :id }
        end

        it "should return given hash then" do
          TradingAccount.sdata_options.should == { :instance_id => :id }
        end

        describe "sdata resource instance" do
          before do
            @resource_instance = TradingAccount.new
          end

          it "should respond to #sdata_options" do
            @resource_instance.should respond_to(:sdata_options)
          end

          it "should return the same options" do
            # The instance-level reader mirrors the class-level options.
            @resource_instance.sdata_options.should == { :instance_id => :id }
          end
        end
      end
    end

    context "when two SData resources with different options" do
      before :each do
        class TradingAccount < SData::Resource::Base
          has_sdata_options :value => 'TradingAccount'
        end
        class SalesInvoice < SData::Resource::Base
          has_sdata_options :value => 'SalesInvoice'
        end
      end

      it "should respond to #sdata_options" do
        TradingAccount.should respond_to(:sdata_options)
        SalesInvoice.should respond_to(:sdata_options)
      end

      it "should return correspondent value" do
        # Options are stored per-class, not shared via the ancestor.
        TradingAccount.sdata_options.should == { :value => 'TradingAccount' }
        SalesInvoice.sdata_options.should == { :value => 'SalesInvoice' }
      end
    end
  end
end
| 32.087591 | 184 | 0.629208 |
01861b391654225bbda8b5c093ef326a2d6a5ab7 | 2,260 | # frozen_string_literal: true
require 'spec_helper'
# Specs for the Ci::Maskable concern: which variable values may be
# masked, and the REGEX that defines maskability.
describe Ci::Maskable do
  let(:variable) { build(:ci_variable) }

  describe 'masked value validations' do
    subject { variable }

    context 'when variable is masked' do
      before do
        subject.masked = true
      end

      # Maskable values: at least 8 chars, single line, no spaces,
      # no '$' expansion, no escape sequences.
      it { is_expected.not_to allow_value('hello').for(:value) }
      it { is_expected.not_to allow_value('hello world').for(:value) }
      it { is_expected.not_to allow_value('hello$VARIABLEworld').for(:value) }
      it { is_expected.not_to allow_value('hello\rworld').for(:value) }
      it { is_expected.to allow_value('helloworld').for(:value) }
    end

    context 'when variable is not masked' do
      before do
        subject.masked = false
      end

      # Unmasked variables accept any of these values.
      it { is_expected.to allow_value('hello').for(:value) }
      it { is_expected.to allow_value('hello world').for(:value) }
      it { is_expected.to allow_value('hello$VARIABLEworld').for(:value) }
      it { is_expected.to allow_value('hello\rworld').for(:value) }
      it { is_expected.to allow_value('helloworld').for(:value) }
    end
  end

  describe 'REGEX' do
    subject { Ci::Maskable::REGEX }

    it 'does not match strings shorter than 8 letters' do
      expect(subject.match?('hello')).to eq(false)
    end

    it 'does not match strings with spaces' do
      expect(subject.match?('hello world')).to eq(false)
    end

    it 'does not match strings with shell variables' do
      expect(subject.match?('hello$VARIABLEworld')).to eq(false)
    end

    it 'does not match strings with escape characters' do
      expect(subject.match?('hello\rworld')).to eq(false)
    end

    it 'does not match strings that span more than one line' do
      string = <<~EOS
        hello
        world
      EOS

      expect(subject.match?(string)).to eq(false)
    end

    it 'does not match strings using unsupported characters' do
      expect(subject.match?('HelloWorld%#^')).to eq(false)
    end

    it 'matches valid strings' do
      expect(subject.match?('Hello+World_123/@:-.')).to eq(true)
    end
  end

  describe '#to_runner_variable' do
    subject { variable.to_runner_variable }

    it 'exposes the masked attribute' do
      expect(subject).to include(:masked)
    end
  end
end
| 27.901235 | 78 | 0.65354 |
91a248296f40b21dbb2f1a10f5f8e3d6c97aff15 | 432 | require 'open-uri'
require 'bigdecimal/util' # String#to_d

module Inflation
  # Fetches US inflation-adjusted prices from the statbureau.org API.
  class Service
    # Returns the present-day equivalent of +amount+ from +year+ as a
    # BigDecimal.
    #
    # Both keywords are now required: the original `year: year,
    # amount: amount` circular defaults silently resolved to nil.
    # URI.open replaces the URL form of Kernel#open, which is
    # deprecated (and removed in Ruby 3).
    def calculate_price_now(year:, amount:)
      URI.open(build_url(year, amount)).read.delete('"').to_d
    end

    private

    # Builds the statbureau.org query URL covering Jan 1 of +from+
    # through Dec 1 of the current year.
    # NOTE(review): Date.current is ActiveSupport — assumes a Rails
    # runtime; confirm before extracting this class elsewhere.
    def build_url(from, amount)
      now = Date.current.year
      "https://www.statbureau.org/calculate-inflation-price-json?country=united-states&start=#{from}%2F1%2F1&end=#{now}%2F12%2F1&amount=#{amount}&format=false"
    end
  end
end
| 22.736842 | 159 | 0.678241 |
d572c543924f4e44b67e1b53946fafa801f46cf1 | 496 | module Srunr
class Configuration
attr_accessor :options
DEFAULTS = {
hostname: ""
}
DEFAULTS.each do |k,v|
define_method(k) do
get_option(k)
end
define_method("#{k}=") do |val|
options[k] = val
end
end
def initialize(opts=nil)
@options = opts || DEFAULTS
end
def get_option(key)
val = options[key]
if val.respond_to?(:call)
val.call
else
val
end
end
end
end
| 14.588235 | 37 | 0.530242 |
18d058cc0051072d1bc258e7de225f673630a084 | 1,608 | class Babeld < Formula
desc "Loop-avoiding distance-vector routing protocol"
homepage "https://www.irif.fr/~jch/software/babel/"
url "https://www.irif.fr/~jch/software/files/babeld-1.9.2.tar.gz"
sha256 "154f00e0a8bf35d6ea9028886c3dc5c3c342dd1a367df55ef29a547b75867f07"
license "MIT"
head "https://github.com/jech/babeld.git"
livecheck do
url "https://www.irif.fr/~jch/software/files/"
regex(/href=.*?babeld[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "a7bb20a1f278ab2acc151622894d0e96ee81e9a9a0e53c1ecc9565f5906ed172" => :big_sur
sha256 "6a56133eedc55610cbd65c8862584e2a109702e6f6c3619c58bcc99a41c99da1" => :arm64_big_sur
sha256 "1e311a15868154bf204fe2d9d19ed1db24c830fcf9cfaa32cf1255d7ed35b108" => :catalina
sha256 "1ddbacdd3433b008c2ad86e582ab2376cf0bab93b7939bb9f47d6e1e1fd06ad3" => :mojave
sha256 "560f7d73b3d1f4c987766f8a669fc666f44711bfbdac99e8d353f8edb118fe38" => :x86_64_linux
end
def install
on_macos do
# LDLIBS='' fixes: ld: library not found for -lrt
system "make", "LDLIBS=''"
end
on_linux do
system "make"
end
system "make", "install", "PREFIX=#{prefix}"
end
test do
shell_output("#{bin}/babeld -I #{testpath}/test.pid -L #{testpath}/test.log", 1)
if OS.mac?
expected = <<~EOS
Couldn't tweak forwarding knob.: Operation not permitted
kernel_setup failed.
EOS
assert_equal expected, (testpath/"test.log").read
else
assert_match "kernel_setup failed", (testpath/"test.log").read
end
end
end
| 33.5 | 95 | 0.710199 |
bb08d95d0a73aa589ea8cde81d1db4bda23fdf21 | 427 | # frozen_string_literal: true
module SolidusMultiDomain
  module Spree
    # Prepended into Spree::Product to associate products with stores
    # and allow scoping product queries to a single store.
    module ProductDecorator
      def self.prepended(base)
        base.class_eval do
          # A product can be listed in many stores and vice versa.
          has_and_belongs_to_many :stores, join_table: 'spree_products_stores'

          # Products visible in the given store (joins the HABTM table).
          scope :by_store, lambda { |store| joins(:stores).where("spree_products_stores.store_id = ?", store) }
        end
      end

      ::Spree::Product.prepend self
    end
  end
end
| 23.722222 | 111 | 0.679157 |
5db9473418c3886913794a6aa44f57233c620ad7 | 881 | Pod::Spec.new do |s|
s.name = 'YPImagePicker'
s.version = "3.5.3"
s.summary = "Instagram-like image picker & filters for iOS"
s.homepage = "https://github.com/Yummypets/YPImagePicker"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = 'S4cha'
s.platform = :ios
s.source = { :git => "https://github.com/Yummypets/YPImagePicker.git",
:tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/sachadso'
s.requires_arc = true
s.ios.deployment_target = "9.0"
s.source_files = 'Source/**/*.swift'
s.dependency 'SteviaLayout', '~> 4.4.1'
s.dependency 'PryntTrimmerView', '~> 3.0.0'
s.resources = ['Resources/*', 'Source/**/*.xib']
s.description = "Instagram-like image picker & filters for iOS supporting videos and albums"
end
| 44.05 | 95 | 0.580023 |
089774a6ef73a99edcf5055097ff35548f5c88cb | 104 | class Admin::DashboardController < AdminController
def index
@dashboard = Dashboard.new
end
end
| 17.333333 | 50 | 0.759615 |
f783b84cbca860a4fc1eab31a413687eb3684c23 | 483 | require 'fastlane/plugin/qiye_wechat_bot/version'
module Fastlane
  module QiyeWechatBot
    # Collect every Ruby file under this plugin's "actions" and
    # "helper" directories so they can all be required in one pass.
    def self.all_classes
      pattern = File.expand_path('**/{actions,helper}/*.rb', File.dirname(__FILE__))
      Dir.glob(pattern)
    end
  end
end
# By default we want to import all available actions and helpers
# A plugin can contain any number of actions and plugins
Fastlane::QiyeWechatBot.all_classes.each do |current|
require current
end
| 28.411765 | 79 | 0.753623 |
aca7df5445f9a3badebf63e13a80320f4215936d | 1,710 | # frozen_string_literal: true
require 'spec_helper'
# Specs for the GraphQL mutation that locks/unlocks a merge request's
# discussion.
RSpec.describe Mutations::MergeRequests::SetLocked do
  let(:merge_request) { create(:merge_request) }
  let(:user) { create(:user) }

  subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }

  specify { expect(described_class).to require_graphql_authorizations(:update_merge_request) }

  describe '#resolve' do
    let(:locked) { true }
    let(:mutated_merge_request) { subject[:merge_request] }

    subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, locked: locked) }

    it 'raises an error if the resource is not accessible to the user' do
      expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
    end

    context 'when the user can update the merge request' do
      before do
        merge_request.project.add_developer(user)
      end

      it 'returns the merge request as discussion locked' do
        expect(mutated_merge_request).to eq(merge_request)
        expect(mutated_merge_request).to be_discussion_locked
        expect(subject[:errors]).to be_empty
      end

      it 'returns errors merge request could not be updated' do
        # Make the merge request invalid
        merge_request.allow_broken = true
        merge_request.update!(source_project: nil)

        expect(subject[:errors]).not_to be_empty
      end

      context 'when passing locked as false' do
        let(:locked) { false }

        it 'unlocks the discussion' do
          # update! (not update) so a failed save raises here instead of
          # silently leaving the fixture unlocked and masking the
          # assertion below; also consistent with the example above.
          merge_request.update!(discussion_locked: true)

          expect(mutated_merge_request).not_to be_discussion_locked
        end
      end
    end
  end
end
| 31.666667 | 119 | 0.7 |
e2dcf60598dbd56318ab50058cd090ec04eb3b0b | 2,771 | #!/usr/bin/env ruby -S rspec
require 'spec_helper'
# Seeds three legacy passgen entries ("<id_base>_1".."_3"), each with a
# current password/salt plus one prior pair in history, and returns the
# result hash simplib::passgen::legacy::list is expected to produce.
def store_valid_legacy_passwords(id_base, password_base, salt_base)
  expected = { 'keys' => {}, 'folders' => [] }

  # add passwords
  (1..3).each do |num|
    id = "#{id_base}_#{num}"
    prev_password = "old #{password_base} #{num}"
    current_password = "#{password_base} #{num}"
    prev_salt = "old #{salt_base} #{num}"
    current_salt = "#{salt_base} #{num}"
    # Setting twice pushes the first pair into the entry's history.
    call_function('simplib::passgen::legacy::set', id, prev_password, prev_salt)
    call_function('simplib::passgen::legacy::set', id, current_password, current_salt)

    value = { 'password' => current_password, 'salt' => current_salt }
    meta = { 'history' => [ [ prev_password, prev_salt ] ] }
    expected['keys'][id] = { 'value' => value, 'metadata' => meta }
  end

  expected
end
# Specs for the simplib::passgen::legacy::list Puppet function.
describe 'simplib::passgen::legacy::list' do
  let(:id_base) { 'my_id' }
  let(:password_base) { 'password for my_id' }
  let(:salt_base) { 'salt for my_id' }

  context 'successes' do
    it 'should return {} when the root folder does not exist' do
      is_expected.to run.with_params().and_return( {} )
    end

    it 'should return empty password and folder results when the root folder is empty' do
      # call subject() to make sure test Puppet environment is created
      subject()

      settings = call_function('simplib::passgen::legacy::common_settings')
      FileUtils.mkdir_p(settings['keydir'])

      expected = { 'keys' => {}, 'folders' => [] }
      is_expected.to run.with_params().and_return( expected )
    end

    it 'should return password info and folders when root folder is not empty' do
      subject()
      expected = store_valid_legacy_passwords(id_base, password_base, salt_base)

      is_expected.to run.with_params().and_return( expected )
    end

    it 'should skip bad entries' do
      subject()

      # store valid passwords
      expected = store_valid_legacy_passwords(id_base, password_base, salt_base)

      # store an invalid password entry (empty content)
      settings = call_function('simplib::passgen::legacy::common_settings')
      FileUtils.mkdir_p(settings['keydir'])
      FileUtils.touch(File.join(settings['keydir'], 'bad_key'))

      # The empty entry must be silently skipped, not raise or appear.
      is_expected.to run.with_params().and_return( expected )
    end
  end

  context 'failures' do
    it 'fails when the password root directory cannot be accessed' do
      subject()
      settings = call_function('simplib::passgen::legacy::common_settings')
      FileUtils.mkdir_p(settings['keydir'])
      # Stub chdir to raise, simulating a permissions problem on keydir.
      expect(Dir).to receive(:chdir).with(settings['keydir']).
        and_raise(Errno::EACCES, 'chdir failed')

      is_expected.to run.with_params().and_raise_error(Errno::EACCES,
        'Permission denied - chdir failed')
    end
  end
end
| 34.209877 | 89 | 0.666185 |
18cccee01b83c4bb6cc0898ea284d50300eb2588 | 4,520 | # The pg_enum extension adds support for Sequel to handle PostgreSQL's enum
# types. To use this extension, first load it into your Database instance:
#
# DB.extension :pg_enum
#
# It allows creation of enum types using create_enum:
#
# DB.create_enum(:type_name, %w'value1 value2 value3')
#
# You can also add values to existing enums via add_enum_value:
#
# DB.add_enum_value(:enum_type_name, 'value4')
#
# If you want to drop an enum type, you can use drop_enum:
#
# DB.drop_enum(:type_name)
#
# Just like any user-created type, after creating the type, you
# can create tables that have a column of that type:
#
# DB.create_table(:table_name)
# enum_type_name :column_name
# end
#
# When parsing the schema, enum types are recognized, and available
# values returned in the schema hash:
#
# DB.schema(:table_name)
# [[:column_name, {:type=>:enum, :enum_values=>['value1', 'value2']}]]
#
# If the pg_array extension is used, arrays of enums are returned as a
# PGArray:
#
# DB.create_table(:table_name)
# column :column_name, 'enum_type_name[]'
# end
# DB[:table_name].get(:column_name)
# # ['value1', 'value2']
#
# Finally, typecasting for enums is setup to cast to strings, which
# allows you to use symbols in your model code. Similar, you can provide
# the enum values as symbols when creating enums using create_enum or
# add_enum_value.
#
module Sequel
  module Postgres
    # Methods enabling Database object integration with enum types.
    module EnumDatabaseMethods
      # Parse the available enum values when loading this extension into
      # your database.
      def self.extended(db)
        # send is required because parse_enum_labels is private.
        db.send(:parse_enum_labels)
      end

      # Run the SQL to add the given value to the existing enum type.
      # Options:
      # :after :: Add the new value after this existing value.
      # :before :: Add the new value before this existing value.
      # :if_not_exists :: Do not raise an error if the value already exists in the enum.
      def add_enum_value(enum, value, opts=OPTS)
        sql = "ALTER TYPE #{quote_schema_table(enum)} ADD VALUE#{' IF NOT EXISTS' if opts[:if_not_exists]} #{literal(value.to_s)}"
        if v = opts[:before]
          sql << " BEFORE #{literal(v.to_s)}"
        elsif v = opts[:after]
          sql << " AFTER #{literal(v.to_s)}"
        end
        run sql
        # Refresh the cached enum metadata after the DDL change.
        parse_enum_labels
        nil
      end

      # Run the SQL to create an enum type with the given name and values.
      def create_enum(enum, values)
        sql = "CREATE TYPE #{quote_schema_table(enum)} AS ENUM (#{values.map{|v| literal(v.to_s)}.join(', ')})"
        run sql
        parse_enum_labels
        nil
      end

      # Run the SQL to drop the enum type with the given name.
      # Options:
      # :if_exists :: Do not raise an error if the enum type does not exist
      # :cascade :: Also drop other objects that depend on the enum type
      def drop_enum(enum, opts=OPTS)
        sql = "DROP TYPE#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(enum)}#{' CASCADE' if opts[:cascade]}"
        run sql
        parse_enum_labels
        nil
      end

      private

      # Parse the pg_enum table to get enum values, and
      # the pg_type table to get names and array oids for
      # enums.
      def parse_enum_labels
        # Maps enum type oid => ordered array of label strings.
        @enum_labels = metadata_dataset.from(:pg_enum).
          order(:enumtypid, :enumsortorder).
          select_hash_groups(Sequel.cast(:enumtypid, Integer).as(:v), :enumlabel)

        # When the pg_array extension is loaded (register_array_type is
        # defined), register an array type for each enum so enum[]
        # columns are converted on retrieval.
        if respond_to?(:register_array_type)
          array_types = metadata_dataset.
            from(:pg_type).
            where(:oid=>@enum_labels.keys).
            exclude(:typarray=>0).
            select_map([:typname, Sequel.cast(:typarray, Integer).as(:v)])

          existing_oids = conversion_procs.keys
          array_types.each do |name, oid|
            # Skip array oids that already have a conversion proc.
            next if existing_oids.include?(oid)
            register_array_type(name, :oid=>oid)
          end
        end
      end

      # For schema entries that are enums, set the type to
      # :enum and add a :enum_values entry with the enum values.
      def schema_parse_table(*)
        super.each do |_, s|
          if values = @enum_labels[s[:oid]]
            s[:type] = :enum
            s[:enum_values] = values
          end
        end
      end

      # Typecast the given value to a string.
      def typecast_value_enum(value)
        value.to_s
      end
    end
  end

  Database.register_extension(:pg_enum, Postgres::EnumDatabaseMethods)
end
| 32.992701 | 130 | 0.64292 |
39dab86d9f48cbe1370aaa2caf37cb3c1469b278 | 151 | class AddUsersFullname < ActiveRecord::Migration
def change
change_table(:users) do |t|
t.string :full_name, default: ""
end
end
end
| 18.875 | 48 | 0.682119 |
ab782cd495f9d91ff9e006f3930cbb6976c23cac | 454 | # == Schema Information
#
# Table name: acts_as_relating_to_roles
#
# id :integer not null, primary key
# name :string
# display_name :string
# created_at :datetime not null
# updated_at :datetime not null
# reciprocal :string
#
require_dependency 'acts_as_relating_to'
# A named relationship role; both the internal name and the
# human-readable display name are mandatory.
class ActsAsRelatingTo::Role < ActiveRecord::Base
  validates :name, presence: true
  validates :display_name, presence: true
end
| 22.7 | 55 | 0.636564 |
91139d421f567cfde8fdd18ee5a173460cae1d26 | 53,976 | require 'spec_helper'
module Gitlab
module Ci
describe YamlProcessor do
subject { described_class.new(config, user: nil) }
describe '#build_attributes' do
subject { described_class.new(config, user: nil).build_attributes(:rspec) }
describe 'attributes list' do
let(:config) do
YAML.dump(
before_script: ['pwd'],
rspec: { script: 'rspec' }
)
end
it 'returns valid build attributes' do
expect(subject).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
options: {
before_script: ["pwd"],
script: ["rspec"]
},
allow_failure: false,
when: "on_success",
yaml_variables: []
})
end
end
describe 'coverage entry' do
describe 'code coverage regexp' do
let(:config) do
YAML.dump(rspec: { script: 'rspec',
coverage: '/Code coverage: \d+\.\d+/' })
end
it 'includes coverage regexp in build attributes' do
expect(subject)
.to include(coverage_regex: 'Code coverage: \d+\.\d+')
end
end
end
describe 'retry entry' do
context 'when retry count is specified' do
let(:config) do
YAML.dump(rspec: { script: 'rspec', retry: { max: 1 } })
end
it 'includes retry count in build options attribute' do
expect(subject[:options]).to include(retry: { max: 1 })
end
end
context 'when retry count is not specified' do
let(:config) do
YAML.dump(rspec: { script: 'rspec' })
end
it 'does not persist retry count in the database' do
expect(subject[:options]).not_to have_key(:retry)
end
end
end
describe 'allow failure entry' do
context 'when job is a manual action' do
context 'when allow_failure is defined' do
let(:config) do
YAML.dump(rspec: { script: 'rspec',
when: 'manual',
allow_failure: false })
end
it 'is not allowed to fail' do
expect(subject[:allow_failure]).to be false
end
end
context 'when allow_failure is not defined' do
let(:config) do
YAML.dump(rspec: { script: 'rspec',
when: 'manual' })
end
it 'is allowed to fail' do
expect(subject[:allow_failure]).to be true
end
end
end
context 'when job is not a manual action' do
context 'when allow_failure is defined' do
let(:config) do
YAML.dump(rspec: { script: 'rspec',
allow_failure: false })
end
it 'is not allowed to fail' do
expect(subject[:allow_failure]).to be false
end
end
context 'when allow_failure is not defined' do
let(:config) do
YAML.dump(rspec: { script: 'rspec' })
end
it 'is not allowed to fail' do
expect(subject[:allow_failure]).to be false
end
end
end
end
describe 'delayed job entry' do
context 'when delayed is defined' do
let(:config) do
YAML.dump(rspec: { script: 'rollout 10%',
when: 'delayed',
start_in: '1 day' })
end
it 'has the attributes' do
expect(subject[:when]).to eq 'delayed'
expect(subject[:options][:start_in]).to eq '1 day'
end
end
end
end
describe '#stages_attributes' do
let(:config) do
YAML.dump(
rspec: { script: 'rspec', stage: 'test', only: ['branches'] },
prod: { script: 'cap prod', stage: 'deploy', only: ['tags'] }
)
end
let(:attributes) do
[{ name: "build",
index: 0,
builds: [] },
{ name: "test",
index: 1,
builds:
[{ stage_idx: 1,
stage: "test",
name: "rspec",
allow_failure: false,
when: "on_success",
yaml_variables: [],
options: { script: ["rspec"] },
only: { refs: ["branches"] },
except: {} }] },
{ name: "deploy",
index: 2,
builds:
[{ stage_idx: 2,
stage: "deploy",
name: "prod",
allow_failure: false,
when: "on_success",
yaml_variables: [],
options: { script: ["cap prod"] },
only: { refs: ["tags"] },
except: {} }] }]
end
it 'returns stages seed attributes' do
expect(subject.stages_attributes).to eq attributes
end
end
describe 'only / except policies validations' do
context 'when `only` has an invalid value' do
let(:config) { { rspec: { script: "rspec", type: "test", only: only } } }
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
context 'when it is integer' do
let(:only) { 1 }
it do
expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:only has to be either an array of conditions or a hash')
end
end
context 'when it is an array of integers' do
let(:only) { [1, 1] }
it do
expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:only config should be an array of strings or regexps')
end
end
context 'when it is invalid regex' do
let(:only) { ["/*invalid/"] }
it do
expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:only config should be an array of strings or regexps')
end
end
end
context 'when `except` has an invalid value' do
let(:config) { { rspec: { script: "rspec", except: except } } }
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
context 'when it is integer' do
let(:except) { 1 }
it do
expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:except has to be either an array of conditions or a hash')
end
end
context 'when it is an array of integers' do
let(:except) { [1, 1] }
it do
expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:except config should be an array of strings or regexps')
end
end
context 'when it is invalid regex' do
let(:except) { ["/*invalid/"] }
it do
expect { processor }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:except config should be an array of strings or regexps')
end
end
end
end
describe "Scripts handling" do
let(:config_data) { YAML.dump(config) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config_data) }
subject { config_processor.stage_builds_attributes('test').first }
describe "before_script" do
context "in global context" do
let(:config) do
{
before_script: ["global script"],
test: { script: ["script"] }
}
end
it "return commands with scripts concencaced" do
expect(subject[:options][:before_script]).to eq(["global script"])
end
end
context "overwritten in local context" do
let(:config) do
{
before_script: ["global script"],
test: { before_script: ["local script"], script: ["script"] }
}
end
it "return commands with scripts concencaced" do
expect(subject[:options][:before_script]).to eq(["local script"])
end
end
end
describe "script" do
let(:config) do
{
test: { script: ["script"] }
}
end
it "return commands with scripts concencaced" do
expect(subject[:options][:script]).to eq(["script"])
end
end
describe "after_script" do
context "in global context" do
let(:config) do
{
after_script: ["after_script"],
test: { script: ["script"] }
}
end
it "return after_script in options" do
expect(subject[:options][:after_script]).to eq(["after_script"])
end
end
context "overwritten in local context" do
let(:config) do
{
after_script: ["local after_script"],
test: { after_script: ["local after_script"], script: ["script"] }
}
end
it "return after_script in options" do
expect(subject[:options][:after_script]).to eq(["local after_script"])
end
end
end
end
describe "Image and service handling" do
context "when extended docker configuration is used" do
it "returns image and service when defined" do
config = YAML.dump({ image: { name: "ruby:2.1", entrypoint: ["/usr/local/bin/init", "run"] },
services: ["mysql", { name: "docker:dind", alias: "docker",
entrypoint: ["/usr/local/bin/init", "run"],
command: ["/usr/local/bin/init", "run"] }],
before_script: ["pwd"],
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
options: {
before_script: ["pwd"],
script: ["rspec"],
image: { name: "ruby:2.1", entrypoint: ["/usr/local/bin/init", "run"] },
services: [{ name: "mysql" },
{ name: "docker:dind", alias: "docker", entrypoint: ["/usr/local/bin/init", "run"],
command: ["/usr/local/bin/init", "run"] }]
},
allow_failure: false,
when: "on_success",
yaml_variables: []
})
end
it "returns image and service when overridden for job" do
config = YAML.dump({ image: "ruby:2.1",
services: ["mysql"],
before_script: ["pwd"],
rspec: { image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
services: [{ name: "postgresql", alias: "db-pg",
entrypoint: ["/usr/local/bin/init", "run"],
command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
options: {
before_script: ["pwd"],
script: ["rspec"],
image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
services: [{ name: "postgresql", alias: "db-pg", entrypoint: ["/usr/local/bin/init", "run"],
command: ["/usr/local/bin/init", "run"] },
{ name: "docker:dind" }]
},
allow_failure: false,
when: "on_success",
yaml_variables: []
})
end
end
context "when etended docker configuration is not used" do
it "returns image and service when defined" do
config = YAML.dump({ image: "ruby:2.1",
services: ["mysql", "docker:dind"],
before_script: ["pwd"],
rspec: { script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
options: {
before_script: ["pwd"],
script: ["rspec"],
image: { name: "ruby:2.1" },
services: [{ name: "mysql" }, { name: "docker:dind" }]
},
allow_failure: false,
when: "on_success",
yaml_variables: []
})
end
it "returns image and service when overridden for job" do
config = YAML.dump({ image: "ruby:2.1",
services: ["mysql"],
before_script: ["pwd"],
rspec: { image: "ruby:2.5", services: ["postgresql", "docker:dind"], script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
options: {
before_script: ["pwd"],
script: ["rspec"],
image: { name: "ruby:2.5" },
services: [{ name: "postgresql" }, { name: "docker:dind" }]
},
allow_failure: false,
when: "on_success",
yaml_variables: []
})
end
end
end
describe 'Variables' do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
subject { config_processor.builds.first[:yaml_variables] }
context 'when global variables are defined' do
let(:variables) do
{ 'VAR1' => 'value1', 'VAR2' => 'value2' }
end
let(:config) do
{
variables: variables,
before_script: ['pwd'],
rspec: { script: 'rspec' }
}
end
it 'returns global variables' do
expect(subject).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
end
end
context 'when job and global variables are defined' do
let(:global_variables) do
{ 'VAR1' => 'global1', 'VAR3' => 'global3' }
end
let(:job_variables) do
{ 'VAR1' => 'value1', 'VAR2' => 'value2' }
end
let(:config) do
{
before_script: ['pwd'],
variables: global_variables,
rspec: { script: 'rspec', variables: job_variables }
}
end
it 'returns all unique variables' do
expect(subject).to contain_exactly(
{ key: 'VAR3', value: 'global3', public: true },
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
end
end
context 'when job variables are defined' do
let(:config) do
{
before_script: ['pwd'],
rspec: { script: 'rspec', variables: variables }
}
end
context 'when syntax is correct' do
let(:variables) do
{ 'VAR1' => 'value1', 'VAR2' => 'value2' }
end
it 'returns job variables' do
expect(subject).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
end
end
context 'when syntax is incorrect' do
context 'when variables defined but invalid' do
let(:variables) do
%w(VAR1 value1 VAR2 value2)
end
it 'raises error' do
expect { subject }
.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
/jobs:rspec:variables config should be a hash of key value pairs/)
end
end
context 'when variables key defined but value not specified' do
let(:variables) do
nil
end
it 'returns empty array' do
##
# When variables config is empty, we assume this is a valid
# configuration, see issue #18775
#
expect(subject).to be_an_instance_of(Array)
expect(subject).to be_empty
end
end
end
end
context 'when job variables are not defined' do
let(:config) do
{
before_script: ['pwd'],
rspec: { script: 'rspec' }
}
end
it 'returns empty array' do
expect(subject).to be_an_instance_of(Array)
expect(subject).to be_empty
end
end
end
context 'when using `extends`' do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
subject { config_processor.builds.first }
context 'when using simple `extends`' do
let(:config) do
<<~YAML
.template:
script: test
rspec:
extends: .template
image: ruby:alpine
YAML
end
it 'correctly extends rspec job' do
expect(config_processor.builds).to be_one
expect(subject.dig(:options, :script)).to eq %w(test)
expect(subject.dig(:options, :image, :name)).to eq 'ruby:alpine'
end
end
context 'when using recursive `extends`' do
let(:config) do
<<~YAML
rspec:
extends: .test
script: rspec
when: always
.template:
before_script:
- bundle install
.test:
extends: .template
script: test
image: image:test
YAML
end
it 'correctly extends rspec job' do
expect(config_processor.builds).to be_one
expect(subject.dig(:options, :before_script)).to eq ["bundle install"]
expect(subject.dig(:options, :script)).to eq %w(rspec)
expect(subject.dig(:options, :image, :name)).to eq 'image:test'
expect(subject.dig(:when)).to eq 'always'
end
end
end
describe "When" do
%w(on_success on_failure always).each do |when_state|
it "returns #{when_state} when defined" do
config = YAML.dump({
rspec: { script: "rspec", when: when_state }
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
builds = config_processor.stage_builds_attributes("test")
expect(builds.size).to eq(1)
expect(builds.first[:when]).to eq(when_state)
end
end
end
describe 'Parallel' do
context 'when job is parallelized' do
let(:parallel) { 5 }
let(:config) do
YAML.dump(rspec: { script: 'rspec',
parallel: parallel })
end
it 'returns parallelized jobs' do
config_processor = Gitlab::Ci::YamlProcessor.new(config)
builds = config_processor.stage_builds_attributes('test')
build_options = builds.map { |build| build[:options] }
expect(builds.size).to eq(5)
expect(build_options).to all(include(:instance, parallel: parallel))
end
it 'does not have the original job' do
config_processor = Gitlab::Ci::YamlProcessor.new(config)
builds = config_processor.stage_builds_attributes('test')
expect(builds).not_to include(:rspec)
end
end
end
describe 'cache' do
context 'when cache definition has unknown keys' do
it 'raises relevant validation error' do
config = YAML.dump(
{ cache: { untracked: true, invalid: 'key' },
rspec: { script: 'rspec' } })
expect { Gitlab::Ci::YamlProcessor.new(config) }.to raise_error(
Gitlab::Ci::YamlProcessor::ValidationError,
'cache config contains unknown keys: invalid'
)
end
end
it "returns cache when defined globally" do
config = YAML.dump({
cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
rspec: {
script: "rspec"
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
policy: 'pull-push'
)
end
it "returns cache when defined in a job" do
config = YAML.dump({
rspec: {
cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'key' },
script: "rspec"
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq(
paths: ["logs/", "binaries/"],
untracked: true,
key: 'key',
policy: 'pull-push'
)
end
it "overwrite cache when defined for a job and globally" do
config = YAML.dump({
cache: { paths: ["logs/", "binaries/"], untracked: true, key: 'global' },
rspec: {
script: "rspec",
cache: { paths: ["test/"], untracked: false, key: 'local' }
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first[:options][:cache]).to eq(
paths: ["test/"],
untracked: false,
key: 'local',
policy: 'pull-push'
)
end
end
describe "Artifacts" do
it "returns artifacts when defined" do
config = YAML.dump({
image: "ruby:2.1",
services: ["mysql"],
before_script: ["pwd"],
rspec: {
artifacts: {
paths: ["logs/", "binaries/"],
untracked: true,
name: "custom_name",
expire_in: "7d"
},
script: "rspec"
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
expect(config_processor.stage_builds_attributes("test").size).to eq(1)
expect(config_processor.stage_builds_attributes("test").first).to eq({
stage: "test",
stage_idx: 1,
name: "rspec",
options: {
before_script: ["pwd"],
script: ["rspec"],
image: { name: "ruby:2.1" },
services: [{ name: "mysql" }],
artifacts: {
name: "custom_name",
paths: ["logs/", "binaries/"],
untracked: true,
expire_in: "7d"
}
},
when: "on_success",
allow_failure: false,
yaml_variables: []
})
end
%w[on_success on_failure always].each do |when_state|
it "returns artifacts for when #{when_state} defined" do
config = YAML.dump({
rspec: {
script: "rspec",
artifacts: { paths: ["logs/", "binaries/"], when: when_state }
}
})
config_processor = Gitlab::Ci::YamlProcessor.new(config)
builds = config_processor.stage_builds_attributes("test")
expect(builds.size).to eq(1)
expect(builds.first[:options][:artifacts][:when]).to eq(when_state)
end
end
end
describe '#environment' do
let(:config) do
{
deploy_to_production: { stage: 'deploy', script: 'test', environment: environment }
}
end
let(:processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
let(:builds) { processor.stage_builds_attributes('deploy') }
context 'when a production environment is specified' do
let(:environment) { 'production' }
it 'does return production' do
expect(builds.size).to eq(1)
expect(builds.first[:environment]).to eq(environment)
expect(builds.first[:options]).to include(environment: { name: environment, action: "start" })
end
end
context 'when hash is specified' do
let(:environment) do
{ name: 'production',
url: 'http://production.gitlab.com' }
end
it 'does return production and URL' do
expect(builds.size).to eq(1)
expect(builds.first[:environment]).to eq(environment[:name])
expect(builds.first[:options]).to include(environment: environment)
end
context 'the url has a port as variable' do
let(:environment) do
{ name: 'production',
url: 'http://production.gitlab.com:$PORT' }
end
it 'allows a variable for the port' do
expect(builds.size).to eq(1)
expect(builds.first[:environment]).to eq(environment[:name])
expect(builds.first[:options]).to include(environment: environment)
end
end
end
context 'when no environment is specified' do
let(:environment) { nil }
it 'does return nil environment' do
expect(builds.size).to eq(1)
expect(builds.first[:environment]).to be_nil
end
end
context 'is not a string' do
let(:environment) { 1 }
it 'raises error' do
expect { builds }.to raise_error(
'jobs:deploy_to_production:environment config should be a hash or a string')
end
end
context 'is not a valid string' do
let(:environment) { 'production:staging' }
it 'raises error' do
expect { builds }.to raise_error("jobs:deploy_to_production:environment name #{Gitlab::Regex.environment_name_regex_message}")
end
end
context 'when on_stop is specified' do
let(:review) { { stage: 'deploy', script: 'test', environment: { name: 'review', on_stop: 'close_review' } } }
let(:config) { { review: review, close_review: close_review }.compact }
context 'with matching job' do
let(:close_review) { { stage: 'deploy', script: 'test', environment: { name: 'review', action: 'stop' } } }
it 'does return a list of builds' do
expect(builds.size).to eq(2)
expect(builds.first[:environment]).to eq('review')
end
end
context 'without matching job' do
let(:close_review) { nil }
it 'raises error' do
expect { builds }.to raise_error('review job: on_stop job close_review is not defined')
end
end
context 'with close job without environment' do
let(:close_review) { { stage: 'deploy', script: 'test' } }
it 'raises error' do
expect { builds }.to raise_error('review job: on_stop job close_review does not have environment defined')
end
end
context 'with close job for different environment' do
let(:close_review) { { stage: 'deploy', script: 'test', environment: 'production' } }
it 'raises error' do
expect { builds }.to raise_error('review job: on_stop job close_review have different environment name')
end
end
context 'with close job without stop action' do
let(:close_review) { { stage: 'deploy', script: 'test', environment: { name: 'review' } } }
it 'raises error' do
expect { builds }.to raise_error('review job: on_stop job close_review needs to have action stop defined')
end
end
end
end
describe "Dependencies" do
let(:config) do
{
build1: { stage: 'build', script: 'test' },
build2: { stage: 'build', script: 'test' },
test1: { stage: 'test', script: 'test', dependencies: dependencies },
test2: { stage: 'test', script: 'test' },
deploy: { stage: 'test', script: 'test' }
}
end
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)) }
context 'no dependencies' do
let(:dependencies) { }
it { expect { subject }.not_to raise_error }
end
context 'dependencies to builds' do
let(:dependencies) { %w(build1 build2) }
it { expect { subject }.not_to raise_error }
end
context 'dependencies to builds defined as symbols' do
let(:dependencies) { [:build1, :build2] }
it { expect { subject }.not_to raise_error }
end
context 'undefined dependency' do
let(:dependencies) { ['undefined'] }
it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: undefined dependency: undefined') }
end
context 'dependencies to deploy' do
let(:dependencies) { ['deploy'] }
it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'test1 job: dependency deploy is not defined in prior stages') }
end
end
describe "Hidden jobs" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
subject { config_processor.stage_builds_attributes("test") }
shared_examples 'hidden_job_handling' do
it "doesn't create jobs that start with dot" do
expect(subject.size).to eq(1)
expect(subject.first).to eq({
stage: "test",
stage_idx: 1,
name: "normal_job",
options: {
script: ["test"]
},
when: "on_success",
allow_failure: false,
yaml_variables: []
})
end
end
context 'when hidden job have a script definition' do
let(:config) do
YAML.dump({
'.hidden_job' => { image: 'ruby:2.1', script: 'test' },
'normal_job' => { script: 'test' }
})
end
it_behaves_like 'hidden_job_handling'
end
context "when hidden job doesn't have a script definition" do
let(:config) do
YAML.dump({
'.hidden_job' => { image: 'ruby:2.1' },
'normal_job' => { script: 'test' }
})
end
it_behaves_like 'hidden_job_handling'
end
end
describe "YAML Alias/Anchor" do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
subject { config_processor.stage_builds_attributes("build") }
shared_examples 'job_templates_handling' do
it "is correctly supported for jobs" do
expect(subject.size).to eq(2)
expect(subject.first).to eq({
stage: "build",
stage_idx: 0,
name: "job1",
options: {
script: ["execute-script-for-job"]
},
when: "on_success",
allow_failure: false,
yaml_variables: []
})
expect(subject.second).to eq({
stage: "build",
stage_idx: 0,
name: "job2",
options: {
script: ["execute-script-for-job"]
},
when: "on_success",
allow_failure: false,
yaml_variables: []
})
end
end
context 'when template is a job' do
let(:config) do
<<~EOT
job1: &JOBTMPL
stage: build
script: execute-script-for-job
job2: *JOBTMPL
EOT
end
it_behaves_like 'job_templates_handling'
end
context 'when template is a hidden job' do
let(:config) do
<<~EOT
.template: &JOBTMPL
stage: build
script: execute-script-for-job
job1: *JOBTMPL
job2: *JOBTMPL
EOT
end
it_behaves_like 'job_templates_handling'
end
context 'when job adds its own keys to a template definition' do
let(:config) do
<<~EOT
.template: &JOBTMPL
stage: build
job1:
<<: *JOBTMPL
script: execute-script-for-job
job2:
<<: *JOBTMPL
script: execute-script-for-job
EOT
end
it_behaves_like 'job_templates_handling'
end
end
describe "Error handling" do
it "fails to parse YAML" do
expect do
Gitlab::Ci::YamlProcessor.new("invalid: yaml: test")
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
end
it "indicates that object is invalid" do
expect do
Gitlab::Ci::YamlProcessor.new("invalid_yaml")
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError)
end
it "returns errors if tags parameter is invalid" do
config = YAML.dump({ rspec: { script: "test", tags: "mysql" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec tags should be an array of strings")
end
it "returns errors if before_script parameter is invalid" do
config = YAML.dump({ before_script: "bundle update", rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "before_script config should be an array of strings")
end
it "returns errors if job before_script parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", before_script: [10, "test"] } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:before_script config should be an array of strings")
end
it "returns errors if after_script parameter is invalid" do
config = YAML.dump({ after_script: "bundle update", rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "after_script config should be an array of strings")
end
it "returns errors if job after_script parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", after_script: [10, "test"] } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:after_script config should be an array of strings")
end
it "returns errors if image parameter is invalid" do
config = YAML.dump({ image: ["test"], rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "image config should be a hash or a string")
end
it "returns errors if job name is blank" do
config = YAML.dump({ '' => { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:job name can't be blank")
end
it "returns errors if job name is non-string" do
config = YAML.dump({ 10 => { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:10 name should be a symbol")
end
it "returns errors if job image parameter is invalid" do
config = YAML.dump({ rspec: { script: "test", image: ["test"] } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:image config should be a hash or a string")
end
it "returns errors if services parameter is not an array" do
config = YAML.dump({ services: "test", rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "services config should be a array")
end
it "returns errors if services parameter is not an array of strings" do
config = YAML.dump({ services: [10, "test"], rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "service config should be a hash or a string")
end
it "returns errors if job services parameter is not an array" do
config = YAML.dump({ rspec: { script: "test", services: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:services config should be a array")
end
it "returns errors if job services parameter is not an array of strings" do
config = YAML.dump({ rspec: { script: "test", services: [10, "test"] } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "service config should be a hash or a string")
end
it "returns error if job configuration is invalid" do
config = YAML.dump({ extra: "bundle update" })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra config should be a hash")
end
it "returns errors if services configuration is not correct" do
config = YAML.dump({ extra: { script: 'rspec', services: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:extra:services config should be a array")
end
it "returns errors if there are no jobs defined" do
config = YAML.dump({ before_script: ["bundle update"] })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
end
it "returns errors if there are no visible jobs defined" do
config = YAML.dump({ before_script: ["bundle update"], '.hidden'.to_sym => { script: 'ls' } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
end
it "returns errors if job allow_failure parameter is not an boolean" do
config = YAML.dump({ rspec: { script: "test", allow_failure: "string" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec allow failure should be a boolean value")
end
it "returns errors if job stage is not a string" do
config = YAML.dump({ rspec: { script: "test", type: 1 } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:type config should be a string")
end
it "returns errors if job stage is not a pre-defined stage" do
config = YAML.dump({ rspec: { script: "test", type: "acceptance" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be build, test, deploy")
end
it "returns errors if job stage is not a defined stage" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", type: "acceptance" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "rspec job: stage parameter should be build, test")
end
it "returns errors if stages is not an array" do
config = YAML.dump({ stages: "test", rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
end
it "returns errors if stages is not an array of strings" do
config = YAML.dump({ stages: [true, "test"], rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "stages config should be an array of strings")
end
it "returns errors if variables is not a map" do
config = YAML.dump({ variables: "test", rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
end
it "returns errors if variables is not a map of key-value strings" do
config = YAML.dump({ variables: { test: false }, rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "variables config should be a hash of key value pairs")
end
it "returns errors if job when is not on_success, on_failure or always" do
config = YAML.dump({ rspec: { script: "test", when: 1 } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec when should be on_success, on_failure, always, manual or delayed")
end
it "returns errors if job artifacts:name is not an a string" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { name: 1 } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts name should be a string")
end
it "returns errors if job artifacts:when is not an a predefined value" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { when: 1 } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts when should be on_success, on_failure or always")
end
it "returns errors if job artifacts:expire_in is not an a string" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: 1 } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts expire in should be a duration")
end
it "returns errors if job artifacts:expire_in is not an a valid duration" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { expire_in: "7 elephants" } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts expire in should be a duration")
end
it "returns errors if job artifacts:untracked is not an array of strings" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { untracked: "string" } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts untracked should be a boolean value")
end
it "returns errors if job artifacts:paths is not an array of strings" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", artifacts: { paths: "string" } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:artifacts paths should be an array of strings")
end
it "returns errors if cache:untracked is not an array of strings" do
config = YAML.dump({ cache: { untracked: "string" }, rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:untracked config should be a boolean value")
end
it "returns errors if cache:paths is not an array of strings" do
config = YAML.dump({ cache: { paths: "string" }, rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:paths config should be an array of strings")
end
it "returns errors if cache:key is not a string" do
config = YAML.dump({ cache: { key: 1 }, rspec: { script: "test" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "cache:key config should be a string or symbol")
end
it "returns errors if job cache:key is not an a string" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { key: 1 } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:key config should be a string or symbol")
end
it "returns errors if job cache:untracked is not an array of strings" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { untracked: "string" } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:untracked config should be a boolean value")
end
it "returns errors if job cache:paths is not an array of strings" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", cache: { paths: "string" } } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec:cache:paths config should be an array of strings")
end
it "returns errors if job dependencies is not an array of strings" do
config = YAML.dump({ types: %w(build test), rspec: { script: "test", dependencies: "string" } })
expect do
Gitlab::Ci::YamlProcessor.new(config)
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec dependencies should be an array of strings")
end
it 'returns errors if pipeline variables expression policy is invalid' do
config = YAML.dump({ rspec: { script: 'test', only: { variables: ['== null'] } } })
expect { Gitlab::Ci::YamlProcessor.new(config) }
.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:only variables invalid expression syntax')
end
it 'returns errors if pipeline changes policy is invalid' do
config = YAML.dump({ rspec: { script: 'test', only: { changes: [1] } } })
expect { Gitlab::Ci::YamlProcessor.new(config) }
.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'jobs:rspec:only changes should be an array of strings')
end
it 'returns errors if extended hash configuration is invalid' do
config = YAML.dump({ rspec: { extends: 'something', script: 'test' } })
expect { Gitlab::Ci::YamlProcessor.new(config) }
.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'rspec: unknown key in `extends`')
end
end
describe "#validation_message" do
subject { Gitlab::Ci::YamlProcessor.validation_message(content) }
context "when the YAML could not be parsed" do
let(:content) { YAML.dump("invalid: yaml: test") }
it { is_expected.to eq "Invalid configuration format" }
end
context "when the tags parameter is invalid" do
let(:content) { YAML.dump({ rspec: { script: "test", tags: "mysql" } }) }
it { is_expected.to eq "jobs:rspec tags should be an array of strings" }
end
context "when YAML content is empty" do
let(:content) { '' }
it { is_expected.to eq "Please provide content of .gitlab-ci.yml" }
end
context 'when the YAML contains an unknown alias' do
let(:content) { 'steps: *bad_alias' }
it { is_expected.to eq "Unknown alias: bad_alias" }
end
context "when the YAML is valid" do
let(:content) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
it { is_expected.to be_nil }
end
end
end
end
end
| 38.091743 | 157 | 0.522603 |
08c2319cbca67851cd1ab1c4ce82800643d0d68d | 713 | module Fog
module OpenStack
class Network
class Real
def get_network_ip_availability(network_id)
request(
:expects => [200],
:method => 'GET',
:path => "network-ip-availabilities/#{network_id}"
)
end
end
class Mock
def get_network_ip_availability(network_id)
response = Excon::Response.new
if data = self.data[:network_ip_availabilities].first
response.status = 200
response.body = {'network_ip_availability' => data}
response
else
raise Fog::OpenStack::Network::NotFound
end
end
end
end
end
end
| 24.586207 | 65 | 0.545582 |
# Be sure to restart your server when you modify this file.
# Sessions are kept in a signed, client-side cookie under this key.
Rails.application.config.session_store :cookie_store, key: '_speakeasy_session'
| 35.25 | 79 | 0.808511 |
5d3c20f67f9e0a7f695a02e9d8813277ada12b0d | 8,261 | # encoding: US-ASCII
require "unit_spec_helper"
require 'unit/notification_shared.rb'
describe Rpush::Apns::Notification do
it_should_behave_like 'an Notification subclass'
let(:app) { Rpush::Apns::App.create!(:name => 'my_app', :environment => 'development', :certificate => TEST_CERT) }
let(:notification_class) { Rpush::Apns::Notification }
let(:notification) { notification_class.new }
let(:data_setter) { 'attributes_for_device=' }
let(:data_getter) { 'attributes_for_device' }
it "should validate the format of the device_token" do
notification = Rpush::Apns::Notification.new(:device_token => "{$%^&*()}")
notification.valid?.should be_false
notification.errors[:device_token].include?("is invalid").should be_true
end
it "should validate the length of the binary conversion of the notification" do
notification.device_token = "a" * 64
notification.alert = "way too long!" * 100
notification.valid?.should be_false
notification.errors[:base].include?("APN notification cannot be larger than 256 bytes. Try condensing your alert and device attributes.").should be_true
end
it "should default the sound to 'default'" do
notification.sound.should eq('default')
end
it "should default the expiry to 1 day" do
notification.expiry.should eq 1.day.to_i
end
end
describe Rpush::Apns::Notification, "when assigning the device token" do
it "should strip spaces from the given string" do
notification = Rpush::Apns::Notification.new(:device_token => "o m g")
notification.device_token.should eq "omg"
end
it "should strip chevrons from the given string" do
notification = Rpush::Apns::Notification.new(:device_token => "<omg>")
notification.device_token.should eq "omg"
end
end
describe Rpush::Apns::Notification, "as_json" do
it "should include the alert if present" do
notification = Rpush::Apns::Notification.new(:alert => "hi mom")
notification.as_json["aps"]["alert"].should eq "hi mom"
end
it "should not include the alert key if the alert is not present" do
notification = Rpush::Apns::Notification.new(:alert => nil)
notification.as_json["aps"].key?("alert").should be_false
end
it "should encode the alert as JSON if it is a Hash" do
notification = Rpush::Apns::Notification.new(:alert => { 'body' => "hi mom", 'alert-loc-key' => "View" })
notification.as_json["aps"]["alert"].should eq ({ 'body' => "hi mom", 'alert-loc-key' => "View" })
end
it "should include the badge if present" do
notification = Rpush::Apns::Notification.new(:badge => 6)
notification.as_json["aps"]["badge"].should eq 6
end
it "should not include the badge key if the badge is not present" do
notification = Rpush::Apns::Notification.new(:badge => nil)
notification.as_json["aps"].key?("badge").should be_false
end
it "should include the sound if present" do
notification = Rpush::Apns::Notification.new(:alert => "my_sound.aiff")
notification.as_json["aps"]["alert"].should eq "my_sound.aiff"
end
it "should not include the sound key if the sound is not present" do
notification = Rpush::Apns::Notification.new(:sound => false)
notification.as_json["aps"].key?("sound").should be_false
end
it "should include attributes for the device" do
notification = Rpush::Apns::Notification.new
notification.attributes_for_device = {:omg => :lol, :wtf => :dunno}
notification.as_json["omg"].should eq "lol"
notification.as_json["wtf"].should eq "dunno"
end
it "should allow attributes to include a hash" do
notification = Rpush::Apns::Notification.new
notification.attributes_for_device = {:omg => {:ilike => :hashes}}
notification.as_json["omg"]["ilike"].should eq "hashes"
end
end
describe Rpush::Apns::Notification, 'MDM' do
let(:magic) { 'abc123' }
let(:notification) { Rpush::Apns::Notification.new }
it 'includes the mdm magic in the payload' do
notification.mdm = magic
notification.as_json.should eq ({'mdm' => magic})
end
it 'does not include aps attribute' do
notification.alert = "i'm doomed"
notification.mdm = magic
notification.as_json.key?('aps').should be_false
end
end
describe Rpush::Apns::Notification, 'content-available' do
let(:notification) { Rpush::Apns::Notification.new }
it 'includes content-available in the payload' do
notification.content_available = true
notification.as_json['aps']['content-available'].should eq 1
end
it 'does not include content-available in the payload if not set' do
notification.as_json['aps'].key?('content-available').should be_false
end
it 'does not include content-available as a non-aps attribute' do
notification.content_available = true
notification.as_json.key?('content-available').should be_false
end
it 'does not overwrite existing attributes for the device' do
notification.data = {:hi => :mom}
notification.content_available = true
notification.as_json['aps']['content-available'].should eq 1
notification.as_json['hi'].should eq 'mom'
end
it 'does not overwrite the content-available flag when setting attributes for the device' do
notification.content_available = true
notification.data = {:hi => :mom}
notification.as_json['aps']['content-available'].should eq 1
notification.as_json['hi'].should eq 'mom'
end
end
describe Rpush::Apns::Notification, "to_binary" do
it "should correctly convert the notification to binary" do
notification = Rpush::Apns::Notification.new
notification.device_token = "a" * 64
notification.sound = "1.aiff"
notification.badge = 3
notification.alert = "Don't panic Mr Mainwaring, don't panic!"
notification.attributes_for_device = {:hi => :mom}
notification.expiry = 86400 # 1 day, \x00\x01Q\x80
notification.app = Rpush::Apns::App.new(:name => 'my_app', :environment => 'development', :certificate => TEST_CERT)
notification.stub(:id).and_return(1234)
notification.to_binary.should eq "\x01\x00\x00\x04\xD2\x00\x01Q\x80\x00 \xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\xAA\x00a{\"aps\":{\"alert\":\"Don't panic Mr Mainwaring, don't panic!\",\"badge\":3,\"sound\":\"1.aiff\"},\"hi\":\"mom\"}"
end
end
describe Rpush::Apns::Notification, "bug #31" do
it 'does not confuse a JSON looking string as JSON' do
notification = Rpush::Apns::Notification.new
notification.alert = "{\"one\":2}"
notification.alert.should eq "{\"one\":2}"
end
it 'does confuse a JSON looking string as JSON if the alert_is_json attribute is not present' do
notification = Rpush::Apns::Notification.new
notification.stub(:has_attribute? => false)
notification.alert = "{\"one\":2}"
notification.alert.should eq ({"one" => 2})
end
end
describe Rpush::Apns::Notification, "bug #35" do
it "should limit payload size to 256 bytes but not the entire packet" do
notification = Rpush::Apns::Notification.new do |n|
n.device_token = "a" * 64
n.alert = "a" * 210
n.app = Rpush::Apns::App.create!(:name => 'my_app', :environment => 'development', :certificate => TEST_CERT)
end
notification.to_binary(:for_validation => true).bytesize.should > 256
notification.payload_size.should < 256
notification.should be_valid
end
end
describe Rpush::Apns::Notification, "multi_json usage" do
describe Rpush::Apns::Notification, "alert" do
it "should call MultiJson.load when multi_json version is 1.3.0" do
notification = Rpush::Apns::Notification.new(:alert => { :a => 1 }, :alert_is_json => true)
Gem.stub(:loaded_specs).and_return( { 'multi_json' => Gem::Specification.new('multi_json', '1.3.0') } )
MultiJson.should_receive(:load).with(any_args())
notification.alert
end
it "should call MultiJson.decode when multi_json version is 1.2.9" do
notification = Rpush::Apns::Notification.new(:alert => { :a => 1 }, :alert_is_json => true)
Gem.stub(:loaded_specs).and_return( { 'multi_json' => Gem::Specification.new('multi_json', '1.2.9') } )
MultiJson.should_receive(:decode).with(any_args())
notification.alert
end
end
end
| 39.526316 | 323 | 0.700521 |
# Specs for the embedded-Ansible ConfigurationScript (job template) model:
# association with its manager, running playbooks, extra_vars merging, and
# queued CRUD operations.
describe ManageIQ::Providers::EmbeddedAnsible::AutomationManager::ConfigurationScript do
  let(:ansible_script_source) { FactoryBot.create(:embedded_ansible_configuration_script_source) }
  let(:playbook) { FactoryBot.create(:embedded_playbook, :configuration_script_source => ansible_script_source) }
  let(:manager_with_configuration_scripts) { FactoryBot.create(:embedded_automation_manager_ansible, :provider, :configuration_script) }
  before do
    EvmSpecHelper.assign_embedded_ansible_role
    ansible_script_source.update(:manager_id => manager.id)
    manager_with_configuration_scripts.configuration_scripts.each do |cs|
      cs.parent_id = playbook.id
      cs.save
    end
    # Note: For some unknown reason, this is required or the belongs_to specs
    # return two results when calling manager.configuration_scripts for the
    # second time...
    manager_with_configuration_scripts.reload
  end
  # The following specs are copied from the 'ansible configuration_script' spec
  # helper from the AnsibleTower Provider repo, but have been modified to make
  # sense for the case of AnsibleRunner. Previously was:
  #
  #   it_behaves_like 'ansible configuration_script'
  #
  # Below are `let` calls from there as well.
  #
  let(:manager) { manager_with_configuration_scripts }
  it "belongs_to the manager" do
    expect(manager.configuration_scripts.size).to eq 1
    expect(manager.configuration_scripts.first.variables).to eq :instance_ids => ['i-3434']
    expect(manager.configuration_scripts.first).to be_a ConfigurationScript
  end
  # #run launches an AnsiblePlaybookWorkflow for the referenced playbook.
  context "#run" do
    let(:cs) { manager.configuration_scripts.first }
    before do
      expect(cs.parent.configuration_script_source).to receive(:checkout_git_repository)
    end
    it "launches the referenced ansible job template" do
      job = cs.run
      expect(job).to be_a ManageIQ::Providers::AnsiblePlaybookWorkflow
      expect(job.options[:env_vars]).to eq({})
      expect(job.options[:extra_vars]).to eq(:instance_ids => ["i-3434"])
      expect(File.basename(job.options[:playbook_path])).to eq(playbook.name)
      expect(job.options[:timeout]).to eq(1.hour)
      expect(job.options[:verbosity]).to eq(0)
    end
    it "accepts different variables to launch a job template against" do
      job = cs.run(:extra_vars => {:some_key => :some_value})
      expect(job).to be_a ManageIQ::Providers::AnsiblePlaybookWorkflow
      expect(job.options[:env_vars]).to eq({})
      expect(job.options[:extra_vars]).to eq(:instance_ids => ["i-3434"], :some_key => :some_value)
    end
    it "passes execution_ttl to the job as its timeout" do
      job = cs.run(:execution_ttl => "5")
      expect(job).to be_a ManageIQ::Providers::AnsiblePlaybookWorkflow
      expect(job.options[:timeout]).to eq(5.minutes)
    end
    it "passes verbosity to the job when specified" do
      job = cs.run(:verbosity => "5")
      expect(job).to be_a ManageIQ::Providers::AnsiblePlaybookWorkflow
      expect(job.options[:verbosity]).to eq(5)
    end
    it "passes become_enabled to the job when specified" do
      job = cs.run(:become_enabled => true)
      expect(job).to be_a ManageIQ::Providers::AnsiblePlaybookWorkflow
      expect(job.options[:become_enabled]).to eq(true)
    end
  end
  # merge_extra_vars combines stored (internal) variables with caller-provided
  # (external) ones, decrypting encrypted values on the way out.
  context "#merge_extra_vars" do
    it "merges internal and external hashes to send out to ansible_runner" do
      config_script = manager.configuration_scripts.first
      external = {:some_key => :some_value}
      internal = config_script.variables
      merged = config_script.send(:merge_extra_vars, external)
      expect(internal).to be_a Hash
      expect(merged).to eq(:instance_ids => ["i-3434"], :some_key => :some_value)
    end
    it "merges an internal hash and an empty hash to send out to ansible_runner" do
      config_script = manager.configuration_scripts.first
      external = nil
      merged = config_script.send(:merge_extra_vars, external)
      expect(merged).to eq(:instance_ids => ["i-3434"])
    end
    it "merges an empty internal hash and a hash to send out to the tower gem" do
      config_script = manager.configuration_scripts.first.tap { |cs| cs.variables = {} }
      external = {:some_key => :some_value}
      merged = config_script.send(:merge_extra_vars, external)
      expect(merged).to eq(external)
    end
    it "merges all empty arguments to send out to the tower gem" do
      config_script = manager.configuration_scripts.first.tap { |cs| cs.variables = {} }
      external = nil
      merged = config_script.send(:merge_extra_vars, external)
      expect(merged).to eq({})
    end
    it "decrypts extra_vars before sending out to the tower gem" do
      config_script = manager.configuration_scripts.first
      password = "password::#{ManageIQ::Password.encrypt("some_value")}"
      external = {:some_key => password}
      merged = config_script.send(:merge_extra_vars, external)
      expect(merged).to eq(:instance_ids => ["i-3434"], :some_key => "some_value")
    end
  end
  # was `context "CUD via the API"`
  context "CRUD operations" do
    let(:manager) { FactoryBot.create(:embedded_automation_manager_ansible, :provider) }
    let(:params) do
      {
        :description  => "Description",
        :extra_vars   => {}.to_json,
        :inventory_id => 1,
        :playbook     => playbook.name,
        :name         => "My Job Template",
        :related      => {}
      }
    end
    context ".create_in_provider" do
      it "successfully created in provider" do
        new_config_script = described_class.create_in_provider(manager.id, params)
        expect(new_config_script).to be_a(described_class)
        expect(new_config_script.manager_id).to eq(manager.id)
      end
      it "raises an error when the playbook does not exist from the script source" do
        params[:playbook] = "not_a_playbook.yaml"
        error_msg = 'Playbook name="not_a_playbook.yaml" no longer exists'
        expect do
          described_class.create_in_provider(manager.id, params)
        end.to raise_error(RuntimeError, error_msg)
      end
      # TODO: Determine if we want to have a uniqueness validation to
      # replicate this functionality, otherwise delete this case.
      #
      # context "provider raises on create" do
      #   it "with a string" do
      #     expect(AnsibleTowerClient::Connection).to receive(:new).and_return(atc)
      #     expect(job_templates).to receive(:create!).and_raise(AnsibleTowerClient::Error, "Job template with this Name already exists.")
      #     expect { described_class.create_in_provider(manager.id, params) }.to raise_error(AnsibleTowerClient::Error, "Job template with this Name already exists.")
      #   end
      # end
    end
    it ".create_in_provider_queue" do
      EvmSpecHelper.local_miq_server
      task_id = described_class.create_in_provider_queue(manager.id, params)
      expect(MiqTask.find(task_id)).to have_attributes(:name => "Creating #{described_class::FRIENDLY_NAME} (name=#{params[:name]})")
      expect(MiqQueue.first).to have_attributes(
        :args        => [manager.id, params],
        :class_name  => described_class.name,
        :method_name => "create_in_provider",
        :priority    => MiqQueue::HIGH_PRIORITY,
        :role        => "embedded_ansible",
        :zone        => nil
      )
    end
    it "#update_in_provider_queue" do
      project = described_class.create!(:manager => manager, :name => "config_script.yml")
      task_id = project.update_in_provider_queue(params)
      expected_args = params.tap { |p| p[:task_id] = task_id }
      expect(MiqTask.find(task_id)).to have_attributes(:name => "Updating #{described_class::FRIENDLY_NAME} (name=config_script.yml)")
      expect(MiqQueue.first).to have_attributes(
        :instance_id => project.id,
        :args        => [expected_args],
        :class_name  => described_class.name,
        :method_name => "update_in_provider",
        :priority    => MiqQueue::HIGH_PRIORITY,
        :role        => "embedded_ansible",
        :zone        => nil
      )
    end
  end
end
| 39.416268 | 166 | 0.67395 |
f823319f762d54d58b4f907cc5e56528d523e6a5 | 952 | # frozen_string_literal: false
require "tkclass"
# Guard flag: ensures the random-line background thread is started once.
$tkline_init = false
# Starts a background thread (first call only) that draws a random line on
# the shared canvas $c every two seconds.
def start_random
  return if $tkline_init
  $tkline_init = true
  if defined? Thread
    Thread.start do
      loop do
        sleep 2
        Line.new($c, rand(400), rand(200), rand(400), rand(200))
      end
    end
  end
end
Label.new('text'=>'Please press or drag button-1').pack
$c = Canvas.new
$c.pack
# FIX: initialize both drag-origin globals. The original line assigned a
# throwaway local `start_y` instead of `$start_y`, leaving the global nil
# until the first button press.
$start_x = $start_y = 0
# Button-1 press: remember the drag origin, start a zero-length line there,
# and kick off the background random-line thread.
def do_press(x, y)
  $start_x = x
  $start_y = y
  $current_line = Line.new($c, x, y, x, y)
  start_random
end
# Pointer drag: rubber-band the in-progress line from the press origin to
# the current pointer position. No-op when no drag is active.
def do_motion(x, y)
  $current_line&.coords($start_x, $start_y, x, y)
end
# Button-1 release: pin the line's final endpoints, paint it black, and
# clear the active-drag state. No-op when no drag is active.
def do_release(x, y)
  line = $current_line
  return unless line

  line.coords($start_x, $start_y, x, y)
  line.fill('black')
  $current_line = nil
end
# Wire the canvas events: press starts a line, drag rubber-bands it,
# release finalizes it. "%x %y" passes pointer coordinates to the procs.
$c.bind("1", proc{|e| do_press e.x, e.y})
$c.bind("B1-Motion", proc{|x, y| do_motion x, y}, "%x %y")
$c.bind("ButtonRelease-1", proc{|x, y| do_release x, y}, "%x %y")
Tk.mainloop
| 19.428571 | 65 | 0.638655 |
# Shared RSpec mock factories for request-log-analyzer specs (legacy
# should/stub! mocking syntax).
module RequestLogAnalyzer::RSpec::Mocks

  # A Source::Base double whose each_request yields two canned requests
  # built from the spec helper's testing_format.
  def mock_source
    source = mock('RequestLogAnalyzer::Source::Base')
    source.stub!(:file_format).and_return(testing_format)
    source.stub!(:parsed_requests).and_return(2)
    source.stub!(:skipped_requests).and_return(1)
    source.stub!(:parse_lines).and_return(10)
    source.stub!(:warning=)
    source.stub!(:progress=)
    source.stub!(:source_changes=)
    source.stub!(:prepare)
    source.stub!(:finalize)
    source.stub!(:each_request).
      and_yield(testing_format.request(:field => 'value1')).
      and_yield(testing_format.request(:field => 'value2'))
    return source
  end

  # An IO double that silently accepts print/puts/write.
  def mock_io
    mio = mock('IO')
    mio.stub!(:print)
    mio.stub!(:puts)
    mio.stub!(:write)
    return mio
  end

  # An Output::Base double: all rendering calls are no-ops, io is a mock_io,
  # and slice_results passes results through unchanged.
  def mock_output
    output = mock('RequestLogAnalyzer::Output::Base')
    output.stub!(:report_tracker)
    output.stub!(:header)
    output.stub!(:footer)
    output.stub!(:puts)
    output.stub!(:<<)
    output.stub!(:colorize).and_return("Fancy text")
    output.stub!(:link)
    output.stub!(:title)
    output.stub!(:line)
    output.stub!(:with_style)
    output.stub!(:table).and_yield([])
    output.stub!(:io).and_return(mock_io)
    output.stub!(:options).and_return({})
    output.stub!(:slice_results).and_return { |a| a }
    return output
  end

  # A Database double; extra method names to stub can be passed as args.
  def mock_database(*stubs)
    database = mock('RequestLogAnalyzer::Database')
    database.stub!(:connect)
    database.stub!(:disconnect)
    database.stub!(:connection).and_return(mock_connection)
    stubs.each { |s| database.stub!(s)}
    return database
  end

  # An ActiveRecord connection double whose create_table yields a table
  # creator double (tables always report as not existing).
  def mock_connection
    table_creator = mock('ActiveRecord table creator')
    table_creator.stub!(:column)
    connection = mock('ActiveRecord::Base.connection')
    connection.stub!(:add_index)
    connection.stub!(:remove_index)
    connection.stub!(:table_exists?).and_return(false)
    connection.stub!(:create_table).and_yield(table_creator).and_return(true)
    connection.stub!(:table_creator).and_return(table_creator)
    return connection
  end
end
| 28.246575 | 77 | 0.684287 |
# Gem version constant for rails_semantic_logger.
module RailsSemanticLogger
  VERSION = '4.4.2'.freeze
end
| 14.5 | 26 | 0.758621 |
3319d5c0b0d788c873482a75b48798c78712a851 | 2,186 | require 'rails_helper'
# TeamUsersController specs: only the team owner may add or remove members;
# all requests are JSON.
RSpec.describe TeamUsersController, type: :controller do
  include Devise::Test::ControllerHelpers
  before(:each) do
    request.env["HTTP_ACCEPT"] = 'application/json'
    @request.env["devise.mapping"] = Devise.mappings[:user]
    @current_user = FactoryGirl.create(:user)
    sign_in @current_user
  end
  # FIX: the description previously read "GET #crete"; the action under
  # test is POST #create.
  describe "POST #create" do
    # Without this the examples do not render the JSON response body.
    render_views
    context "Team owner" do
      before(:each) do
        @team = create(:team, user: @current_user)
        @guest_user = create(:user)
        post :create, params: { team_user: { email: @guest_user.email, team_id: @team.id } }
      end
      it "returns http success" do
        expect(response).to have_http_status(:success)
      end
      it "Return the right params" do
        response_hash = JSON.parse(response.body)
        expect(response_hash["user"]["name"]).to eql(@guest_user.name)
        expect(response_hash["user"]["email"]).to eql(@guest_user.email)
        expect(response_hash["team_id"]).to eql(@team.id)
      end
    end
    context "Team not owner" do
      before(:each) do
        @team = create(:team)
        @guest_user = create(:user)
      end
      it "returns http forbidden" do
        post :create, params: { team_user: { email: @guest_user.email, team_id: @team.id } }
        expect(response).to have_http_status(:forbidden)
      end
    end
  end
  # FIX: the description previously read "GET #destroy"; the action under
  # test is DELETE #destroy.
  describe "DELETE #destroy" do
    context "Team owner" do
      before(:each) do
        @team = create(:team, user: @current_user)
        @guest_user = create(:user)
        @team.users << @guest_user
      end
      it "returns http success" do
        delete :destroy, params: { id: @guest_user.id, team_id: @team.id }
        expect(response).to have_http_status(:success)
      end
    end
    context "Team not owner" do
      before(:each) do
        @team = create(:team)
        @guest_user = create(:user)
        @team.users << @guest_user
      end
      it "returns http forbidden" do
        delete :destroy, params: { id: @guest_user.id, team_id: @team.id }
        expect(response).to have_http_status(:forbidden)
      end
    end
  end
end
03a44dea6f2e9f287341ce1e437245249c9b664a | 1,311 | # -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
require 'acts_as_list/version'
# Gem specification for acts_as_list (sortable/reorderable ActiveRecord
# models via an integer "position" column).
Gem::Specification.new do |s|
  # Description Meta...
  s.name        = 'acts_as_list'
  s.version     = ActiveRecord::Acts::List::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ['David Heinemeier Hansson', 'Swanand Pagnis', 'Quinn Chaffee']
  s.email       = ['[email protected]']
  s.homepage    = 'http://github.com/swanandp/acts_as_list'
  s.summary     = %q{A gem allowing a active_record model to act_as_list.}
  s.description = %q{This "acts_as" extension provides the capabilities for sorting and reordering a number of objects in a list. The class that has this specified needs to have a "position" column defined as an integer on the mapped database table.}
  s.license     = 'MIT'
  s.rubyforge_project = 'acts_as_list'
  s.required_ruby_version = '>= 1.9.2'
  # Load Paths... (file lists come straight from git's index)
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ['lib']
  # Dependencies (installed via 'bundle install')...
  s.add_dependency("activerecord", [">= 3.0"])
  s.add_development_dependency("bundler", [">= 1.0.0"])
end
| 42.290323 | 250 | 0.661327 |
# Chef cookbook metadata for the selenium cookbook.
name 'selenium'
maintainer 'Dennis Hoer'
maintainer_email '[email protected]'
license 'MIT'
description 'Installs/Configures Selenium'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
source_url 'https://github.com/dhoer/chef-selenium'
issues_url 'https://github.com/dhoer/chef-selenium/issues'
version '5.0.1'
chef_version '>= 12.14'
# Supported platforms.
supports 'centos'
supports 'debian'
supports 'fedora'
supports 'mac_os_x'
supports 'redhat'
supports 'ubuntu'
supports 'windows'
# Cookbook dependencies (version-pinned where required).
depends 'macosx_autologin', '>= 4.0'
depends 'nssm', '>= 4.0'
depends 'windows'
depends 'windows_autologin', '>= 3.0'
| 24.52 | 72 | 0.755302 |
b9bdff2f46329fc8914b808df68c00fb4ceeaf02 | 1,865 | # frozen_string_literal: true
# The MIT License (MIT)
#
# Copyright <YEAR> <COPYRIGHT HOLDER>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# [START testing_v0_generated_AllSubclientsConsumer_NonCopyAnotherLroRpc_sync]
# NOTE: auto-generated usage sample (see the DO NOT EDIT header above);
# regenerate with gapic-generator-ruby rather than editing by hand.
require "testing/nonstandard_lro_grpc"

# Create a client object. The client can be reused for multiple calls.
client = Testing::NonstandardLroGrpc::AllSubclientsConsumer::Client.new

# Create a request. To set request fields, pass in keyword arguments.
request = Testing::NonstandardLroGrpc::NonCopyRequest.new

# Call the non_copy_another_lro_rpc method.
result = client.non_copy_another_lro_rpc request

# The returned object is of type Testing::NonstandardLroGrpc::NonstandardOperation.
p result
# [END testing_v0_generated_AllSubclientsConsumer_NonCopyAnotherLroRpc_sync]
| 44.404762 | 83 | 0.799464 |
bb40cb3ea9cf224cf194cc493010104ba2b088dc | 4,979 | # encoding: UTF-8
#
# Author:: Geoff Meakin
# Author:: Matt Ray (<[email protected]>)
#
# Copyright:: 2012-2014, Chef Software, Inc <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef"
module Spiceweasel
# models the existing Chef repository as a manifest
class ExtractLocal
def self.parse_objects # rubocop:disable CyclomaticComplexity
objects = {}
# BERKSHELF
berksfile = parse_berkshelf(objects)
# COOKBOOKS
cookbooks = berksfile ? resolve_cookbooks(berksfile.cookbook_list) : resolve_cookbooks
objects["cookbooks"] = cookbooks.sort_by { |c| [c.keys[0]] } unless cookbooks.empty?
# ROLES
parse_roles(objects)
# ENVIRONMENTS
environments = []
Dir.glob("environments/*.{rb,json}").each do |environment_full_path|
environment = grab_name_from_path(environment_full_path)
Spiceweasel::Log.debug("dir_ext: environment: '#{environment}'")
environments << { environment => nil }
end
objects["environments"] = environments.sort_by { |e| [e.keys[0]] } unless environments.empty?
# DATA BAGS
data_bags = parse_data_bags
objects["data bags"] = data_bags.sort_by { |d| [d.keys[0]] } unless data_bags.empty?
# NODES
# TODO: Cant use this yet as node_list.rb doesnt support node from file syntax but expects the node info to be part of the objects passed in
# nodes = []
# Dir.glob("nodes/*.{rb,json}").each do |node_full_path|
# node = self.grab_name_from_path(node_full_path)
# nodes << {node => nil}
# end
# objects['nodes'] = nodes unless nodes.empty?
objects
end
def self.parse_data_bags
data_bags = []
Dir.glob("data_bags/*").each do |data_bag_full_path|
next unless File.directory?(data_bag_full_path)
data_bag = data_bag_full_path.split("/").last
Spiceweasel::Log.debug("dir_ext: data_bag: '#{data_bag}'")
data_bag_items = []
Dir.glob("#{data_bag_full_path}/*.{rb,json}").each do |data_bag_item_full_path|
Spiceweasel::Log.debug("dir_ext: data_bag: '#{data_bag}':'#{data_bag_item_full_path}'")
data_bag_items << grab_name_from_path(data_bag_item_full_path)
end if File.directory?(data_bag_full_path)
data_bags << { data_bag => { "items" => data_bag_items.sort } }
end
data_bags
end
def self.parse_roles(objects)
roles = []
Dir.glob("roles/*.{rb,json}").each do |role_full_path|
role = grab_name_from_path(role_full_path)
Spiceweasel::Log.debug("dir_ext: role: '#{role}'")
roles << { role => nil }
end
objects["roles"] = roles.sort_by { |r| [r.keys[0]] } unless roles.nil? || roles.empty?
end
def self.parse_berkshelf(objects)
if File.file?("./Berksfile")
objects["berksfile"] = nil
berksfile = Berksfile.new(objects["berksfile"])
end
berksfile
end
def self.grab_name_from_path(path)
name = path.split("/").last.split(".")
name.pop if name.length > 1
name.join(".")
end
def self.resolve_cookbooks(berkshelf_cookbooks = {})
require "solve"
loader = Chef::CookbookLoader.new(Spiceweasel::Config[:cookbook_dir])
loader.load_cookbooks
books = loader.cookbooks_by_name
graph = Solve::Graph.new
cblist = []
# push in the berkshelf cookbooks to cover any other deps
berkshelf_cookbooks.each do |name, version|
Spiceweasel::Log.debug("dir_ext:berks: #{name} #{version}")
graph.artifact(name, version)
end
books.each do |name, cb|
Spiceweasel::Log.debug("dir_ext: #{name} #{cb.version}")
artifact = graph.artifact(name, cb.version)
cblist.push([name, cb.version])
cb.metadata.dependencies.each do |dep_name, dep_version|
artifact.depends(dep_name, dep_version)
end
end
# get the cookbooks and their versions, map to cookbook hash format
begin
cookbooks = []
Solve.it!(graph, cblist).each { |k, v| cookbooks.push(k => { "version" => v }) }
rescue Solve::Errors::NoSolutionError
STDERR.puts "ERROR: There are missing cookbook dependencies, please check your metadata.rb files."
exit(-1)
end
# remove any cookbooks managed by berkshelf
cookbooks.delete_if { |x| berkshelf_cookbooks.keys.member?(x.keys[0]) }
end
end
end
| 36.610294 | 146 | 0.65234 |
015d04570ecd184ef125dac3f9e1623b18367b38 | 16,675 | require 'puppet/util/windows'
require 'win32/process'
require 'ffi'
module Puppet::Util::Windows::Process
extend Puppet::Util::Windows::String
extend FFI::Library
WAIT_TIMEOUT = 0x102
def execute(command, arguments, stdin, stdout, stderr)
Process.create( :command_line => command, :startup_info => {:stdin => stdin, :stdout => stdout, :stderr => stderr}, :close_handles => false )
end
module_function :execute
def wait_process(handle)
while WaitForSingleObject(handle, 0) == WAIT_TIMEOUT
sleep(1)
end
exit_status = -1
FFI::MemoryPointer.new(:dword, 1) do |exit_status_ptr|
if GetExitCodeProcess(handle, exit_status_ptr) == FFI::WIN32_FALSE
raise Puppet::Util::Windows::Error.new(_("Failed to get child process exit code"))
end
exit_status = exit_status_ptr.read_dword
# $CHILD_STATUS is not set when calling win32/process Process.create
# and since it's read-only, we can't set it. But we can execute a
# a shell that simply returns the desired exit status, which has the
# desired effect.
%x{#{ENV['COMSPEC']} /c exit #{exit_status}}
end
exit_status
end
module_function :wait_process
def get_current_process
# this pseudo-handle does not require closing per MSDN docs
GetCurrentProcess()
end
module_function :get_current_process
def open_process_token(handle, desired_access, &block)
token_handle = nil
begin
FFI::MemoryPointer.new(:handle, 1) do |token_handle_ptr|
result = OpenProcessToken(handle, desired_access, token_handle_ptr)
if result == FFI::WIN32_FALSE
raise Puppet::Util::Windows::Error.new(
"OpenProcessToken(#{handle}, #{desired_access.to_s(8)}, #{token_handle_ptr})")
end
yield token_handle = token_handle_ptr.read_handle
end
token_handle
ensure
FFI::WIN32.CloseHandle(token_handle) if token_handle
end
# token_handle has had CloseHandle called against it, so nothing to return
nil
end
module_function :open_process_token
# Execute a block with the current process token
def with_process_token(access, &block)
handle = get_current_process
open_process_token(handle, access) do |token_handle|
yield token_handle
end
# all handles have been closed, so nothing to safely return
nil
end
module_function :with_process_token
def lookup_privilege_value(name, system_name = '', &block)
FFI::MemoryPointer.new(LUID.size) do |luid_ptr|
result = LookupPrivilegeValueW(
wide_string(system_name),
wide_string(name.to_s),
luid_ptr
)
if result == FFI::WIN32_FALSE
raise Puppet::Util::Windows::Error.new(
"LookupPrivilegeValue(#{system_name}, #{name}, #{luid_ptr})")
end
yield LUID.new(luid_ptr)
end
# the underlying MemoryPointer for LUID is cleaned up by this point
nil
end
module_function :lookup_privilege_value
# Retrieves the requested +token_information+ class (a
# TOKEN_INFORMATION_CLASS symbol such as :TokenPrivileges) for
# +token_handle+ and yields the raw result buffer to the block.
# GetTokenInformation is called twice: once with a zero-length buffer to
# learn the required size, then with a buffer of that size. The buffer
# is freed when this method returns, so the block must copy anything it
# needs. Raises Puppet::Util::Windows::Error on failure.
def get_token_information(token_handle, token_information, &block)
  # to determine buffer size
  FFI::MemoryPointer.new(:dword, 1) do |return_length_ptr|
    result = GetTokenInformation(token_handle, token_information, nil, 0, return_length_ptr)
    return_length = return_length_ptr.read_dword

    if return_length <= 0
      raise Puppet::Util::Windows::Error.new(
        "GetTokenInformation(#{token_handle}, #{token_information}, nil, 0, #{return_length_ptr})")
    end

    # re-call API with properly sized buffer for all results
    FFI::MemoryPointer.new(return_length) do |token_information_buf|
      result = GetTokenInformation(token_handle, token_information,
        token_information_buf, return_length, return_length_ptr)

      if result == FFI::WIN32_FALSE
        raise Puppet::Util::Windows::Error.new(
          "GetTokenInformation(#{token_handle}, #{token_information}, #{token_information_buf}, " +
            "#{return_length}, #{return_length_ptr})")
      end

      yield token_information_buf
    end
  end

  # GetTokenInformation buffer has been cleaned up by this point, nothing to return
  nil
end
module_function :get_token_information
# Converts a raw GetTokenInformation(:TokenPrivileges) buffer into a
# hash of the form:
#   { :count => Integer, :privileges => [LUID_AND_ATTRIBUTES, ...] }
# The returned structs reference +token_information_buf+ directly, so
# they are only valid while that buffer is alive.
def parse_token_information_as_token_privileges(token_information_buf)
  raw_privileges = TOKEN_PRIVILEGES.new(token_information_buf)
  privileges = { :count => raw_privileges[:PrivilegeCount], :privileges => [] }

  offset = token_information_buf + TOKEN_PRIVILEGES.offset_of(:Privileges)
  privilege_ptr = FFI::Pointer.new(LUID_AND_ATTRIBUTES, offset)

  # extract each instance of LUID_AND_ATTRIBUTES
  0.upto(privileges[:count] - 1) do |i|
    privileges[:privileges] << LUID_AND_ATTRIBUTES.new(privilege_ptr[i])
  end

  privileges
end
module_function :parse_token_information_as_token_privileges
def parse_token_information_as_token_elevation(token_information_buf)
TOKEN_ELEVATION.new(token_information_buf)
end
module_function :parse_token_information_as_token_elevation
# Full access rights to a token (TOKEN_ALL_ACCESS from winnt.h).
TOKEN_ALL_ACCESS = 0xF01FF
# Win32 error returned when a named privilege is unknown to the system.
ERROR_NO_SUCH_PRIVILEGE = 1313

# Returns true when the current process token holds the
# SeCreateSymbolicLinkPrivilege. On systems that predate that privilege
# the lookup fails with ERROR_NO_SUCH_PRIVILEGE and false is returned.
def process_privilege_symlink?
  privilege_symlink = false
  handle = get_current_process
  open_process_token(handle, TOKEN_ALL_ACCESS) do |token_handle|
    lookup_privilege_value('SeCreateSymbolicLinkPrivilege') do |luid|
      get_token_information(token_handle, :TokenPrivileges) do |token_info|
        token_privileges = parse_token_information_as_token_privileges(token_info)
        # a privilege is "held" when its LUID appears in the token's list
        privilege_symlink = token_privileges[:privileges].any? { |p| p[:Luid].values == luid.values }
      end
    end
  end

  privilege_symlink
rescue Puppet::Util::Windows::Error => e
  if e.code == ERROR_NO_SUCH_PRIVILEGE
    false # pre-Vista
  else
    raise e
  end
end
module_function :process_privilege_symlink?
# Access right required to query (but not modify) a token.
TOKEN_QUERY = 0x0008

# Returns whether or not the owner of the current process is running
# with elevated security privileges.
#
# Only supported on Windows Vista or later; on older versions the
# :TokenElevation query fails with ERROR_NO_SUCH_PRIVILEGE and the
# pre-Vista default of false (nil here, via the rescue) is the result.
def elevated_security?
  # default / pre-Vista
  elevated = false
  handle = nil

  begin
    handle = get_current_process
    open_process_token(handle, TOKEN_QUERY) do |token_handle|
      get_token_information(token_handle, :TokenElevation) do |token_info|
        token_elevation = parse_token_information_as_token_elevation(token_info)
        # TokenIsElevated member of the TOKEN_ELEVATION struct
        elevated = token_elevation[:TokenIsElevated] != 0
      end
    end

    elevated
  rescue Puppet::Util::Windows::Error => e
    raise e if e.code != ERROR_NO_SUCH_PRIVILEGE
  ensure
    # NOTE(review): handle is the GetCurrentProcess pseudo-handle, which
    # per the comment on get_current_process needs no closing; this
    # CloseHandle looks redundant — confirm before removing.
    FFI::WIN32.CloseHandle(handle) if handle
  end
end
module_function :elevated_security?
# Returns the Windows major version number (dwMajorVersion from
# GetVersionEx), e.g. 6 for Vista / Server 2008 and later.
# Raises Puppet::Util::Windows::Error if the API call fails.
def windows_major_version
  ver = 0

  FFI::MemoryPointer.new(OSVERSIONINFO.size) do |os_version_ptr|
    os_version = OSVERSIONINFO.new(os_version_ptr)
    # the API requires the struct size to be filled in by the caller
    os_version[:dwOSVersionInfoSize] = OSVERSIONINFO.size

    result = GetVersionExW(os_version_ptr)

    if result == FFI::WIN32_FALSE
      raise Puppet::Util::Windows::Error.new(_("GetVersionEx failed"))
    end

    ver = os_version[:dwMajorVersion]
  end

  ver
end
module_function :windows_major_version
# Returns a hash of the current environment variables encoded as UTF-8
# The memory block returned from GetEnvironmentStringsW is double-null terminated and the vars are paired as below;
# Var1=Value1\0
# Var2=Value2\0
# VarX=ValueX\0\0
# Note - Some env variable names start with '=' and are excluded from the return value
# Note - The env_ptr MUST be freed using the FreeEnvironmentStringsW function
# Note - There is no technical limitation to the size of the environment block returned.
#        However a practical limit of 64K is used as no single environment variable can exceed 32KB
def get_environment_strings
  env_ptr = GetEnvironmentStringsW()

  # pass :invalid => :replace to the Ruby String#encode to use replacement characters
  pairs = env_ptr.read_arbitrary_wide_string_up_to(65534, :double_null, { :invalid => :replace })
    .split(?\x00)
    .reject { |env_str| env_str.nil? || env_str.empty? || env_str[0] == '=' }
    .reject do |env_str|
      # reject any string containing the Unicode replacement character
      if env_str.include?("\uFFFD")
        Puppet.warning("Discarding environment variable #{env_str} which contains invalid bytes")
        true
      end
    end
    .map { |env_pair| env_pair.split('=', 2) }

  Hash[ pairs ]
ensure
  if env_ptr && ! env_ptr.null?
    # best-effort cleanup; nothing more can be done than note the leak
    if FreeEnvironmentStringsW(env_ptr) == FFI::WIN32_FALSE
      Puppet.debug "FreeEnvironmentStringsW memory leak"
    end
  end
end
module_function :get_environment_strings
# Sets (or, when +val+ is nil, removes) the process environment variable
# +name+ via the Win32 SetEnvironmentVariableW API.
#
# @param name [String] variable name; must not be nil or empty
# @param val [String, nil] new value, or nil to delete the variable
# @raise [Puppet::Util::Windows::Error] when name is missing or the API fails
def set_environment_variable(name, val)
  # Fix: this previously used the method-call form
  # Puppet::Util::Windows::Error(...) — not a constructor — which raised
  # NoMethodError instead of the intended error.
  raise Puppet::Util::Windows::Error.new(_('environment variable name must not be nil or empty')) if ! name || name.empty?

  FFI::MemoryPointer.from_string_to_wide_string(name) do |name_ptr|
    if val.nil?
      # a NULL value pointer removes the variable from the environment
      if SetEnvironmentVariableW(name_ptr, FFI::MemoryPointer::NULL) == FFI::WIN32_FALSE
        raise Puppet::Util::Windows::Error.new(_("Failed to remove environment variable: %{name}") % { name: name })
      end
    else
      FFI::MemoryPointer.from_string_to_wide_string(val) do |val_ptr|
        if SetEnvironmentVariableW(name_ptr, val_ptr) == FFI::WIN32_FALSE
          raise Puppet::Util::Windows::Error.new(_("Failed to set environment variable: %{name}") % { name: name })
        end
      end
    end
  end
end
module_function :set_environment_variable
def get_system_default_ui_language
GetSystemDefaultUILanguage()
end
module_function :get_system_default_ui_language
# Returns whether or not the OS has the ability to set elevated
# token information.
#
# Returns true on Windows Vista or later, otherwise false
#
def supports_elevated_security?
windows_major_version >= 6
end
module_function :supports_elevated_security?
ABOVE_NORMAL_PRIORITY_CLASS = 0x0008000
BELOW_NORMAL_PRIORITY_CLASS = 0x0004000
HIGH_PRIORITY_CLASS = 0x0000080
IDLE_PRIORITY_CLASS = 0x0000040
NORMAL_PRIORITY_CLASS = 0x0000020
REALTIME_PRIORITY_CLASS = 0x0000010
ffi_convention :stdcall
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032(v=vs.85).aspx
# DWORD WINAPI WaitForSingleObject(
# _In_ HANDLE hHandle,
# _In_ DWORD dwMilliseconds
# );
ffi_lib :kernel32
attach_function_private :WaitForSingleObject,
[:handle, :dword], :dword
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms683189(v=vs.85).aspx
# BOOL WINAPI GetExitCodeProcess(
# _In_ HANDLE hProcess,
# _Out_ LPDWORD lpExitCode
# );
ffi_lib :kernel32
attach_function_private :GetExitCodeProcess,
[:handle, :lpdword], :win32_bool
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms683179(v=vs.85).aspx
# HANDLE WINAPI GetCurrentProcess(void);
ffi_lib :kernel32
attach_function_private :GetCurrentProcess, [], :handle
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms683187(v=vs.85).aspx
# LPTCH GetEnvironmentStrings(void);
ffi_lib :kernel32
attach_function_private :GetEnvironmentStringsW, [], :pointer
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms683151(v=vs.85).aspx
# BOOL FreeEnvironmentStrings(
# _In_ LPTCH lpszEnvironmentBlock
# );
ffi_lib :kernel32
attach_function_private :FreeEnvironmentStringsW,
[:pointer], :win32_bool
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms686206(v=vs.85).aspx
# BOOL WINAPI SetEnvironmentVariableW(
# _In_ LPCTSTR lpName,
# _In_opt_ LPCTSTR lpValue
# );
ffi_lib :kernel32
attach_function_private :SetEnvironmentVariableW,
[:lpcwstr, :lpcwstr], :win32_bool
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa379295(v=vs.85).aspx
# BOOL WINAPI OpenProcessToken(
# _In_ HANDLE ProcessHandle,
# _In_ DWORD DesiredAccess,
# _Out_ PHANDLE TokenHandle
# );
ffi_lib :advapi32
attach_function_private :OpenProcessToken,
[:handle, :dword, :phandle], :win32_bool
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa379261(v=vs.85).aspx
# typedef struct _LUID {
# DWORD LowPart;
# LONG HighPart;
# } LUID, *PLUID;
class LUID < FFI::Struct
layout :LowPart, :dword,
:HighPart, :win32_long
end
# https://msdn.microsoft.com/en-us/library/Windows/desktop/aa379180(v=vs.85).aspx
# BOOL WINAPI LookupPrivilegeValue(
# _In_opt_ LPCTSTR lpSystemName,
# _In_ LPCTSTR lpName,
# _Out_ PLUID lpLuid
# );
ffi_lib :advapi32
attach_function_private :LookupPrivilegeValueW,
[:lpcwstr, :lpcwstr, :pointer], :win32_bool
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa379626(v=vs.85).aspx
TOKEN_INFORMATION_CLASS = enum(
:TokenUser, 1,
:TokenGroups,
:TokenPrivileges,
:TokenOwner,
:TokenPrimaryGroup,
:TokenDefaultDacl,
:TokenSource,
:TokenType,
:TokenImpersonationLevel,
:TokenStatistics,
:TokenRestrictedSids,
:TokenSessionId,
:TokenGroupsAndPrivileges,
:TokenSessionReference,
:TokenSandBoxInert,
:TokenAuditPolicy,
:TokenOrigin,
:TokenElevationType,
:TokenLinkedToken,
:TokenElevation,
:TokenHasRestrictions,
:TokenAccessInformation,
:TokenVirtualizationAllowed,
:TokenVirtualizationEnabled,
:TokenIntegrityLevel,
:TokenUIAccess,
:TokenMandatoryPolicy,
:TokenLogonSid,
:TokenIsAppContainer,
:TokenCapabilities,
:TokenAppContainerSid,
:TokenAppContainerNumber,
:TokenUserClaimAttributes,
:TokenDeviceClaimAttributes,
:TokenRestrictedUserClaimAttributes,
:TokenRestrictedDeviceClaimAttributes,
:TokenDeviceGroups,
:TokenRestrictedDeviceGroups,
:TokenSecurityAttributes,
:TokenIsRestricted,
:MaxTokenInfoClass
)
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa379263(v=vs.85).aspx
# typedef struct _LUID_AND_ATTRIBUTES {
# LUID Luid;
# DWORD Attributes;
# } LUID_AND_ATTRIBUTES, *PLUID_AND_ATTRIBUTES;
class LUID_AND_ATTRIBUTES < FFI::Struct
layout :Luid, LUID,
:Attributes, :dword
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa379630(v=vs.85).aspx
# typedef struct _TOKEN_PRIVILEGES {
# DWORD PrivilegeCount;
# LUID_AND_ATTRIBUTES Privileges[ANYSIZE_ARRAY];
# } TOKEN_PRIVILEGES, *PTOKEN_PRIVILEGES;
class TOKEN_PRIVILEGES < FFI::Struct
layout :PrivilegeCount, :dword,
:Privileges, [LUID_AND_ATTRIBUTES, 1] # placeholder for offset
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/bb530717(v=vs.85).aspx
# typedef struct _TOKEN_ELEVATION {
# DWORD TokenIsElevated;
# } TOKEN_ELEVATION, *PTOKEN_ELEVATION;
class TOKEN_ELEVATION < FFI::Struct
layout :TokenIsElevated, :dword
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa446671(v=vs.85).aspx
# BOOL WINAPI GetTokenInformation(
# _In_ HANDLE TokenHandle,
# _In_ TOKEN_INFORMATION_CLASS TokenInformationClass,
# _Out_opt_ LPVOID TokenInformation,
# _In_ DWORD TokenInformationLength,
# _Out_ PDWORD ReturnLength
# );
ffi_lib :advapi32
attach_function_private :GetTokenInformation,
[:handle, TOKEN_INFORMATION_CLASS, :lpvoid, :dword, :pdword ], :win32_bool
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms724834%28v=vs.85%29.aspx
# typedef struct _OSVERSIONINFO {
# DWORD dwOSVersionInfoSize;
# DWORD dwMajorVersion;
# DWORD dwMinorVersion;
# DWORD dwBuildNumber;
# DWORD dwPlatformId;
# TCHAR szCSDVersion[128];
# } OSVERSIONINFO;
class OSVERSIONINFO < FFI::Struct
layout(
:dwOSVersionInfoSize, :dword,
:dwMajorVersion, :dword,
:dwMinorVersion, :dword,
:dwBuildNumber, :dword,
:dwPlatformId, :dword,
:szCSDVersion, [:wchar, 128]
)
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms724451(v=vs.85).aspx
# BOOL WINAPI GetVersionEx(
# _Inout_ LPOSVERSIONINFO lpVersionInfo
# );
ffi_lib :kernel32
attach_function_private :GetVersionExW,
[:pointer], :win32_bool
# https://msdn.microsoft.com/en-us/library/windows/desktop/dd318123(v=vs.85).aspx
# LANGID GetSystemDefaultUILanguage(void);
ffi_lib :kernel32
attach_function_private :GetSystemDefaultUILanguage, [], :word
end
| 33.483936 | 145 | 0.706927 |
1abf6c93bfa9a2bd90e212d10b4f57650899012e | 582 | require 'rails_helper'
# Regression spec: pressing Enter inside a text field of the variable
# draft form must not submit the form (which would surface the
# invalid-data confirmation prompt).
describe 'Form Submission with Enter Key', js: true do
  before do
    login
    # NOTE(review): assumes the login helper seeds a user with
    # urs_uid 'testuser' — confirm against the test fixtures.
    draft = create(:empty_variable_draft, user: User.where(urs_uid: 'testuser').first)
    visit edit_variable_draft_path(draft)
  end

  context 'when pressing enter on a form' do
    before do
      fill_in 'Name', with: 'Name'
      # send a raw Enter keypress to the focused input
      page.find('#variable_draft_draft_name').native.send_keys(:enter)
    end

    it 'does not submit the form' do
      expect(page).to have_no_content('This page has invalid data. Are you sure you want to save it and proceed?')
    end
  end
end
| 26.454545 | 114 | 0.701031 |
1aa2fe73b2b88b6d3a8a02d8e1709bf184266f76 | 4,849 | require 'net/http'
require 'cgi/util'
require 'nokogiri'
require 'xmlrpc/client'
require 'zlib'
require 'stringio'
module Suby
class Downloader
DOWNLOADERS = []
def self.inherited(downloader)
DOWNLOADERS << downloader
end
attr_reader :show, :season, :episode, :video_data, :file, :lang
# @param file the video file to find subtitles for (Pathname-like —
#   #sub_ext and #dir are called on it elsewhere in this class)
# @param args trailing optional subtitle language code (defaults to 'en')
def initialize(file, *args)
  @file = file
  @lang = (args.last || 'en').to_sym
  @video_data = FilenameParser.parse(file)
  if video_data[:type] == :tvshow
    @show, @season, @episode = video_data.values_at(:show, :season, :episode)
  end
end
def support_video_type?
self.class::SUBTITLE_TYPES.include? video_data[:type]
end
def to_s
self.class.name.sub(/^.+::/, '')
end
def http
@http ||= Net::HTTP.new(self.class::SITE).start
end
def xmlrpc
@xmlrpc ||= XMLRPC::Client.new(self.class::XMLRPC_HOST, self.class::XMLRPC_PATH).tap do |xmlrpc|
xmlrpc.http_header_extra = { 'accept-encoding' => 'identity' } if RbConfig::CONFIG['MAJOR'] == '2'
end
end
# Issues a GET for +path+ over the persistent connection to SITE.
# Absolute URLs pointing at a different host are rejected because the
# connection (#http) is pinned to SITE. When +parse_response+ is true
# (default) the response body is returned and DownloaderError is raised
# for any non-success status; otherwise the raw Net::HTTP response
# object is returned.
def get(path, initheader = {}, parse_response = true)
  if host = URI.parse(path).host and host != self.class::SITE
    raise DownloaderError, "Cross-Origin request not supported yet (#{host})"
  end
  response = http.get(path, initheader)
  if parse_response
    unless Net::HTTPSuccess === response
      raise DownloaderError, "Invalid response for #{path}: #{response}"
    end
    response.body
  else
    response
  end
end
def post(path, data = {}, initheader = {})
post = Net::HTTP::Post.new(path, initheader)
post.form_data = data
response = http.request(post)
unless Net::HTTPSuccess === response
raise DownloaderError, "Invalid response for #{path}(#{data}): " +
response.inspect
end
response.body
end
def get_redirection(path, initheader = {})
response = http.get(path, initheader)
location = response['Location']
unless (Net::HTTPFound === response or
Net::HTTPSuccess === response) and location
raise DownloaderError, "Invalid response for #{path}: " +
"#{response}: location: #{location.inspect}, #{response.body}"
end
location
end
# Downloads and writes the subtitles for the file, converting low-level
# network failures into Suby::DownloaderError.
# NOTE(review): download_url is provided by concrete downloader
# subclasses — not visible in this class.
def download
  begin
    extract download_url
  rescue Timeout::Error, Errno::ECONNREFUSED, Errno::EINVAL,
    Errno::ECONNRESET, EOFError, RuntimeError, Net::HTTPBadResponse,
    Net::HTTPHeaderSyntaxError, Net::ProtocolError => error
    raise Suby::DownloaderError, error.message
  end
end
def subtitles(url_or_response = download_url)
if Net::HTTPSuccess === url_or_response
url_or_response.body
else
get(url_or_response)
end
end
# Fetches the subtitle payload, closes the HTTP session, unpacks it
# according to the subclass's FORMAT constant, and writes the (possibly
# transcoded, see #encode) result next to the video file:
#   :file - raw subtitle bytes, used as-is
#   :gz   - a single gzip-compressed subtitle
#   :zip  - an archive, extracted via Suby.extract_sub_from_archive
# NOTE(review): TEMP_ARCHIVE / TEMP_SUBTITLES are constants defined
# elsewhere; fixed temp paths are not concurrency-safe — confirm.
def extract(url_or_response)
  contents = subtitles(url_or_response)
  http.finish
  format = self.class::FORMAT
  case format
  when :file
    # nothing special to do
  when :gz
    begin
      gz = Zlib::GzipReader.new(StringIO.new(contents))
      contents = gz.read
    ensure
      gz.close if gz
    end
  when :zip
    TEMP_ARCHIVE.write contents
    Suby.extract_sub_from_archive(TEMP_ARCHIVE, format, TEMP_SUBTITLES)
    contents = TEMP_SUBTITLES.read
  else
    raise "unknown subtitles format: #{format}"
  end
  sub_name(contents).write encode contents
end
def sub_name(contents)
file.sub_ext sub_extension(contents)
end
# Guesses the subtitle file extension from its content: SubRip (.srt)
# files begin with a "1" cue index on its own line; everything else is
# treated as a ".sub" subtitle.
def sub_extension(contents)
  contents[0..10] =~ /1\r?\n/ ? 'srt' : 'sub'
end
def imdbid
@imdbid ||= begin
nfo_file = find_nfo_file
convert_to_utf8_from_latin1(nfo_file.read)[%r!imdb\.[^/]+/title/tt(\d+)!i, 1] if nfo_file
end
end
def find_nfo_file
@file.dir.children.find { |file| file.ext == "nfo" }
end
# Ensures +content+ is valid in its claimed encoding. If it is not,
# the bytes are retried as ISO-8859-1 (Latin-1) and transcoded to
# UTF-8; the optional block is yielded so callers can record that a
# transcode happened. If the bytes are not valid Latin-1 either, the
# original encoding is restored and the content returned untouched.
def convert_to_utf8_from_latin1(content)
  if content.valid_encoding?
    content
  else
    enc = content.encoding
    if content.force_encoding("ISO-8859-1").valid_encoding?
      yield if block_given?
      content.encode("UTF-8")
    else
      # restore original encoding
      # Fix: previously called `subtitles` (a method that re-downloads
      # from the network) instead of the local +content+ argument.
      content.force_encoding(enc)
    end
  end
end
def success_message
"Found"
end
# Returns the subtitle text to write to disk. French subtitles are
# commonly distributed as Latin-1, so they are transcoded to UTF-8;
# on a successful transcode, #success_message is redefined on this
# instance to mention it. Other languages pass through untouched.
def encode(subtitles)
  if @lang == :fr
    convert_to_utf8_from_latin1(subtitles) do
      # singleton redefinition: only this downloader instance reports
      # the transcode in its success message
      def self.success_message
        "#{super} (transcoded from ISO-8859-1)"
      end
    end
  else
    subtitles
  end
end
end
end
# Defines downloader order
%w[
opensubtitles
tvsubtitles
addic7ed
].each { |downloader| require_relative "downloader/#{downloader}" }
| 26.210811 | 106 | 0.605898 |
79e85fe860cab59694f12c135332cd7174058270 | 694 | class DiffSoFancy < Formula
desc "Good-lookin' diffs with diff-highlight and more"
homepage "https://github.com/so-fancy/diff-so-fancy"
url "https://github.com/so-fancy/diff-so-fancy/archive/v0.9.3.tar.gz"
sha256 "b057683f325874f2473ebfd90583083efc18253cd42fe8d16bcd25bbe426babb"
bottle :unneeded
def install
# temporary fix until upstream uses a directory other
# than lib for the perl script.
inreplace "diff-so-fancy", "/lib/", "/libexec/"
prefix.install "lib" => "libexec"
prefix.install Dir["third_party", "diff-so-fancy"]
bin.install_symlink prefix/"diff-so-fancy"
end
test do
ENV["TERM"] = "xterm"
system bin/"diff-so-fancy"
end
end
| 28.916667 | 75 | 0.707493 |
4a51e50a34c9b8e9d0063a5f346217cf28f73362 | 1,781 | class TextItemHeaderMigrator < SimpleService
def initialize(item)
@item = item
end
def call
return unless @item.is_a?(Item::TextItem) && @item.ops.present?
@item.ops = modify_quill_ops
@item.update_columns(data_content: @item.data_content)
end
private
def modify_quill_ops
ops = []
@item.ops.each_with_index do |op, i|
op = Mashie.new(op)
# this will be true on the newline, we actually want to apply size to the previous op
header = op.attributes&.header
# we are keeping h5 for the title style
unless i.positive? && header.present? && header != 5
ops[i] = op
next
end
size = op.attributes.header == 1 ? 'huge' : 'large'
previous_op = Mashie.new(@item.ops[i - 1])
next if previous_op&.insert.nil?
if previous_op.insert.include?("\n")
# we have to split up the previous insert, move it to this op
previous_op_split = previous_op.insert.split("\n")
# pop the remainder off
op.insert = "#{previous_op_split.pop}\n"
previous_op.insert = "#{previous_op_split.join("\n")}\n"
# modify current op (which would just be a "\n" for a quill header)
op.attributes ||= {}
op.attributes.size = size
# add newline to the next op
if @item.ops[i + 1].present?
@item.ops[i + 1]['insert'].insert 0, "\n"
else
ops[i + 1] = { insert: "\n" }
end
else
previous_op.attributes ||= {}
previous_op.attributes.size = size
end
op.attributes.delete(:header)
op.delete(:attributes) if op.attributes.empty?
ops[i] = op
ops[i - 1] = previous_op
end
ops
rescue
puts "error migrating item #{@item.id}"
end
end
| 27.828125 | 91 | 0.597417 |
18a5e4fdb114f5424e35d1e8632e2272cfb480d2 | 426 | # external dependencies
require 'curses'
require 'singleton'
# internal dependencies
require 'ascii_art/painter'
require 'ascii_art/canvas'
require 'ascii_art/brush'
require 'ascii_art/printer'
require 'ascii_art/scanner'
# Entry points for the ASCII art painter.
module AsciiArt
  # Launches the interactive painter; +filename+ is forwarded to
  # Painter (defined in ascii_art/painter, not visible here).
  def self.start(filename=nil)
    Painter.new(filename).paint
  end

  # Loads a previously saved drawing from +load_file+ into a painter
  # created with +print_file+.
  def self.start_with_scan(load_file, print_file=nil)
    Painter.new(print_file).load_drawing(load_file)
  end
end
| 20.285714 | 53 | 0.786385 |
792b40f4497067404909101f2db246c7f27b63ef | 2,225 | class Awscli < Formula
include Language::Python::Virtualenv
desc "Official Amazon AWS command-line interface"
homepage "https://aws.amazon.com/cli/"
url "https://github.com/aws/aws-cli/archive/2.2.12.tar.gz"
sha256 "4a7fe6f1be73d454b0f506de5ec9f9e78ead3cb04ad8c3b4f6087f9d102d65e6"
license "Apache-2.0"
head "https://github.com/aws/aws-cli.git", branch: "v2"
bottle do
sha256 cellar: :any, arm64_big_sur: "348360e46249a7641c737245a22e838dfae0819e983b8bcd4554777a16165a53"
sha256 big_sur: "d6fb3893dbc440cd466bccf330f0fba4101ed291dfd11d188ddbc022171bb65a"
sha256 catalina: "edcadbdf33d436d118940c78c21fc28df82b626a76e10721407046ad81041519"
sha256 mojave: "2236651ba2cfe8ff85dd9e5028e5189a939e54510aac8a8581a70f33a2f80278"
end
depends_on "cmake" => :build
depends_on "[email protected]"
uses_from_macos "groff"
on_linux do
depends_on "libyaml"
end
def install
venv = virtualenv_create(libexec, "python3")
system libexec/"bin/pip", "install", "-v", "-r", "requirements.txt",
"--ignore-installed", buildpath
system libexec/"bin/pip", "uninstall", "-y", "awscli"
venv.pip_install_and_link buildpath
system libexec/"bin/pip", "uninstall", "-y", "pyinstaller"
pkgshare.install "awscli/examples"
rm Dir["#{bin}/{aws.cmd,aws_bash_completer,aws_zsh_completer.sh}"]
bash_completion.install "bin/aws_bash_completer"
zsh_completion.install "bin/aws_zsh_completer.sh"
(zsh_completion/"_aws").write <<~EOS
#compdef aws
_aws () {
local e
e=$(dirname ${funcsourcetrace[1]%:*})/aws_zsh_completer.sh
if [[ -f $e ]]; then source $e; fi
}
EOS
system libexec/"bin/python3", "scripts/gen-ac-index", "--include-builtin-index"
end
def caveats
<<~EOS
The "examples" directory has been installed to:
#{HOMEBREW_PREFIX}/share/awscli/examples
EOS
end
test do
assert_match "topics", shell_output("#{bin}/aws help")
assert_includes Dir["#{libexec}/lib/python3.9/site-packages/awscli/data/*"],
"#{libexec}/lib/python3.9/site-packages/awscli/data/ac.index"
end
end
| 34.765625 | 106 | 0.678652 |
91e5b0dd140459bf71922477e695b002305b1c43 | 2,345 | class Itinerary < ActiveRecord::Base
belongs_to :user
has_many :reservations
has_many :meals
has_many :activities
has_many :hotels, through: :reservations
has_many :attractions, through: :activities
has_many :restaurants, through: :meals
has_many :itinerary_items
validates_presence_of :title, :user_id
def date_range
start_date = self.start_date.to_date
end_date = self.end_date.to_date
itinerary_dates = (start_date..end_date).map{|date|date.strftime("%B %d, %Y")}
end
def res_pins
reservations = self.reservations.collect do |reservation, json|
[reservation.id, reservation.hotel.name, reservation.hotel.address, reservation.hotel.latitude, reservation.hotel.longitude, "reservations"]
end.to_json.html_safe
end
def meal_pins
meal = self.meals.collect do |meal, json|
[meal.id, meal.restaurant.name, meal.restaurant.address, meal.restaurant.latitude, meal.restaurant.longitude, "meals"]
end.to_json.html_safe
end
def activity_pins
activities = self.activities.collect do |activity, json|
[activity.id, activity.attraction.name, activity.attraction.address, activity.attraction.latitude, activity.attraction.longitude, "activities"]
end.to_json.html_safe
end
def itinerary_display_pic?
if activities.first.nil? || activities.first.photos.first.nil?
return false
else
return true
end
end
def itinerary_item_photos
photos = []
["reservations", "meals", "activities"].each do |itinerary_item|
itinerary_items = self.public_send(itinerary_item)
itinerary_items.each do |t|
t.public_send("photos").each do |photo|
photos << photo.image
end
end
end
photos
end
def itinerary_waypoints
way_points = []
["reservations", "meals", "activities"].each do |itinerary_item|
itinerary_items = self.public_send(itinerary_item)
itinerary_items.each do |item|
way_points << find_parent(item).public_send("address") + " | "
end
end
way_points
end
def find_parent(item)
if item.class == Reservation
return item.public_send("hotel")
elsif item.class == Meal
return item.public_send("restaurant")
elsif item.class == Activity
return item.public_send("attraction")
end
end
end
| 27.588235 | 149 | 0.699787 |
1cc84fd1548f89bf524e6d64584cbbd75903d05f | 421 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Hdinsight::Mgmt::V2018_06_01_preview
module Models
#
# Defines values for PublicNetworkAccess
#
module PublicNetworkAccess
InboundAndOutbound = "InboundAndOutbound"
OutboundOnly = "OutboundOnly"
end
end
end
| 24.764706 | 70 | 0.733967 |
1ce13bb3c3123551eaed3af2b14a40e8abb0a44c | 3,495 | require 'date'
module MergetrainCheck
  # Renders the carriages of a GitLab merge train as an ASCII table
  # (via the Array#to_table monkey patch defined later in this file).
  class TraintableFormatter
    # @param max_length     MR titles longer than this are truncated
    # @param firstname_only when true, show only the author's first name
    def initialize(max_length, firstname_only)
      @max_length = max_length
      @firstname_only = firstname_only
    end

    # Formats +body+ (an array of carriage hashes from the merge-train
    # API) into a printable table, inserting a separator row wherever
    # the carriage status changes.
    # NOTE(review): assumes body is non-empty — body.first['status']
    # would raise on an empty train; confirm callers guard this.
    def format(body)
      values = [['St', 'Waiting', 'Running', 'MR', 'Pipe ID', 'User', 'Title']]
      values << spacer = nil
      previous_state = body.first['status']
      body.each do |carriage|
        begin_time = date_from_string carriage['created_at']
        pipeline_begin_time = date_from_string carriage['pipeline']['created_at']
        # still-running carriages have no merged_at yet
        end_time = carriage['merged_at'].nil? ? DateTime.now : date_from_string(carriage['merged_at'])
        is_finished_section = previous_state != carriage['status']
        previous_state = carriage['status']
        values << spacer if is_finished_section
        values << [pipeline_status(carriage['status']),
                   pretty_date_difference(begin_time, pipeline_begin_time),
                   pretty_date_difference(pipeline_begin_time, end_time),
                   carriage['merge_request']['iid'],
                   carriage['pipeline']['id'],
                   @firstname_only ? carriage['user']['name'].split.first : carriage['user']['name'],
                   truncate_string(carriage['merge_request']['title'], @max_length)]
      end
      header = ''
      if has_train_finished? body
        header = "\n ✋ 🚉 The train is at the station: There are currently no running merge trains!\n\n"
      end
      header + values.to_table
    end

    private

    # Emoji for known statuses; unknown statuses pass through unchanged.
    def pipeline_status(status)
      return '🚂' if status == 'fresh'
      return '✅' if status == 'merged'
      status
    end

    def truncate_string(string, len)
      return string if string.length <= len
      "#{string[0...len]}..."
    end

    # NOTE(review): '%Q' is passed as DateTime.parse's second (comp)
    # argument, not a format string — parsing is heuristic. Confirm.
    def date_from_string(datestring)
      DateTime.parse(datestring, '%Q')
    end

    # Human-readable difference between two DateTimes (Numeric#duration).
    def pretty_date_difference(from, to)
      (to.to_time - from.to_time).duration
    end

    # The train has finished when the first carriage is no longer fresh.
    def has_train_finished?(data)
      data.first['status'] != 'fresh'
    end
  end
end
class Array
  # Renders this array of rows as an ASCII table string. Each element is
  # a row (an array of cell values); a nil element is drawn as a
  # horizontal separator row. Columns are padded to the widest cell,
  # with a crude width adjustment so double-width emoji cells line up.
  # Note: short rows are padded in place (mutating the row arrays).
  def to_table
    widths = reduce([]) do |acc, row|
      next acc if row.nil?
      row.each_with_index.map do |cell, col|
        [acc[col] || 0, cell.to_s.length + count_emojis(cell.to_s)].max
      end
    end
    rule = '-' * (widths.inject(&:+) + (3 * widths.count) + 1) + "\n"
    out = rule.dup
    each do |row|
      cells =
        if row.nil?
          widths.map { |w| '-' * w }
        else
          padded = row.fill(nil, row.size..(widths.size - 1))
          padded.each_with_index.map do |cell, col|
            cell.to_s + ' ' * (widths[col] - cell.to_s.length - count_emojis(cell.to_s))
          end
        end
      out << '| ' + cells.join(' | ') + " |\n"
    end
    out << rule
  end

  private

  # Rough count of double-width characters (emoji / symbol ranges); the
  # literal '|' inside the character class is matched as itself.
  def count_emojis(string)
    string.scan(/[\u{1F300}-\u{1F5FF}|\u{1F1E6}-\u{1F1FF}|\u{2700}-\u{27BF}|\u{1F900}-\u{1F9FF}|\u{1F600}-\u{1F64F}|\u{1F680}-\u{1F6FF}|\u{2600}-\u{26FF}]/).length
  end
end
class Numeric
  # Formats self (a duration in seconds, e.g. a Time difference) as a
  # compact human-readable string: 90061.duration #=> "1D 1h 1m 1s".
  # Zero-valued units are omitted; 0.duration returns "" (unchanged
  # historical behavior).
  def duration
    rest, secs = divmod(60)
    rest, mins = rest.divmod(60)
    days, hours = rest.divmod(24)

    parts = []
    # Fix: was "#{days}D " — the trailing space doubled the separator
    # added by join(' ') whenever days > 0.
    parts << "#{days}D" if days > 0
    parts << "#{hours}h" if hours > 0
    parts << "#{mins}m" if mins > 0
    parts << "#{secs.to_i}s" if secs.to_i > 0
    parts.join(' ')
  end
end
| 31.486486 | 163 | 0.590558 |
e9f328824d4f6e706fde17255878156813946b7f | 1,817 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Kusto::Mgmt::V2019_11_09
module Models
#
# A principal assignment check name availability request.
#
class DatabasePrincipalAssignmentCheckNameRequest
include MsRestAzure
# @return [String] Principal Assignment resource name.
attr_accessor :name
# @return [String] The type of resource,
# Microsoft.Kusto/clusters/databases/principalAssignments. Default value:
# 'Microsoft.Kusto/clusters/databases/principalAssignments' .
attr_accessor :type
#
# Mapper for DatabasePrincipalAssignmentCheckNameRequest class as Ruby
# Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'DatabasePrincipalAssignmentCheckNameRequest',
type: {
name: 'Composite',
class_name: 'DatabasePrincipalAssignmentCheckNameRequest',
model_properties: {
name: {
client_side_validation: true,
required: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: true,
is_constant: true,
serialized_name: 'type',
default_value: 'Microsoft.Kusto/clusters/databases/principalAssignments',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 29.306452 | 89 | 0.571822 |
388f5853e8f9b4b641cd05facc77f275e4a32e9e | 590 |
# Sonic Pi score for the melody of "Sare Jahan Se Accha".
e = 0.5 # duration of one eighth note, in beats

##| ##| SARE JAHAN SE ACCHA
play_pattern_timed [:F,:F,:E,:D,:E,:Db,:D,:D],
  [e,e,e,e,e,e,e,e], amp: 2, sustain: 0
sleep 1
##| ##| HINDOSTAN HAMARA HAMARAAA
# NOTE(review): 14 notes but only 13 durations below — Sonic Pi cycles
# the durations list so this still plays; confirm the counts are intended.
play_pattern_timed [ chord(:A, :minor), chord(:B, :minor) ,:D ,:E ,:F3 ,:G ,:F3 ,:F3 ,:E ,:G ,:F3 ,:E ,:D ,:Eb],
  [e,e,e,e,e,e,e,e,e,e,e,e,e], amp: 2, sustain: 0
sleep 1
##| HUM BULBULAIN HAI ISS KI
play_pattern_timed [:F3, :G, :A ,:A ,:A ,:F3, :G ,:B ,:A],
  [e,e,e,e,e,e,e,e,e], amp: 2, sustain: 0
sleep 1
##| ##| SARE JAHAN SE ACCHA
play_pattern_timed [:F,:F,:E,:D,:E,:Db,:D,:D],
  [e,e,e,e,e,e,e,e], amp: 2, sustain: 0
| 32.777778 | 112 | 0.545763 |
18e4277d7a089a31aad77b7f348464999af2378f | 35,688 | require 'zlib'
require 'abstract_unit'
require 'active_support/ordered_options'
class AssetTagHelperTest < ActionView::TestCase
tests ActionView::Helpers::AssetTagHelper
attr_reader :request
# Builds a minimal controller/request pair for the helper under test.
def setup
  super
  @controller = BasicController.new

  # Stub request object: just enough of the request surface
  # (script_name / protocol / ssl? / host_with_port / base_url) for the
  # asset tag helpers to compute asset URLs.
  @request = Class.new do
    attr_accessor :script_name
    def protocol() 'http://' end
    def ssl?() false end
    def host_with_port() 'localhost' end
    def base_url() 'http://www.example.com' end
  end.new

  @controller.request = @request
end
def url_for(*args)
"http://www.example.com"
end
AssetPathToTag = {
%(asset_path("foo")) => %(/foo),
%(asset_path("style.css")) => %(/style.css),
%(asset_path("xmlhr.js")) => %(/xmlhr.js),
%(asset_path("xml.png")) => %(/xml.png),
%(asset_path("dir/xml.png")) => %(/dir/xml.png),
%(asset_path("/dir/xml.png")) => %(/dir/xml.png),
%(asset_path("script.min")) => %(/script.min),
%(asset_path("script.min.js")) => %(/script.min.js),
%(asset_path("style.min")) => %(/style.min),
%(asset_path("style.min.css")) => %(/style.min.css),
%(asset_path("http://www.outside.com/image.jpg")) => %(http://www.outside.com/image.jpg),
%(asset_path("HTTP://www.outside.com/image.jpg")) => %(HTTP://www.outside.com/image.jpg),
%(asset_path("style", type: :stylesheet)) => %(/stylesheets/style.css),
%(asset_path("xmlhr", type: :javascript)) => %(/javascripts/xmlhr.js),
%(asset_path("xml.png", type: :image)) => %(/images/xml.png)
}
AutoDiscoveryToTag = {
%(auto_discovery_link_tag) => %(<link href="http://www.example.com" rel="alternate" title="RSS" type="application/rss+xml" />),
%(auto_discovery_link_tag(:rss)) => %(<link href="http://www.example.com" rel="alternate" title="RSS" type="application/rss+xml" />),
%(auto_discovery_link_tag(:atom)) => %(<link href="http://www.example.com" rel="alternate" title="ATOM" type="application/atom+xml" />),
%(auto_discovery_link_tag(:rss, :action => "feed")) => %(<link href="http://www.example.com" rel="alternate" title="RSS" type="application/rss+xml" />),
%(auto_discovery_link_tag(:rss, "http://localhost/feed")) => %(<link href="http://localhost/feed" rel="alternate" title="RSS" type="application/rss+xml" />),
%(auto_discovery_link_tag(:rss, "//localhost/feed")) => %(<link href="//localhost/feed" rel="alternate" title="RSS" type="application/rss+xml" />),
%(auto_discovery_link_tag(:rss, {:action => "feed"}, {:title => "My RSS"})) => %(<link href="http://www.example.com" rel="alternate" title="My RSS" type="application/rss+xml" />),
%(auto_discovery_link_tag(:rss, {}, {:title => "My RSS"})) => %(<link href="http://www.example.com" rel="alternate" title="My RSS" type="application/rss+xml" />),
%(auto_discovery_link_tag(nil, {}, {:type => "text/html"})) => %(<link href="http://www.example.com" rel="alternate" title="" type="text/html" />),
%(auto_discovery_link_tag(nil, {}, {:title => "No stream.. really", :type => "text/html"})) => %(<link href="http://www.example.com" rel="alternate" title="No stream.. really" type="text/html" />),
%(auto_discovery_link_tag(:rss, {}, {:title => "My RSS", :type => "text/html"})) => %(<link href="http://www.example.com" rel="alternate" title="My RSS" type="text/html" />),
%(auto_discovery_link_tag(:atom, {}, {:rel => "Not so alternate"})) => %(<link href="http://www.example.com" rel="Not so alternate" title="ATOM" type="application/atom+xml" />),
}
JavascriptPathToTag = {
%(javascript_path("xmlhr")) => %(/javascripts/xmlhr.js),
%(javascript_path("super/xmlhr")) => %(/javascripts/super/xmlhr.js),
%(javascript_path("/super/xmlhr.js")) => %(/super/xmlhr.js),
%(javascript_path("xmlhr.min")) => %(/javascripts/xmlhr.min.js),
%(javascript_path("xmlhr.min.js")) => %(/javascripts/xmlhr.min.js),
%(javascript_path("xmlhr.js?123")) => %(/javascripts/xmlhr.js?123),
%(javascript_path("xmlhr.js?body=1")) => %(/javascripts/xmlhr.js?body=1),
%(javascript_path("xmlhr.js#hash")) => %(/javascripts/xmlhr.js#hash),
%(javascript_path("xmlhr.js?123#hash")) => %(/javascripts/xmlhr.js?123#hash)
}
PathToJavascriptToTag = {
%(path_to_javascript("xmlhr")) => %(/javascripts/xmlhr.js),
%(path_to_javascript("super/xmlhr")) => %(/javascripts/super/xmlhr.js),
%(path_to_javascript("/super/xmlhr.js")) => %(/super/xmlhr.js)
}
JavascriptUrlToTag = {
%(javascript_url("xmlhr")) => %(http://www.example.com/javascripts/xmlhr.js),
%(javascript_url("super/xmlhr")) => %(http://www.example.com/javascripts/super/xmlhr.js),
%(javascript_url("/super/xmlhr.js")) => %(http://www.example.com/super/xmlhr.js)
}
UrlToJavascriptToTag = {
%(url_to_javascript("xmlhr")) => %(http://www.example.com/javascripts/xmlhr.js),
%(url_to_javascript("super/xmlhr")) => %(http://www.example.com/javascripts/super/xmlhr.js),
%(url_to_javascript("/super/xmlhr.js")) => %(http://www.example.com/super/xmlhr.js)
}
JavascriptIncludeToTag = {
%(javascript_include_tag("bank")) => %(<script src="/javascripts/bank.js" ></script>),
%(javascript_include_tag("bank.js")) => %(<script src="/javascripts/bank.js" ></script>),
%(javascript_include_tag("bank", :lang => "vbscript")) => %(<script lang="vbscript" src="/javascripts/bank.js" ></script>),
%(javascript_include_tag("http://example.com/all")) => %(<script src="http://example.com/all"></script>),
%(javascript_include_tag("http://example.com/all.js")) => %(<script src="http://example.com/all.js"></script>),
%(javascript_include_tag("//example.com/all.js")) => %(<script src="//example.com/all.js"></script>),
}
StylePathToTag = {
%(stylesheet_path("bank")) => %(/stylesheets/bank.css),
%(stylesheet_path("bank.css")) => %(/stylesheets/bank.css),
%(stylesheet_path('subdir/subdir')) => %(/stylesheets/subdir/subdir.css),
%(stylesheet_path('/subdir/subdir.css')) => %(/subdir/subdir.css),
%(stylesheet_path("style.min")) => %(/stylesheets/style.min.css),
%(stylesheet_path("style.min.css")) => %(/stylesheets/style.min.css)
}
PathToStyleToTag = {
%(path_to_stylesheet("style")) => %(/stylesheets/style.css),
%(path_to_stylesheet("style.css")) => %(/stylesheets/style.css),
%(path_to_stylesheet('dir/file')) => %(/stylesheets/dir/file.css),
%(path_to_stylesheet('/dir/file.rcss', :extname => false)) => %(/dir/file.rcss),
%(path_to_stylesheet('/dir/file', :extname => '.rcss')) => %(/dir/file.rcss)
}
StyleUrlToTag = {
%(stylesheet_url("bank")) => %(http://www.example.com/stylesheets/bank.css),
%(stylesheet_url("bank.css")) => %(http://www.example.com/stylesheets/bank.css),
%(stylesheet_url('subdir/subdir')) => %(http://www.example.com/stylesheets/subdir/subdir.css),
%(stylesheet_url('/subdir/subdir.css')) => %(http://www.example.com/subdir/subdir.css)
}
UrlToStyleToTag = {
%(url_to_stylesheet("style")) => %(http://www.example.com/stylesheets/style.css),
%(url_to_stylesheet("style.css")) => %(http://www.example.com/stylesheets/style.css),
%(url_to_stylesheet('dir/file')) => %(http://www.example.com/stylesheets/dir/file.css),
%(url_to_stylesheet('/dir/file.rcss', :extname => false)) => %(http://www.example.com/dir/file.rcss),
%(url_to_stylesheet('/dir/file', :extname => '.rcss')) => %(http://www.example.com/dir/file.rcss)
}
StyleLinkToTag = {
%(stylesheet_link_tag("bank")) => %(<link href="/stylesheets/bank.css" media="screen" rel="stylesheet" />),
%(stylesheet_link_tag("bank.css")) => %(<link href="/stylesheets/bank.css" media="screen" rel="stylesheet" />),
%(stylesheet_link_tag("/elsewhere/file")) => %(<link href="/elsewhere/file.css" media="screen" rel="stylesheet" />),
%(stylesheet_link_tag("subdir/subdir")) => %(<link href="/stylesheets/subdir/subdir.css" media="screen" rel="stylesheet" />),
%(stylesheet_link_tag("bank", :media => "all")) => %(<link href="/stylesheets/bank.css" media="all" rel="stylesheet" />),
%(stylesheet_link_tag("http://www.example.com/styles/style")) => %(<link href="http://www.example.com/styles/style" media="screen" rel="stylesheet" />),
%(stylesheet_link_tag("http://www.example.com/styles/style.css")) => %(<link href="http://www.example.com/styles/style.css" media="screen" rel="stylesheet" />),
%(stylesheet_link_tag("//www.example.com/styles/style.css")) => %(<link href="//www.example.com/styles/style.css" media="screen" rel="stylesheet" />),
}
ImagePathToTag = {
%(image_path("xml")) => %(/images/xml),
%(image_path("xml.png")) => %(/images/xml.png),
%(image_path("dir/xml.png")) => %(/images/dir/xml.png),
%(image_path("/dir/xml.png")) => %(/dir/xml.png)
}
PathToImageToTag = {
%(path_to_image("xml")) => %(/images/xml),
%(path_to_image("xml.png")) => %(/images/xml.png),
%(path_to_image("dir/xml.png")) => %(/images/dir/xml.png),
%(path_to_image("/dir/xml.png")) => %(/dir/xml.png)
}
ImageUrlToTag = {
%(image_url("xml")) => %(http://www.example.com/images/xml),
%(image_url("xml.png")) => %(http://www.example.com/images/xml.png),
%(image_url("dir/xml.png")) => %(http://www.example.com/images/dir/xml.png),
%(image_url("/dir/xml.png")) => %(http://www.example.com/dir/xml.png)
}
UrlToImageToTag = {
%(url_to_image("xml")) => %(http://www.example.com/images/xml),
%(url_to_image("xml.png")) => %(http://www.example.com/images/xml.png),
%(url_to_image("dir/xml.png")) => %(http://www.example.com/images/dir/xml.png),
%(url_to_image("/dir/xml.png")) => %(http://www.example.com/dir/xml.png)
}
ImageLinkToTag = {
%(image_tag("xml.png")) => %(<img alt="Xml" src="/images/xml.png" />),
%(image_tag("rss.gif", :alt => "rss syndication")) => %(<img alt="rss syndication" src="/images/rss.gif" />),
%(image_tag("gold.png", :size => "20")) => %(<img alt="Gold" height="20" src="/images/gold.png" width="20" />),
%(image_tag("gold.png", :size => "45x70")) => %(<img alt="Gold" height="70" src="/images/gold.png" width="45" />),
%(image_tag("gold.png", "size" => "45x70")) => %(<img alt="Gold" height="70" src="/images/gold.png" width="45" />),
%(image_tag("error.png", "size" => "45 x 70")) => %(<img alt="Error" src="/images/error.png" />),
%(image_tag("error.png", "size" => "x")) => %(<img alt="Error" src="/images/error.png" />),
%(image_tag("google.com.png")) => %(<img alt="Google.com" src="/images/google.com.png" />),
%(image_tag("slash..png")) => %(<img alt="Slash." src="/images/slash..png" />),
%(image_tag(".pdf.png")) => %(<img alt=".pdf" src="/images/.pdf.png" />),
%(image_tag("http://www.rubyonrails.com/images/rails.png")) => %(<img alt="Rails" src="http://www.rubyonrails.com/images/rails.png" />),
%(image_tag("//www.rubyonrails.com/images/rails.png")) => %(<img alt="Rails" src="//www.rubyonrails.com/images/rails.png" />),
%(image_tag("mouse.png", :alt => nil)) => %(<img src="/images/mouse.png" />),
%(image_tag("data:image/gif;base64,R0lGODlhAQABAID/AMDAwAAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==", :alt => nil)) => %(<img src="data:image/gif;base64,R0lGODlhAQABAID/AMDAwAAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==" />),
%(image_tag("")) => %(<img src="" />)
}
FaviconLinkToTag = {
%(favicon_link_tag) => %(<link href="/images/favicon.ico" rel="shortcut icon" type="image/x-icon" />),
%(favicon_link_tag 'favicon.ico') => %(<link href="/images/favicon.ico" rel="shortcut icon" type="image/x-icon" />),
%(favicon_link_tag 'favicon.ico', :rel => 'foo') => %(<link href="/images/favicon.ico" rel="foo" type="image/x-icon" />),
%(favicon_link_tag 'favicon.ico', :rel => 'foo', :type => 'bar') => %(<link href="/images/favicon.ico" rel="foo" type="bar" />),
%(favicon_link_tag 'mb-icon.png', :rel => 'apple-touch-icon', :type => 'image/png') => %(<link href="/images/mb-icon.png" rel="apple-touch-icon" type="image/png" />)
}
VideoPathToTag = {
%(video_path("xml")) => %(/videos/xml),
%(video_path("xml.ogg")) => %(/videos/xml.ogg),
%(video_path("dir/xml.ogg")) => %(/videos/dir/xml.ogg),
%(video_path("/dir/xml.ogg")) => %(/dir/xml.ogg)
}
PathToVideoToTag = {
%(path_to_video("xml")) => %(/videos/xml),
%(path_to_video("xml.ogg")) => %(/videos/xml.ogg),
%(path_to_video("dir/xml.ogg")) => %(/videos/dir/xml.ogg),
%(path_to_video("/dir/xml.ogg")) => %(/dir/xml.ogg)
}
VideoUrlToTag = {
%(video_url("xml")) => %(http://www.example.com/videos/xml),
%(video_url("xml.ogg")) => %(http://www.example.com/videos/xml.ogg),
%(video_url("dir/xml.ogg")) => %(http://www.example.com/videos/dir/xml.ogg),
%(video_url("/dir/xml.ogg")) => %(http://www.example.com/dir/xml.ogg)
}
UrlToVideoToTag = {
%(url_to_video("xml")) => %(http://www.example.com/videos/xml),
%(url_to_video("xml.ogg")) => %(http://www.example.com/videos/xml.ogg),
%(url_to_video("dir/xml.ogg")) => %(http://www.example.com/videos/dir/xml.ogg),
%(url_to_video("/dir/xml.ogg")) => %(http://www.example.com/dir/xml.ogg)
}
VideoLinkToTag = {
%(video_tag("xml.ogg")) => %(<video src="/videos/xml.ogg"></video>),
%(video_tag("rss.m4v", :autoplay => true, :controls => true)) => %(<video autoplay="autoplay" controls="controls" src="/videos/rss.m4v"></video>),
%(video_tag("rss.m4v", :autobuffer => true)) => %(<video autobuffer="autobuffer" src="/videos/rss.m4v"></video>),
%(video_tag("gold.m4v", :size => "160x120")) => %(<video height="120" src="/videos/gold.m4v" width="160"></video>),
%(video_tag("gold.m4v", "size" => "320x240")) => %(<video height="240" src="/videos/gold.m4v" width="320"></video>),
%(video_tag("trailer.ogg", :poster => "screenshot.png")) => %(<video poster="/images/screenshot.png" src="/videos/trailer.ogg"></video>),
%(video_tag("error.avi", "size" => "100")) => %(<video height="100" src="/videos/error.avi" width="100"></video>),
%(video_tag("error.avi", "size" => "100 x 100")) => %(<video src="/videos/error.avi"></video>),
%(video_tag("error.avi", "size" => "x")) => %(<video src="/videos/error.avi"></video>),
%(video_tag("http://media.rubyonrails.org/video/rails_blog_2.mov")) => %(<video src="http://media.rubyonrails.org/video/rails_blog_2.mov"></video>),
%(video_tag("//media.rubyonrails.org/video/rails_blog_2.mov")) => %(<video src="//media.rubyonrails.org/video/rails_blog_2.mov"></video>),
%(video_tag("multiple.ogg", "multiple.avi")) => %(<video><source src="/videos/multiple.ogg" /><source src="/videos/multiple.avi" /></video>),
%(video_tag(["multiple.ogg", "multiple.avi"])) => %(<video><source src="/videos/multiple.ogg" /><source src="/videos/multiple.avi" /></video>),
%(video_tag(["multiple.ogg", "multiple.avi"], :size => "160x120", :controls => true)) => %(<video controls="controls" height="120" width="160"><source src="/videos/multiple.ogg" /><source src="/videos/multiple.avi" /></video>)
}
AudioPathToTag = {
%(audio_path("xml")) => %(/audios/xml),
%(audio_path("xml.wav")) => %(/audios/xml.wav),
%(audio_path("dir/xml.wav")) => %(/audios/dir/xml.wav),
%(audio_path("/dir/xml.wav")) => %(/dir/xml.wav)
}
PathToAudioToTag = {
%(path_to_audio("xml")) => %(/audios/xml),
%(path_to_audio("xml.wav")) => %(/audios/xml.wav),
%(path_to_audio("dir/xml.wav")) => %(/audios/dir/xml.wav),
%(path_to_audio("/dir/xml.wav")) => %(/dir/xml.wav)
}
AudioUrlToTag = {
%(audio_url("xml")) => %(http://www.example.com/audios/xml),
%(audio_url("xml.wav")) => %(http://www.example.com/audios/xml.wav),
%(audio_url("dir/xml.wav")) => %(http://www.example.com/audios/dir/xml.wav),
%(audio_url("/dir/xml.wav")) => %(http://www.example.com/dir/xml.wav)
}
UrlToAudioToTag = {
%(url_to_audio("xml")) => %(http://www.example.com/audios/xml),
%(url_to_audio("xml.wav")) => %(http://www.example.com/audios/xml.wav),
%(url_to_audio("dir/xml.wav")) => %(http://www.example.com/audios/dir/xml.wav),
%(url_to_audio("/dir/xml.wav")) => %(http://www.example.com/dir/xml.wav)
}
AudioLinkToTag = {
%(audio_tag("xml.wav")) => %(<audio src="/audios/xml.wav"></audio>),
%(audio_tag("rss.wav", :autoplay => true, :controls => true)) => %(<audio autoplay="autoplay" controls="controls" src="/audios/rss.wav"></audio>),
%(audio_tag("http://media.rubyonrails.org/audio/rails_blog_2.mov")) => %(<audio src="http://media.rubyonrails.org/audio/rails_blog_2.mov"></audio>),
%(audio_tag("//media.rubyonrails.org/audio/rails_blog_2.mov")) => %(<audio src="//media.rubyonrails.org/audio/rails_blog_2.mov"></audio>),
%(audio_tag("audio.mp3", "audio.ogg")) => %(<audio><source src="/audios/audio.mp3" /><source src="/audios/audio.ogg" /></audio>),
%(audio_tag(["audio.mp3", "audio.ogg"])) => %(<audio><source src="/audios/audio.mp3" /><source src="/audios/audio.ogg" /></audio>),
%(audio_tag(["audio.mp3", "audio.ogg"], :autobuffer => true, :controls => true)) => %(<audio autobuffer="autobuffer" controls="controls"><source src="/audios/audio.mp3" /><source src="/audios/audio.ogg" /></audio>)
}
FontPathToTag = {
%(font_path("font.eot")) => %(/fonts/font.eot),
%(font_path("font.eot#iefix")) => %(/fonts/font.eot#iefix),
%(font_path("font.woff")) => %(/fonts/font.woff),
%(font_path("font.ttf")) => %(/fonts/font.ttf),
%(font_path("font.ttf?123")) => %(/fonts/font.ttf?123)
}
def test_autodiscovery_link_tag_with_unknown_type_but_not_pass_type_option_key
assert_raise(ArgumentError) do
auto_discovery_link_tag(:xml)
end
end
def test_autodiscovery_link_tag_with_unknown_type
result = auto_discovery_link_tag(:xml, '/feed.xml', :type => 'application/xml')
expected = %(<link href="/feed.xml" rel="alternate" title="XML" type="application/xml" />)
assert_equal expected, result
end
def test_asset_path_tag
AssetPathToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_asset_path_tag_to_not_create_duplicate_slashes
@controller.config.asset_host = "host/"
assert_dom_equal('http://host/foo', asset_path("foo"))
@controller.config.relative_url_root = '/some/root/'
assert_dom_equal('http://host/some/root/foo', asset_path("foo"))
end
def test_compute_asset_public_path
assert_equal "/robots.txt", compute_asset_path("robots.txt")
assert_equal "/robots.txt", compute_asset_path("/robots.txt")
assert_equal "/javascripts/foo.js", compute_asset_path("foo.js", :type => :javascript)
assert_equal "/javascripts/foo.js", compute_asset_path("/foo.js", :type => :javascript)
assert_equal "/stylesheets/foo.css", compute_asset_path("foo.css", :type => :stylesheet)
end
def test_auto_discovery_link_tag
AutoDiscoveryToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_javascript_path
JavascriptPathToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_path_to_javascript_alias_for_javascript_path
PathToJavascriptToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_javascript_url
JavascriptUrlToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_url_to_javascript_alias_for_javascript_url
UrlToJavascriptToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_javascript_include_tag
JavascriptIncludeToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_javascript_include_tag_with_missing_source
assert_nothing_raised {
javascript_include_tag('missing_security_guard')
}
assert_nothing_raised {
javascript_include_tag('http://example.com/css/missing_security_guard')
}
end
def test_javascript_include_tag_is_html_safe
assert javascript_include_tag("prototype").html_safe?
end
def test_javascript_include_tag_relative_protocol
@controller.config.asset_host = "assets.example.com"
assert_dom_equal %(<script src="//assets.example.com/javascripts/prototype.js"></script>), javascript_include_tag('prototype', protocol: :relative)
end
def test_javascript_include_tag_default_protocol
@controller.config.asset_host = "assets.example.com"
@controller.config.default_asset_host_protocol = :relative
assert_dom_equal %(<script src="//assets.example.com/javascripts/prototype.js"></script>), javascript_include_tag('prototype')
end
def test_stylesheet_path
StylePathToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_path_to_stylesheet_alias_for_stylesheet_path
PathToStyleToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_stylesheet_url
StyleUrlToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_url_to_stylesheet_alias_for_stylesheet_url
UrlToStyleToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_stylesheet_link_tag
StyleLinkToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_stylesheet_link_tag_with_missing_source
assert_nothing_raised {
stylesheet_link_tag('missing_security_guard')
}
assert_nothing_raised {
stylesheet_link_tag('http://example.com/css/missing_security_guard')
}
end
def test_stylesheet_link_tag_is_html_safe
assert stylesheet_link_tag('dir/file').html_safe?
assert stylesheet_link_tag('dir/other/file', 'dir/file2').html_safe?
end
def test_stylesheet_link_tag_escapes_options
assert_dom_equal %(<link href="/file.css" media="<script>" rel="stylesheet" />), stylesheet_link_tag('/file', :media => '<script>')
end
def test_stylesheet_link_tag_should_not_output_the_same_asset_twice
assert_dom_equal %(<link href="/stylesheets/wellington.css" media="screen" rel="stylesheet" />\n<link href="/stylesheets/amsterdam.css" media="screen" rel="stylesheet" />), stylesheet_link_tag('wellington', 'wellington', 'amsterdam')
end
def test_stylesheet_link_tag_with_relative_protocol
@controller.config.asset_host = "assets.example.com"
assert_dom_equal %(<link href="//assets.example.com/stylesheets/wellington.css" media="screen" rel="stylesheet" />), stylesheet_link_tag('wellington', protocol: :relative)
end
def test_stylesheet_link_tag_with_default_protocol
@controller.config.asset_host = "assets.example.com"
@controller.config.default_asset_host_protocol = :relative
assert_dom_equal %(<link href="//assets.example.com/stylesheets/wellington.css" media="screen" rel="stylesheet" />), stylesheet_link_tag('wellington')
end
def test_image_path
ImagePathToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_path_to_image_alias_for_image_path
PathToImageToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_image_url
ImageUrlToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_url_to_image_alias_for_image_url
UrlToImageToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_image_alt
[nil, '/', '/foo/bar/', 'foo/bar/'].each do |prefix|
assert_equal 'Rails', image_alt("#{prefix}rails.png")
assert_equal 'Rails', image_alt("#{prefix}rails-9c0a079bdd7701d7e729bd956823d153.png")
assert_equal 'Long file name with hyphens', image_alt("#{prefix}long-file-name-with-hyphens.png")
assert_equal 'Long file name with underscores', image_alt("#{prefix}long_file_name_with_underscores.png")
end
end
def test_image_tag
ImageLinkToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_image_tag_does_not_modify_options
options = {:size => '16x10'}
image_tag('icon', options)
assert_equal({:size => '16x10'}, options)
end
def test_favicon_link_tag
FaviconLinkToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_video_path
VideoPathToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_path_to_video_alias_for_video_path
PathToVideoToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_video_url
VideoUrlToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_url_to_video_alias_for_video_url
UrlToVideoToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_video_tag
VideoLinkToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_audio_path
AudioPathToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_path_to_audio_alias_for_audio_path
PathToAudioToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_audio_url
AudioUrlToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_url_to_audio_alias_for_audio_url
UrlToAudioToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_audio_tag
AudioLinkToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_font_path
FontPathToTag.each { |method, tag| assert_dom_equal(tag, eval(method)) }
end
def test_video_audio_tag_does_not_modify_options
options = {:autoplay => true}
video_tag('video', options)
assert_equal({:autoplay => true}, options)
audio_tag('audio', options)
assert_equal({:autoplay => true}, options)
end
def test_image_tag_interpreting_email_cid_correctly
# An inline image has no need for an alt tag to be automatically generated from the cid:
assert_equal '<img src="cid:thi%25%25sis@acontentid" />', image_tag("cid:thi%25%25sis@acontentid")
end
def test_image_tag_interpreting_email_adding_optional_alt_tag
assert_equal '<img alt="Image" src="cid:thi%25%25sis@acontentid" />', image_tag("cid:thi%25%25sis@acontentid", :alt => "Image")
end
def test_should_not_modify_source_string
source = '/images/rails.png'
copy = source.dup
image_tag(source)
assert_equal copy, source
end
def test_image_path_with_asset_host_proc_returning_nil
@controller.config.asset_host = Proc.new do |source|
unless source.end_with?("tiff")
"cdn.example.com"
end
end
assert_equal "/images/file.tiff", image_path("file.tiff")
assert_equal "http://cdn.example.com/images/file.png", image_path("file.png")
end
def test_caching_image_path_with_caching_and_proc_asset_host_using_request
@controller.config.asset_host = Proc.new do |source, request|
if request.ssl?
"#{request.protocol}#{request.host_with_port}"
else
"#{request.protocol}assets#{source.length}.example.com"
end
end
@controller.request.stubs(:ssl?).returns(false)
assert_equal "http://assets15.example.com/images/xml.png", image_path("xml.png")
@controller.request.stubs(:ssl?).returns(true)
assert_equal "http://localhost/images/xml.png", image_path("xml.png")
end
end
# Exercises the asset helpers when the application is mounted under a relative
# URL root (/collaboration/hieraki) and served over a non-HTTP scheme
# (gopher://), to prove the helpers derive everything from the request/config
# rather than hard-coding http or the domain root.
class AssetTagHelperNonVhostTest < ActionView::TestCase
  tests ActionView::Helpers::AssetTagHelper
  attr_reader :request
  def setup
    super
    @controller = BasicController.new
    @controller.config.relative_url_root = "/collaboration/hieraki"
    @request = Struct.new(:protocol, :base_url).new("gopher://", "gopher://www.example.com")
    @controller.request = @request
  end
  def url_for(options)
    "http://www.example.com/collaboration/hieraki"
  end
  def test_should_compute_proper_path
    assert_dom_equal(%(<link href="http://www.example.com/collaboration/hieraki" rel="alternate" title="RSS" type="application/rss+xml" />), auto_discovery_link_tag)
    assert_dom_equal(%(/collaboration/hieraki/javascripts/xmlhr.js), javascript_path("xmlhr"))
    assert_dom_equal(%(/collaboration/hieraki/stylesheets/style.css), stylesheet_path("style"))
    assert_dom_equal(%(/collaboration/hieraki/images/xml.png), image_path("xml.png"))
  end
  def test_should_return_nothing_if_asset_host_isnt_configured
    # FIX: assert_equal(nil, ...) is deprecated in Minitest and raises an
    # ArgumentError in Minitest 6 — use assert_nil for nil expectations.
    assert_nil compute_asset_host("foo")
  end
  def test_should_current_request_host_is_always_returned_for_request
    assert_equal "gopher://www.example.com", compute_asset_host("foo", :protocol => :request)
  end
  def test_should_ignore_relative_root_path_on_complete_url
    assert_dom_equal(%(http://www.example.com/images/xml.png), image_path("http://www.example.com/images/xml.png"))
  end
  def test_should_return_simple_string_asset_host
    @controller.config.asset_host = "assets.example.com"
    assert_equal "gopher://assets.example.com", compute_asset_host("foo")
  end
  def test_should_return_relative_asset_host
    @controller.config.asset_host = "assets.example.com"
    assert_equal "//assets.example.com", compute_asset_host("foo", :protocol => :relative)
  end
  def test_should_return_custom_protocol_asset_host
    @controller.config.asset_host = "assets.example.com"
    assert_equal "ftp://assets.example.com", compute_asset_host("foo", :protocol => "ftp")
  end
  def test_should_compute_proper_path_with_asset_host
    @controller.config.asset_host = "assets.example.com"
    assert_dom_equal(%(<link href="http://www.example.com/collaboration/hieraki" rel="alternate" title="RSS" type="application/rss+xml" />), auto_discovery_link_tag)
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/javascripts/xmlhr.js), javascript_path("xmlhr"))
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/stylesheets/style.css), stylesheet_path("style"))
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/images/xml.png), image_path("xml.png"))
  end
  def test_should_compute_proper_path_with_asset_host_and_default_protocol
    @controller.config.asset_host = "assets.example.com"
    @controller.config.default_asset_host_protocol = :request
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/javascripts/xmlhr.js), javascript_path("xmlhr"))
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/stylesheets/style.css), stylesheet_path("style"))
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/images/xml.png), image_path("xml.png"))
  end
  def test_should_compute_proper_url_with_asset_host
    @controller.config.asset_host = "assets.example.com"
    assert_dom_equal(%(<link href="http://www.example.com/collaboration/hieraki" rel="alternate" title="RSS" type="application/rss+xml" />), auto_discovery_link_tag)
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/javascripts/xmlhr.js), javascript_url("xmlhr"))
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/stylesheets/style.css), stylesheet_url("style"))
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/images/xml.png), image_url("xml.png"))
  end
  def test_should_compute_proper_url_with_asset_host_and_default_protocol
    @controller.config.asset_host = "assets.example.com"
    @controller.config.default_asset_host_protocol = :request
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/javascripts/xmlhr.js), javascript_url("xmlhr"))
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/stylesheets/style.css), stylesheet_url("style"))
    assert_dom_equal(%(gopher://assets.example.com/collaboration/hieraki/images/xml.png), image_url("xml.png"))
  end
  def test_should_return_asset_host_with_protocol
    @controller.config.asset_host = "http://assets.example.com"
    assert_equal "http://assets.example.com", compute_asset_host("foo")
  end
  def test_should_ignore_asset_host_on_complete_url
    @controller.config.asset_host = "http://assets.example.com"
    assert_dom_equal(%(<link href="http://bar.example.com/stylesheets/style.css" media="screen" rel="stylesheet" />), stylesheet_link_tag("http://bar.example.com/stylesheets/style.css"))
  end
  def test_should_ignore_asset_host_on_scheme_relative_url
    @controller.config.asset_host = "http://assets.example.com"
    assert_dom_equal(%(<link href="//bar.example.com/stylesheets/style.css" media="screen" rel="stylesheet" />), stylesheet_link_tag("//bar.example.com/stylesheets/style.css"))
  end
  # A %d in asset_host is expanded to a digit 0-3 for domain sharding.
  def test_should_wildcard_asset_host
    @controller.config.asset_host = 'http://a%d.example.com'
    assert_match(%r(http://a[0123].example.com), compute_asset_host("foo"))
  end
  def test_should_wildcard_asset_host_between_zero_and_four
    @controller.config.asset_host = 'http://a%d.example.com'
    assert_match(%r(http://a[0123].example.com/collaboration/hieraki/images/xml.png), image_path('xml.png'))
    assert_match(%r(http://a[0123].example.com/collaboration/hieraki/images/xml.png), image_url('xml.png'))
  end
  def test_asset_host_without_protocol_should_be_protocol_relative
    @controller.config.asset_host = 'a.example.com'
    assert_equal 'gopher://a.example.com/collaboration/hieraki/images/xml.png', image_path('xml.png')
    assert_equal 'gopher://a.example.com/collaboration/hieraki/images/xml.png', image_url('xml.png')
  end
  def test_asset_host_without_protocol_should_be_protocol_relative_even_if_path_present
    @controller.config.asset_host = 'a.example.com/files/go/here'
    assert_equal 'gopher://a.example.com/files/go/here/collaboration/hieraki/images/xml.png', image_path('xml.png')
    assert_equal 'gopher://a.example.com/files/go/here/collaboration/hieraki/images/xml.png', image_url('xml.png')
  end
  def test_assert_css_and_js_of_the_same_name_return_correct_extension
    assert_dom_equal(%(/collaboration/hieraki/javascripts/foo.js), javascript_path("foo"))
    assert_dom_equal(%(/collaboration/hieraki/stylesheets/foo.css), stylesheet_path("foo"))
  end
end
# Proves AssetUrlHelper works when mixed directly into a controller (outside a
# view context): paths stay relative, URLs pick up the request's base_url.
class AssetUrlHelperControllerTest < ActionView::TestCase
  tests ActionView::Helpers::AssetUrlHelper
  def setup
    super
    @controller = BasicController.new
    @controller.extend ActionView::Helpers::AssetUrlHelper
    # Minimal fake request exposing just what the URL helpers read.
    @request = Class.new do
      attr_accessor :script_name
      def protocol() 'http://' end
      def ssl?() false end
      def host_with_port() 'www.example.com' end
      def base_url() 'http://www.example.com' end
    end.new
    @controller.request = @request
  end
  def test_asset_path
    assert_equal "/foo", @controller.asset_path("foo")
  end
  def test_asset_url
    assert_equal "http://www.example.com/foo", @controller.asset_url("foo")
  end
end
# Exercises asset_path/asset_url when the helper is mixed into a bare
# module with no controller: URLs stay relative unless a request or a
# config.asset_host is made available.
class AssetUrlHelperEmptyModuleTest < ActionView::TestCase
  tests ActionView::Helpers::AssetUrlHelper

  def setup
    super
    @module = Module.new
    @module.extend ActionView::Helpers::AssetUrlHelper
  end

  def test_asset_path
    assert_equal '/foo', @module.asset_path("foo")
  end

  def test_asset_url
    assert_equal '/foo', @module.asset_url("foo")
  end

  # Giving the module a +request+ makes asset_url absolute.
  def test_asset_url_with_request
    @module.define_singleton_method(:request) do
      Struct.new(:base_url, :script_name).new("http://www.example.com", nil)
    end

    assert @module.request
    assert_equal 'http://www.example.com/foo', @module.asset_url("foo")
  end

  # config.asset_host is honored even without any request object.
  def test_asset_url_with_config_asset_host
    @module.define_singleton_method(:config) do
      Struct.new(:asset_host).new("http://www.example.com")
    end

    assert @module.config.asset_host
    assert_equal 'http://www.example.com/foo', @module.asset_url("foo")
  end
end
| 46.773263 | 237 | 0.681714 |
21fa665d9205bf5c3ce842047a16021f644df8e9 | 14,301 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "simplecov"
require "minitest/autorun"
require "gapic/grpc/service_stub"
require "google/cloud/talent/v4beta1/company_service_pb"
require "google/cloud/talent/v4beta1/company_service_services_pb"
require "google/cloud/talent/v4beta1/company_service"
class ::Google::Cloud::Talent::V4beta1::CompanyService::ClientTest < Minitest::Test
# Minimal test double standing in for the generated transport stub.
# Records every RPC invocation so tests can assert on the number of
# calls and on the (optionally transformed) captured arguments.
class ClientStub
  attr_accessor :call_rpc_count, :requests

  # +response+/+operation+ are returned and yielded on every call; the
  # optional block maps the call arguments into the +requests+ log.
  def initialize response, operation, &block
    @response, @operation, @block = response, operation, block
    @call_rpc_count = 0
    @requests = []
  end

  # Mimics Gapic::ServiceStub#call_rpc: logs the call, yields the canned
  # response/operation to the caller's block, and returns the response.
  def call_rpc *args
    @call_rpc_count += 1
    @requests.push(@block&.call(*args))
    yield @response, @operation if block_given?
    @response
  end
end
# Exercises Client#create_company against a stubbed transport, covering
# every supported invocation style (hash, named arguments, protobuf
# request object — each with and without explicit call options).
def test_create_company
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Talent::V4beta1::Company.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  company = {}

  # The stub's block asserts on the coerced request that reaches the
  # transport layer for every invocation below.
  create_company_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :create_company, name
    assert_kind_of ::Google::Cloud::Talent::V4beta1::CreateCompanyRequest, request
    assert_equal "hello world", request.parent
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Talent::V4beta1::Company), request.company
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, create_company_client_stub do
    # Create client
    client = ::Google::Cloud::Talent::V4beta1::CompanyService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.create_company({ parent: parent, company: company }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.create_company parent: parent, company: company do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.create_company ::Google::Cloud::Talent::V4beta1::CreateCompanyRequest.new(parent: parent, company: company) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.create_company({ parent: parent, company: company }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.create_company ::Google::Cloud::Talent::V4beta1::CreateCompanyRequest.new(parent: parent, company: company), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls — exactly one RPC per invocation style above.
    assert_equal 5, create_company_client_stub.call_rpc_count
  end
end
# Exercises Client#get_company against a stubbed transport, covering
# every supported invocation style.
def test_get_company
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Talent::V4beta1::Company.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  name = "hello world"

  # The stub's block asserts on the coerced request reaching transport.
  get_company_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :get_company, name
    assert_kind_of ::Google::Cloud::Talent::V4beta1::GetCompanyRequest, request
    assert_equal "hello world", request.name
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, get_company_client_stub do
    # Create client
    client = ::Google::Cloud::Talent::V4beta1::CompanyService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.get_company({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.get_company name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.get_company ::Google::Cloud::Talent::V4beta1::GetCompanyRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.get_company({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.get_company ::Google::Cloud::Talent::V4beta1::GetCompanyRequest.new(name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls — exactly one RPC per invocation style above.
    assert_equal 5, get_company_client_stub.call_rpc_count
  end
end
# Exercises Client#update_company against a stubbed transport, covering
# every supported invocation style.
def test_update_company
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Talent::V4beta1::Company.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  company = {}
  update_mask = {}

  # The stub's block asserts on the coerced request reaching transport.
  update_company_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :update_company, name
    assert_kind_of ::Google::Cloud::Talent::V4beta1::UpdateCompanyRequest, request
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Talent::V4beta1::Company), request.company
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request.update_mask
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, update_company_client_stub do
    # Create client
    client = ::Google::Cloud::Talent::V4beta1::CompanyService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.update_company({ company: company, update_mask: update_mask }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.update_company company: company, update_mask: update_mask do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.update_company ::Google::Cloud::Talent::V4beta1::UpdateCompanyRequest.new(company: company, update_mask: update_mask) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.update_company({ company: company, update_mask: update_mask }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.update_company ::Google::Cloud::Talent::V4beta1::UpdateCompanyRequest.new(company: company, update_mask: update_mask), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls — exactly one RPC per invocation style above.
    assert_equal 5, update_company_client_stub.call_rpc_count
  end
end
# Exercises Client#delete_company against a stubbed transport, covering
# every supported invocation style. The RPC returns Empty on success.
def test_delete_company
  # Create GRPC objects.
  grpc_response = ::Google::Protobuf::Empty.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  name = "hello world"

  # The stub's block asserts on the coerced request reaching transport.
  delete_company_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :delete_company, name
    assert_kind_of ::Google::Cloud::Talent::V4beta1::DeleteCompanyRequest, request
    assert_equal "hello world", request.name
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, delete_company_client_stub do
    # Create client
    client = ::Google::Cloud::Talent::V4beta1::CompanyService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.delete_company({ name: name }) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.delete_company name: name do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.delete_company ::Google::Cloud::Talent::V4beta1::DeleteCompanyRequest.new(name: name) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.delete_company({ name: name }, grpc_options) do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.delete_company ::Google::Cloud::Talent::V4beta1::DeleteCompanyRequest.new(name: name), grpc_options do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Verify method calls — exactly one RPC per invocation style above.
    assert_equal 5, delete_company_client_stub.call_rpc_count
  end
end
# Exercises Client#list_companies against a stubbed transport, covering
# every supported invocation style. Unlike the unary tests above, the
# client wraps the raw response in a Gapic::PagedEnumerable.
def test_list_companies
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Talent::V4beta1::ListCompaniesResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Create request parameters for a unary method.
  parent = "hello world"
  page_token = "hello world"
  page_size = 42
  require_open_jobs = true

  # The stub's block asserts on the coerced request reaching transport.
  list_companies_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :list_companies, name
    assert_kind_of ::Google::Cloud::Talent::V4beta1::ListCompaniesRequest, request
    assert_equal "hello world", request.parent
    assert_equal "hello world", request.page_token
    assert_equal 42, request.page_size
    assert_equal true, request.require_open_jobs
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, list_companies_client_stub do
    # Create client
    client = ::Google::Cloud::Talent::V4beta1::CompanyService::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Use hash object
    client.list_companies({ parent: parent, page_token: page_token, page_size: page_size, require_open_jobs: require_open_jobs }) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use named arguments
    client.list_companies parent: parent, page_token: page_token, page_size: page_size, require_open_jobs: require_open_jobs do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object
    client.list_companies ::Google::Cloud::Talent::V4beta1::ListCompaniesRequest.new(parent: parent, page_token: page_token, page_size: page_size, require_open_jobs: require_open_jobs) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use hash object with options
    client.list_companies({ parent: parent, page_token: page_token, page_size: page_size, require_open_jobs: require_open_jobs }, grpc_options) do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Use protobuf object with options
    client.list_companies ::Google::Cloud::Talent::V4beta1::ListCompaniesRequest.new(parent: parent, page_token: page_token, page_size: page_size, require_open_jobs: require_open_jobs), grpc_options do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Verify method calls — exactly one RPC per invocation style above.
    assert_equal 5, list_companies_client_stub.call_rpc_count
  end
end
# The block passed to Client#configure must receive the very same
# Configuration object that #configure returns.
def test_configure
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure

  client = block_config = config = nil
  Gapic::ServiceStub.stub :new, nil do
    # Use a distinct block-parameter name so it does not shadow the
    # outer +config+ local declared above (Ruby -W shadowing warning).
    client = ::Google::Cloud::Talent::V4beta1::CompanyService::Client.new do |client_config|
      client_config.credentials = grpc_channel
    end
  end

  config = client.configure do |c|
    block_config = c
  end

  assert_same block_config, config
  assert_kind_of ::Google::Cloud::Talent::V4beta1::CompanyService::Client::Configuration, config
end
end
| 38.443548 | 225 | 0.718341 |
87f27734ee9adb964feb33e52bd8c38872ada689 | 24 | module ShadowHelper
end
| 8 | 19 | 0.875 |
e893ada85e2b68c6623c2632dfa0c11d82b13a81 | 23,807 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Batch::Mgmt::V2017_05_01
#
# ApplicationPackageOperations
#
class ApplicationPackageOperations
include MsRestAzure
#
# Creates and initializes a new instance of the ApplicationPackageOperations class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
  # Keep a handle to the shared management client; every request below
  # goes through it (base URL, credentials, serialization, api_version).
  @client = client
end

# @return [BatchManagementClient] reference to the BatchManagementClient
attr_reader :client
#
# Activates the specified application package.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application to activate.
# @param parameters [ActivateApplicationPackageParameters] The parameters for
# the request.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def activate(resource_group_name, account_name, application_id, version, parameters, custom_headers:nil)
  # value! blocks until the call completes and raises on failure. The
  # service replies 204 No Content, so there is no body to surface — the
  # local that previously captured the response was unused and is gone.
  activate_async(resource_group_name, account_name, application_id, version, parameters, custom_headers:custom_headers).value!
  nil
end
#
# Activates the specified application package.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application to activate.
# @param parameters [ActivateApplicationPackageParameters] The parameters for
# the request.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Synchronous variant that surfaces the full HTTP response object
# (raises on failure via value!).
def activate_with_http_info(resource_group_name, account_name, application_id, version, parameters, custom_headers:nil)
  promise = activate_async(resource_group_name, account_name, application_id, version, parameters, custom_headers:custom_headers)
  promise.value!
end
#
# Activates the specified application package.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application to activate.
# @param parameters [ActivateApplicationPackageParameters] The parameters for
# the request.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def activate_async(resource_group_name, account_name, application_id, version, parameters, custom_headers:nil)
  # Fail fast on missing arguments and on service-side naming constraints
  # before any network traffic is attempted.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'application_id is nil' if application_id.nil?
  fail ArgumentError, 'version is nil' if version.nil?
  fail ArgumentError, 'parameters is nil' if parameters.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?

  # Serialize Request
  request_mapper = Azure::Batch::Mgmt::V2017_05_01::Models::ActivateApplicationPackageParameters.mapper()
  request_content = @client.serialize(request_mapper, parameters)
  request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationId}/versions/{version}/activate'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'applicationId' => application_id,'version' => version,'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      body: request_content,
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:post, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 204 No Content is the only success status for activate.
    unless status_code == 204
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the standard ARM tracing headers onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?

    result
  end

  promise.execute
end
#
# Creates an application package record.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ApplicationPackage] operation results.
#
# Synchronous create; returns only the deserialized response body.
def create(resource_group_name, account_name, application_id, version, custom_headers:nil)
  result = create_async(resource_group_name, account_name, application_id, version, custom_headers:custom_headers).value!
  result.body unless result.nil?
end
#
# Creates an application package record.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Synchronous variant that surfaces the full HTTP response object
# (raises on failure via value!).
def create_with_http_info(resource_group_name, account_name, application_id, version, custom_headers:nil)
  promise = create_async(resource_group_name, account_name, application_id, version, custom_headers:custom_headers)
  promise.value!
end
#
# Creates an application package record.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def create_async(resource_group_name, account_name, application_id, version, custom_headers:nil)
  # Fail fast on missing arguments and on service-side naming constraints
  # before any network traffic is attempted.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'application_id is nil' if application_id.nil?
  fail ArgumentError, 'version is nil' if version.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationId}/versions/{version}'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'applicationId' => application_id,'version' => version,'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:put, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 201 Created is the only success status for create.
    unless status_code == 201
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the standard ARM tracing headers onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response
    if status_code == 201
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Batch::Mgmt::V2017_05_01::Models::ApplicationPackage.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Deletes an application package record and its associated binary file.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application to delete.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def delete(resource_group_name, account_name, application_id, version, custom_headers:nil)
  # value! blocks until the call completes and raises on failure. The
  # service replies 204 No Content, so there is no body to surface — the
  # local that previously captured the response was unused and is gone.
  delete_async(resource_group_name, account_name, application_id, version, custom_headers:custom_headers).value!
  nil
end
#
# Deletes an application package record and its associated binary file.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application to delete.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Synchronous variant that surfaces the full HTTP response object
# (raises on failure via value!).
def delete_with_http_info(resource_group_name, account_name, application_id, version, custom_headers:nil)
  promise = delete_async(resource_group_name, account_name, application_id, version, custom_headers:custom_headers)
  promise.value!
end
#
# Deletes an application package record and its associated binary file.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application to delete.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def delete_async(resource_group_name, account_name, application_id, version, custom_headers:nil)
  # Fail fast on missing arguments and on service-side naming constraints
  # before any network traffic is attempted.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'application_id is nil' if application_id.nil?
  fail ArgumentError, 'version is nil' if version.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationId}/versions/{version}'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'applicationId' => application_id,'version' => version,'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:delete, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 204 No Content is the only success status for delete.
    unless status_code == 204
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end

    # Propagate the standard ARM tracing headers onto the result.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?

    result
  end

  promise.execute
end
#
# Gets information about the specified application package.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ApplicationPackage] operation results.
#
# Synchronous get; returns only the deserialized response body.
def get(resource_group_name, account_name, application_id, version, custom_headers:nil)
  result = get_async(resource_group_name, account_name, application_id, version, custom_headers:custom_headers).value!
  result.body unless result.nil?
end
#
# Gets information about the specified application package.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
# Synchronous variant that surfaces the full HTTP response object
# (raises on failure via value!).
def get_with_http_info(resource_group_name, account_name, application_id, version, custom_headers:nil)
  promise = get_async(resource_group_name, account_name, application_id, version, custom_headers:custom_headers)
  promise.value!
end
#
# Gets information about the specified application package.
#
# @param resource_group_name [String] The name of the resource group that
# contains the Batch account.
# @param account_name [String] The name of the Batch account.
# @param application_id [String] The ID of the application.
# @param version [String] The version of the application.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, account_name, application_id, version, custom_headers:nil)
  # Validate required parameters and the service-imposed constraints up front.
  fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
  fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'account_name is nil' if account_name.nil?
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MaxLength': '24'" if !account_name.nil? && account_name.length > 24
  fail ArgumentError, "'account_name' should satisfy the constraint - 'MinLength': '3'" if !account_name.nil? && account_name.length < 3
  fail ArgumentError, "'account_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !account_name.nil? && account_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
  fail ArgumentError, 'application_id is nil' if application_id.nil?
  fail ArgumentError, 'version is nil' if version.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'
  # Set Headers: a fresh request id per call, plus the client's locale.
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  # ARM resource path; {placeholders} are filled from path_params below.
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationId}/versions/{version}'
  request_url = @base_url || @client.base_url
  options = {
    middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
    path_params: {'resourceGroupName' => resource_group_name,'accountName' => account_name,'applicationId' => application_id,'version' => version,'subscriptionId' => @client.subscription_id},
    query_params: {'api-version' => @client.api_version},
    headers: request_headers.merge(custom_headers || {}),
    base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)
  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # Any status other than 200 is surfaced as an Azure operation error.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
    end
    # Propagate the tracing/correlation ids returned by the service.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response body into an ApplicationPackage model on success.
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::Batch::Mgmt::V2017_05_01::Models::ApplicationPackage.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end
    result
  end
  promise.execute
end
end
end
| 52.438326 | 200 | 0.709203 |
bf2f4883e8623c9326b9370ed7c4d9fb9c80fb74 | 66,395 | require 'spec_helper'
#
# Class methods.
#
describe VirtFS::VFile, "(#{$fs_interface} interface)" do
# One-time fixtures: naming prefix, starting directory, FS root, and the
# path used by the symlink-based examples.
before(:all) do
  @spec_name = VfsRealFile.basename(__FILE__, ".rb")
  @temp_prefix = "#{@spec_name}-"
  @this_dir = VfsRealDir.getwd
  @root = File::SEPARATOR
  @slink_path = temp_name(@temp_prefix, ".symlink")
end
# Per-example fixtures: reset the VirtFS context, then create two real temp
# files — one ".rb" file containing 40 bytes of data, one empty ".c" file.
before(:each) do
  reset_context
  @ext = ".rb"
  @temp_file = Tempfile.new([@temp_prefix, @ext])
  @data1 = "0123456789" * 4
  @temp_file.write(@data1)
  @temp_file.close
  @full_path = @temp_file.path
  @rel_path = File.basename(@full_path)
  @parent_dir = File.dirname(@full_path)
  # Ensure predictable ownership for the owned?/grpowned? examples.
  VfsRealFile.chown(Process.uid, Process.gid, @full_path)
  @ext2 = ".c"
  @temp_file2 = Tempfile.new([@temp_prefix, @ext2])
  @temp_file2.close
  @full_path2 = @temp_file2.path
  @rel_path2 = File.basename(@full_path2)
  @parent_dir2 = File.dirname(@full_path2)
end
# Tempfile#delete ignores already-removed files, so examples that delete
# @full_path themselves are safe here.
after(:each) do
  @temp_file.delete
  @temp_file2.delete
end
# Parity with File.absolute_path, with and without an explicit dirstring.
describe ".absolute_path" do
  it "should return the same path as the standard File.absolute_path, when given a dirstring" do
    expect(
      VirtFS::VFile.absolute_path(@rel_path, @parent_dir)
    ).to eq(
      VfsRealFile.absolute_path(@rel_path, @parent_dir)
    )
  end
  it "should return the same path as the standard File.absolute_path, when using pwd" do
    VfsRealDir.chdir(@parent_dir) do
      VirtFS.cwd = VfsRealDir.getwd
      expect(VirtFS::VFile.absolute_path(@rel_path)).to eq(VfsRealFile.absolute_path(@rel_path))
    end
  end
end
# .atime: ENOENT with nothing mounted; File.atime parity once '/' is mounted.
describe ".atime" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.atime("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.atime(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.atime("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return the same value as the standard File.atime, when given a full path" do
      expect(VirtFS::VFile.atime(@full_path)).to eq(VfsRealFile.atime(@full_path))
    end
    it "should return the same value as the standard File.atime, when given relative path" do
      VfsRealDir.chdir(@parent_dir) do
        VirtFS.dir_chdir(@parent_dir)
        expect(VirtFS::VFile.atime(@rel_path)).to eq(VfsRealFile.atime(@rel_path))
      end
    end
  end
end
# .basename: pure path manipulation, no mount required.
describe ".basename" do
  it "should return the same value as the standard File.basename" do
    expect(VirtFS::VFile.basename(@full_path)).to eq(VfsRealFile.basename(@full_path))
  end
end
describe ".blockdev?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.blockdev?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.blockdev?(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.blockdev?("nonexistent_file")).to be false
    end
    it "should return false when given a non-blockdev file" do
      expect(VirtFS::VFile.blockdev?(@full_path)).to be false
    end
    #
    # The block_dev_file method fails to find a block device
    # file in the Travis environment - disabling this test.
    #
    # it "should return true when given a blockdev file" do
    #   expect(bdev = block_dev_file).to_not eq(nil)
    #   expect(VirtFS::VFile.blockdev?(bdev)).to be true
    # end
  end
end
describe ".chardev?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.chardev?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.chardev?(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.chardev?("nonexistent_file")).to be false
    end
    it "should return false when given a non-chardev file" do
      expect(VirtFS::VFile.chardev?(@full_path)).to be false
    end
    it "should return true when given a chardev file" do
      expect(cdev = char_dev_file).to_not eq(nil)
      expect(VirtFS::VFile.chardev?(cdev)).to be true
    end
  end
end
describe ".chmod" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.chmod(0755, "nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.chmod(0755, @full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.chmod(0755, "nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return the number of files processed" do
      expect(VirtFS::VFile.chmod(0777, @full_path)).to eq(1)
      expect(VirtFS::VFile.chmod(0777, @full_path, @full_path2)).to eq(2)
    end
    it "should change the permission bits on an existing file" do
      target_mode = 0755
      expect(VfsRealFile.stat(@full_path).mode & 0777).to_not eq(target_mode)
      VirtFS::VFile.chmod(target_mode, @full_path)
      expect(VfsRealFile.stat(@full_path).mode & 0777).to eq(target_mode)
    end
  end
end
describe ".chown" do
  # Capture the temp file's real owner/group so chown is a no-op that
  # still exercises the call path without needing privileges.
  before(:each) do
    stat = VfsRealFile.stat(@full_path)
    @owner = stat.uid
    @group = stat.gid
  end
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.chown(@owner, @group, "nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.chown(@owner, @group, @full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.chown(@owner, @group, "nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return the number of files processed" do
      expect(VirtFS::VFile.chown(@owner, @group, @full_path)).to eq(1)
      expect(VirtFS::VFile.chown(@owner, @group, @full_path, @full_path2)).to eq(2)
    end
  end
end
describe ".ctime" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.ctime("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.ctime(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.ctime("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return the same value as the standard File.ctime, when given a full path" do
      expect(VirtFS::VFile.ctime(@full_path)).to eq(VfsRealFile.ctime(@full_path))
    end
    it "should return the same value as the standard File.ctime, when given relative path" do
      VfsRealDir.chdir(@parent_dir) do
        VirtFS.dir_chdir(@parent_dir)
        expect(VirtFS::VFile.ctime(@rel_path)).to eq(VfsRealFile.ctime(@rel_path))
      end
    end
  end
end
describe ".delete" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.delete("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.delete(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.delete("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return the number of files processed - 1" do
      expect(VirtFS::VFile.delete(@full_path)).to eq(1)
    end
    it "should return the number of files processed - 2" do
      expect(VirtFS::VFile.delete(@full_path, @full_path2)).to eq(2)
    end
    # Description fixed: this example verifies deletion; the previous text
    # ("should change the permission bits on an existing file") was
    # copy-pasted from the ".chmod" spec and did not match the assertions.
    it "should delete an existing file" do
      expect(VfsRealFile.exist?(@full_path)).to be true
      VirtFS::VFile.delete(@full_path)
      expect(VfsRealFile.exist?(@full_path)).to_not be true
    end
  end
end
describe ".directory?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent directory" do
      expect do
        VirtFS::VFile.directory?("nonexistent_directory")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_directory"
      )
    end
    it "should raise Errno::ENOENT when given a directory that exists in the native FS" do
      expect do
        VirtFS::VFile.directory?(@parent_dir)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@parent_dir}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent directory" do
      expect(VirtFS::VFile.directory?("nonexistent_directory")).to be false
    end
    it "should return false when given a regular file" do
      expect(VirtFS::VFile.directory?(@full_path)).to be false
    end
    it "should return true when given a directory" do
      expect(VirtFS::VFile.directory?(@parent_dir)).to be true
    end
  end
end
describe ".executable?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.executable?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.executable?(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.executable?("nonexistent_file")).to be false
    end
    it "should return false when given a non-executable file" do
      expect(VirtFS::VFile.executable?(@full_path)).to be false
    end
    it "should return true when given a executable file" do
      # 0100 = owner-execute only.
      VfsRealFile.chmod(0100, @full_path)
      expect(VirtFS::VFile.executable?(@full_path)).to be true
    end
  end
end
describe ".executable_real?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.executable_real?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.executable_real?(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.executable_real?("nonexistent_file")).to be false
    end
    it "should return false when given a non-executable file" do
      expect(VirtFS::VFile.executable_real?(@full_path)).to be false
    end
    it "should return true when given a executable file" do
      VfsRealFile.chmod(0100, @full_path)
      expect(VirtFS::VFile.executable_real?(@full_path)).to be true
    end
  end
end
describe ".exist?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.exist?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.exist?(@parent_dir)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@parent_dir}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.exist?("nonexistent_directory")).to be false
    end
    it "should return true when given a regular file" do
      expect(VirtFS::VFile.exist?(@full_path)).to be true
    end
    it "should return true when given a directory" do
      expect(VirtFS::VFile.exist?(@parent_dir)).to be true
    end
  end
end
# Pure path manipulation; parity with File.expand_path.
describe ".expand_path" do
  it "should return the same path as the standard File.expand_path, when given a dirstring" do
    expect(VirtFS::VFile.expand_path(@rel_path, @parent_dir)).to eq(VfsRealFile.expand_path(@rel_path, @parent_dir))
  end
  it "should return the same path as the standard File.expand_path, when using pwd" do
    VfsRealDir.chdir(@parent_dir) do
      VirtFS.cwd = VfsRealDir.getwd
      expect(VirtFS::VFile.expand_path(@rel_path)).to eq(VfsRealFile.expand_path(@rel_path))
    end
  end
end
# The two temp files were created with known extensions (".rb" and ".c").
describe ".extname" do
  it "should return the known extension of tempfile 1" do
    expect(VirtFS::VFile.extname(@full_path)).to eq(@ext)
  end
  it "should return the known extension of tempfile 2" do
    expect(VirtFS::VFile.extname(@full_path2)).to eq(@ext2)
  end
end
describe ".file?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.file?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.file?(@parent_dir)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@parent_dir}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.file?("nonexistent_directory")).to be false
    end
    it "should return true when given a regular file" do
      expect(VirtFS::VFile.file?(@full_path)).to be true
    end
    it "should return false when given a directory" do
      expect(VirtFS::VFile.file?(@parent_dir)).to be false
    end
  end
end
# Glob matching is pure computation (no FS access), so no mount is needed.
# These mirror the examples in the standard File.fnmatch documentation.
describe ".fnmatch" do
  it "should match representative examples" do
    expect(VirtFS::VFile.fnmatch('cat', 'cat')).to be true
    expect(VirtFS::VFile.fnmatch('cat', 'category')).to be false
    expect(VirtFS::VFile.fnmatch('c?t', 'cat')).to be true
    expect(VirtFS::VFile.fnmatch('c\?t', 'cat')).to be false
    expect(VirtFS::VFile.fnmatch('c??t', 'cat')).to be false
    expect(VirtFS::VFile.fnmatch('c*', 'cats')).to be true
    expect(VirtFS::VFile.fnmatch('c/**/t', 'c/a/b/c/t')).to be true
    expect(VirtFS::VFile.fnmatch('c**t', 'c/a/b/c/t')).to be true
    expect(VirtFS::VFile.fnmatch('c**t', 'cat')).to be true
    expect(VirtFS::VFile.fnmatch('**.txt', 'notes.txt')).to be true
    expect(VirtFS::VFile.fnmatch('**.txt', 'some/dir/tree/notes.txt')).to be true
    expect(VirtFS::VFile.fnmatch('c*t', 'cat')).to be true
    expect(VirtFS::VFile.fnmatch('c\at', 'cat')).to be true
    expect(VirtFS::VFile.fnmatch('c\at', 'cat', File::FNM_NOESCAPE)).to be false
    expect(VirtFS::VFile.fnmatch('a?b', 'a/b')).to be true
    expect(VirtFS::VFile.fnmatch('a?b', 'a/b', File::FNM_PATHNAME)).to be false
    expect(VirtFS::VFile.fnmatch('*', '.profile')).to be false
    expect(VirtFS::VFile.fnmatch('*', '.profile', File::FNM_DOTMATCH)).to be true
    expect(VirtFS::VFile.fnmatch('*', 'dave/.profile')).to be true
    expect(VirtFS::VFile.fnmatch('*', 'dave/.profile', File::FNM_DOTMATCH)).to be true
    expect(VirtFS::VFile.fnmatch('*', 'dave/.profile', File::FNM_PATHNAME)).to be false
    expect(VirtFS::VFile.fnmatch('*/*', 'dave/.profile', File::FNM_PATHNAME)).to be false
    strict = File::FNM_PATHNAME | File::FNM_DOTMATCH
    expect(VirtFS::VFile.fnmatch('*/*', 'dave/.profile', strict)).to be true
  end
end
describe ".ftype" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.ftype("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.ftype(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.ftype("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return 'file' when given a regular file" do
      expect(VirtFS::VFile.ftype(@full_path)).to eq('file')
    end
    it "should return 'directory' when given a directory" do
      expect(VirtFS::VFile.ftype(@parent_dir)).to eq('directory')
    end
    #
    # The block_dev_file method fails to find a block device
    # file in the Travis environment - disabling this test.
    #
    # it "should return 'blockSpecial,' when given a block device file" do
    #   expect(bdev = block_dev_file).to_not eq(nil)
    #   expect(VirtFS::VFile.ftype(bdev)).to eq('blockSpecial')
    # end
    # Description fixed: this example uses char_dev_file, so it tests a
    # character device, not a block device (old text was a copy-paste
    # remnant from the disabled example above, with a stray comma).
    it "should return 'characterSpecial' when given a character device file" do
      expect(cdev = char_dev_file).to_not eq(nil)
      expect(VirtFS::VFile.ftype(cdev)).to eq('characterSpecial')
    end
  end
end
describe ".grpowned?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.grpowned?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.grpowned?(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.grpowned?("nonexistent_file")).to be false
    end
    # The before(:each) hook chowned @full_path to this process's gid.
    it "should return true when given a file we created" do
      expect(VirtFS::VFile.grpowned?(@full_path)).to be true
    end
  end
end
describe ".identical?" do
  # Each example gets a fresh symlink pointing at the primary temp file.
  before(:each) do
    VfsRealFile.symlink(@full_path, @slink_path)
  end
  after(:each) do
    VfsRealFile.delete(@slink_path)
  end
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.identical?("nonexistent_file1", "nonexistent_file2")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.identical?(@full_path, @slink_path)
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.identical?(@full_path, "nonexistent_file1")).to be false
    end
    it "should return true when given the same file" do
      expect(VirtFS::VFile.identical?(@full_path, @full_path)).to be true
    end
    it "should return true when given a file and its symlink" do
      expect(VirtFS::VFile.identical?(@full_path, @slink_path)).to be true
    end
  end
end
# Parity with File.join across multiple path components.
describe ".join" do
  it "should return the same path as the standard File.join" do
    # Bug fix: the old code used %( dir1 ... ), which is a single STRING
    # literal, so File.join received one argument and the example passed
    # vacuously. %w builds the intended array of path components.
    dirs = %w(dir1 dir2 dir3 dir4 dir5)
    expect(VirtFS::VFile.join(*dirs)).to eq(VfsRealFile.join(*dirs))
  end
end
# lchmod is not implemented on all platforms (e.g. most Linux builds),
# so only define these examples where the real File class supports it.
if VfsRealFile.respond_to?(:lchmod)
  describe ".lchmod" do
    context "with no filesystems mounted" do
      it "should raise Errno::ENOENT when given a nonexistent file" do
        expect do
          VirtFS::VFile.lchmod(0755, "nonexistent_file")
        end.to raise_error(
          Errno::ENOENT, "No such file or directory - nonexistent_file"
        )
      end
      it "should raise Errno::ENOENT when given a file that exists in the native FS" do
        expect do
          VirtFS::VFile.lchmod(0755, @full_path)
        end.to raise_error(
          Errno::ENOENT, "No such file or directory - #{@full_path}"
        )
      end
    end
    context "with FS mounted on '/'" do
      before(:each) do
        @native_fs = nativefs_class.new
        VirtFS.mount(@native_fs, @root)
      end
      it "should raise Errno::ENOENT when given a nonexistent file" do
        expect do
          VirtFS::VFile.lchmod(0755, "nonexistent_file")
        end.to raise_error(
          Errno::ENOENT, /No such file or directory/
        )
      end
      it "should return the number of files processed" do
        expect(VirtFS::VFile.lchmod(0777, @full_path)).to eq(1)
        expect(VirtFS::VFile.lchmod(0777, @full_path, @full_path2)).to eq(2)
      end
      it "should change the permission bits on an existing file" do
        target_mode = 0755
        expect(VfsRealFile.stat(@full_path).mode & 0777).to_not eq(target_mode)
        VirtFS::VFile.lchmod(target_mode, @full_path)
        expect(VfsRealFile.stat(@full_path).mode & 0777).to eq(target_mode)
      end
    end
  end
end
describe ".lchown" do
  # Use the file's current owner/group (no-op chown) and a per-example
  # symlink, since lchown operates on the link itself.
  before(:each) do
    stat = VfsRealFile.stat(@full_path)
    @owner = stat.uid
    @group = stat.gid
    VfsRealFile.symlink(@full_path, @slink_path)
  end
  after(:each) do
    VfsRealFile.delete(@slink_path)
  end
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.lchown(@owner, @group, "nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.lchown(@owner, @group, @full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.lchown(@owner, @group, "nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return the number of files processed" do
      expect(VirtFS::VFile.lchown(@owner, @group, @slink_path)).to eq(1)
      expect(VirtFS::VFile.lchown(@owner, @group, @slink_path, @full_path2)).to eq(2)
    end
  end
end
describe ".link" do
  before(:each) do
    @link_path = temp_name(@temp_prefix, ".hardlink")
  end
  after(:each) do
    VfsRealFile.delete(@link_path) if VfsRealFile.exist?(@link_path)
  end
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.link("nonexistent_file1", "nonexistent_file2")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.link(@full_path, @link_path)
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.link("nonexistent_file1", @link_path)
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return 0 on success" do
      expect(VirtFS::VFile.link(@full_path, @link_path)).to eq(0)
    end
    # Hard links share the same inode, so identical? must be true.
    it "the link should be identical to the original file" do
      expect(VirtFS::VFile.link(@full_path, @link_path)).to eq(0)
      expect(VirtFS::VFile.identical?(@full_path, @link_path)).to be true
    end
  end
end
describe ".lstat" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.lstat("nonexistent_file1")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.lstat(@full_path)
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
      VfsRealFile.symlink(@full_path, @slink_path)
    end
    after(:each) do
      VfsRealFile.delete(@slink_path)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.lstat("nonexistent_file1")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    # Unlike stat, lstat reports on the link itself, not its target.
    it "should return the stat information for the symlink" do
      expect(VirtFS::VFile.lstat(@slink_path).symlink?).to be true
    end
  end
end
describe ".mtime" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.mtime("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.mtime(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.mtime("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, /No such file or directory/
      )
    end
    it "should return the same value as the standard File.mtime, when given a full path" do
      expect(VirtFS::VFile.mtime(@full_path)).to eq(VfsRealFile.mtime(@full_path))
    end
    it "should return the same value as the standard File.mtime, when given relative path" do
      VfsRealDir.chdir(@parent_dir) do
        VirtFS.dir_chdir(@parent_dir)
        expect(VirtFS::VFile.mtime(@rel_path)).to eq(VfsRealFile.mtime(@rel_path))
      end
    end
  end
end
describe ".owned?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.owned?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.owned?(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.owned?("nonexistent_file")).to be false
    end
    # The before(:each) hook chowned @full_path to this process's uid.
    it "should return true when given a file we created" do
      expect(VirtFS::VFile.owned?(@full_path)).to be true
    end
  end
end
# .path accepts either a path String or an open IO-like object.
describe ".path" do
  before(:each) do
    @native_fs = nativefs_class.new
    VirtFS.mount(@native_fs, @root)
  end
  it "should work with a string" do
    expect(VirtFS::VFile.path(@full_path)).to eq(@full_path)
  end
  it "should work with an IO object" do
    VirtFS::VFile.open(@full_path) do |fobj|
      expect(VirtFS::VFile.path(fobj)).to eq(@full_path)
    end
  end
end
describe ".pipe?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.pipe?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.pipe?(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.pipe?("nonexistent_file")).to be false
    end
    it "should return false when given a regular file" do
      expect(VirtFS::VFile.pipe?(@full_path)).to be false
    end
  end
end
describe ".readable?" do
  context "with no filesystems mounted" do
    it "should raise Errno::ENOENT when given a nonexistent file" do
      expect do
        VirtFS::VFile.readable?("nonexistent_file")
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - nonexistent_file"
      )
    end
    it "should raise Errno::ENOENT when given a file that exists in the native FS" do
      expect do
        VirtFS::VFile.readable?(@full_path)
      end.to raise_error(
        Errno::ENOENT, "No such file or directory - #{@full_path}"
      )
    end
  end
  context "with FS mounted on '/'" do
    before(:each) do
      @native_fs = nativefs_class.new
      VirtFS.mount(@native_fs, @root)
    end
    it "should return false when given a nonexistent file" do
      expect(VirtFS::VFile.readable?("nonexistent_file")).to be false
    end
    #
    # NOTE: This test fails when run under Fusion shared folders.
    # Could be due to silent failure of chmod.
    #
    it "should return false when given a non-readable file" do
      # 0300 = owner write+execute, no read bit.
      VfsRealFile.chmod(0300, @full_path)
      expect(VirtFS::VFile.readable?(@full_path)).to be false
    end
    it "should return true when given a readable file" do
      # 0400 = owner read only.
      VfsRealFile.chmod(0400, @full_path)
      expect(VirtFS::VFile.readable?(@full_path)).to be true
    end
  end
end
describe ".readable_real?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.readable_real?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.readable_real?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.readable_real?("nonexistent_file")).to be false
end
#
# NOTE: This test fails when run under Fusion shared folders.
# Could be due to silent failure of chmod.
#
it "should return false when given a non-readable file" do
VfsRealFile.chmod(0300, @full_path)
expect(VirtFS::VFile.readable_real?(@full_path)).to be false
end
it "should return true when given a readable file" do
VfsRealFile.chmod(0400, @full_path)
expect(VirtFS::VFile.readable_real?(@full_path)).to be true
end
end
end
describe ".readlink" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.readlink("nonexistent_file1")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.readlink(@full_path)
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
VfsRealFile.symlink(@full_path, @slink_path)
end
after(:each) do
VfsRealFile.delete(@slink_path)
end
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.readlink("nonexistent_file1")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should return the stat information for the symlink" do
expect(VirtFS::VFile.readlink(@slink_path)).to eq(@full_path)
end
end
end
describe ".realdirpath" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return the same path as the standard realdirpath" do
expect(VirtFS::VFile.realdirpath(@full_path)).to eq(VfsRealFile.realdirpath(@full_path))
end
end
describe ".realpath" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return the same path as the standard realdirpath" do
expect(VirtFS::VFile.realpath(@full_path)).to eq(VfsRealFile.realpath(@full_path))
end
end
describe ".rename" do
before(:each) do
@to_path = temp_name(@temp_prefix, ".renamed")
end
after(:each) do
VfsRealFile.delete(@to_path) if VfsRealFile.exist?(@to_path)
end
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.rename("nonexistent_file1", "nonexistent_file2")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.rename(@full_path, @to_path)
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.rename("nonexistent_file1", @to_path)
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should return 0 on success" do
expect(VirtFS::VFile.rename(@full_path, @to_path)).to eq(0)
end
it "the link should rename the file" do
expect(VirtFS::VFile.rename(@full_path, @to_path)).to eq(0)
expect(VirtFS::VFile.exist?(@to_path)).to be true
expect(VirtFS::VFile.exist?(@full_path)).to be false
end
end
end
describe ".setgid?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.setgid?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.setgid?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.setgid?("nonexistent_file")).to be false
end
it "should return false when given a non-setgid file" do
VfsRealFile.chmod(0644, @full_path)
expect(VirtFS::VFile.setgid?(@full_path)).to be false
end
it "should return true when given a setgid file" do
VfsRealFile.chmod(02644, @full_path)
expect(VirtFS::VFile.setgid?(@full_path)).to be true
end
end
end
describe ".setuid?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.setuid?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.setuid?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.setuid?("nonexistent_file")).to be false
end
it "should return false when given a non-setuid file" do
VfsRealFile.chmod(0644, @full_path)
expect(VirtFS::VFile.setuid?(@full_path)).to be false
end
it "should return true when given a setuid file" do
VfsRealFile.chmod(04644, @full_path)
expect(VirtFS::VFile.setuid?(@full_path)).to be true
end
end
end
describe ".size" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.size("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.size(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.size("nonexistent_file")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should return the known size of the file" do
expect(VirtFS::VFile.size(@full_path)).to eq(@data1.bytesize)
end
it "should return the same value as the standard File#size" do
expect(VirtFS::VFile.size(@full_path)).to eq(VfsRealFile.size(@full_path))
end
it "should return 0 for empty file" do
expect(VirtFS::VFile.size(@full_path2)).to eq(0)
end
end
end
describe ".size?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.size?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.size?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.size?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should return the known size of the file" do
expect(VirtFS::VFile.size?(@full_path)).to eq(@data1.bytesize)
end
it "should return the same value as the standard File#size" do
expect(VirtFS::VFile.size?(@full_path)).to eq(VfsRealFile.size?(@full_path))
end
it "should return nil for empty file" do
expect(VirtFS::VFile.size?(@full_path2)).to eq(nil)
end
end
end
describe ".socket?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.socket?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.socket?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.socket?("nonexistent_file")).to be false
end
it "should return false when given a regular file" do
expect(VirtFS::VFile.socket?(@full_path)).to be false
end
end
end
describe ".split" do
it "should return the same values as the standard File.split" do
expect(VirtFS::VFile.split(@full_path)).to match_array(VfsRealFile.split(@full_path))
end
end
describe ".stat" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.stat("nonexistent_file1")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.stat(@full_path)
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
VfsRealFile.symlink(@full_path, @slink_path)
end
after(:each) do
VfsRealFile.delete(@slink_path)
end
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.stat("nonexistent_file1")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should return the stat information for the file" do
expect(VirtFS::VFile.stat(@full_path).symlink?).to be false
end
it "given a symlink, should return the stat information for the regular file" do
expect(VirtFS::VFile.stat(@slink_path).symlink?).to be false
end
end
end
describe ".sticky?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.sticky?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.sticky?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.sticky?("nonexistent_file")).to be false
end
it "should return false when given a non-sticky file" do
VfsRealFile.chmod(0644, @full_path)
expect(VirtFS::VFile.sticky?(@full_path)).to be false
end
it "should return true when given a sticky file" do
VfsRealFile.chmod(01644, @full_path)
expect(VirtFS::VFile.sticky?(@full_path)).to be true
end
end
end
describe ".symlink" do
after(:each) do
VfsRealFile.delete(@slink_path) if VfsRealFile.exist?(@slink_path)
end
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.symlink("nonexistent_file1", "nonexistent_file2")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.symlink(@full_path, @slink_path)
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return 0 on success" do
expect(VirtFS::VFile.symlink(@full_path, @slink_path)).to eq(0)
end
it "the symlink should be identical to the original file" do
expect(VirtFS::VFile.symlink(@full_path, @slink_path)).to eq(0)
expect(VirtFS::VFile.identical?(@full_path, @slink_path)).to be true
end
end
end
describe ".symlink?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.symlink?("nonexistent_file1")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.symlink?(@full_path)
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
VfsRealFile.symlink(@full_path, @slink_path)
end
after(:each) do
VfsRealFile.delete(@slink_path)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.symlink?("nonexistent_file")).to be false
end
it "should return true given a symlink" do
expect(VirtFS::VFile.symlink?(@slink_path)).to be true
end
end
end
describe ".truncate" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.truncate("nonexistent_file", 0)
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.truncate(@full_path, 0)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.truncate("nonexistent_file", 0)
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should return 0" do
expect(VirtFS::VFile.truncate(@full_path, 5)).to eq(0)
end
it "should raise truncate the file to the specified size" do
tsize = @data1.bytesize/2
expect(VfsRealFile.size(@full_path)).to_not eq(tsize)
VirtFS::VFile.truncate(@full_path, tsize)
expect(VfsRealFile.size(@full_path)).to eq(tsize)
end
end
end
describe ".utime" do
before(:each) do
@time = Time.new(2015, 9, 12, 9, 50)
end
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.utime(@time, @time, "nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.utime(@time, @time, @full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.utime(@time, @time, "nonexistent_file")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should set the atime and mtime of the file, given a full path" do
expect(VirtFS::VFile.utime(@time, @time, @full_path)).to eq(1)
expect(VirtFS::VFile.atime(@full_path)).to eq(@time)
expect(VirtFS::VFile.mtime(@full_path)).to eq(@time)
end
it "should set the atime and mtime of the file, given relative path" do
VfsRealDir.chdir(@parent_dir) do
VirtFS.dir_chdir(@parent_dir)
expect(VirtFS::VFile.utime(@time, @time, @rel_path)).to eq(1)
expect(VirtFS::VFile.atime(@full_path)).to eq(@time)
expect(VirtFS::VFile.mtime(@full_path)).to eq(@time)
end
end
end
end
describe ".world_readable?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.world_readable?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.world_readable?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return nil when given a nonexistent file" do
expect(VirtFS::VFile.world_readable?("nonexistent_file")).to eq(nil)
end
it "should return nil when given a non-world-readable file" do
VfsRealFile.chmod(0773, @full_path)
expect(VirtFS::VFile.world_readable?(@full_path)).to eq(nil)
end
it "should return permission bits when given a world-readable file" do
VfsRealFile.chmod(0004, @full_path)
expect(VirtFS::VFile.world_readable?(@full_path)).to eq(0004)
end
end
end
describe ".world_writable?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.world_writable?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.world_writable?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return nil when given a nonexistent file" do
expect(VirtFS::VFile.world_writable?("nonexistent_file")).to eq(nil)
end
it "should return nil when given a non-world_writable file" do
VfsRealFile.chmod(0775, @full_path)
expect(VirtFS::VFile.world_writable?(@full_path)).to eq(nil)
end
it "should return permission bits when given a world_writable file" do
VfsRealFile.chmod(0002, @full_path)
expect(VirtFS::VFile.world_writable?(@full_path)).to eq(0002)
end
end
end
describe ".writable?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.writable?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.writable?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.writable?("nonexistent_file")).to be false
end
#
# NOTE: This test fails when run under Fusion shared folders.
# Could be due to silent failure of chmod.
#
it "should return false when given a non-writable file" do
VfsRealFile.chmod(0500, @full_path)
expect(VirtFS::VFile.writable?(@full_path)).to be false
end
it "should return true when given a writable file" do
VfsRealFile.chmod(0200, @full_path)
expect(VirtFS::VFile.writable?(@full_path)).to be true
end
end
end
describe ".writable_real?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.writable_real?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.writable_real?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.writable_real?("nonexistent_file")).to be false
end
#
# NOTE: This test fails when run under Fusion shared folders.
# Could be due to silent failure of chmod.
#
it "should return false when given a non-writable file" do
VfsRealFile.chmod(0500, @full_path)
expect(VirtFS::VFile.writable_real?(@full_path)).to be false
end
it "should return true when given a writable file" do
VfsRealFile.chmod(0200, @full_path)
expect(VirtFS::VFile.writable_real?(@full_path)).to be true
end
end
end
describe ".zero?" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when given a nonexistent file" do
expect do
VirtFS::VFile.zero?("nonexistent_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - nonexistent_file"
)
end
it "should raise Errno::ENOENT when given a file that exists in the native FS" do
expect do
VirtFS::VFile.zero?(@full_path)
end.to raise_error(
Errno::ENOENT, "No such file or directory - #{@full_path}"
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should return false when given a nonexistent file" do
expect(VirtFS::VFile.zero?("nonexistent_file")).to be false
end
it "should return false when given a non-zero length file" do
expect(VirtFS::VFile.zero?(@full_path)).to be false
end
it "should return true when given a zero length file" do
expect(VirtFS::VFile.zero?(@full_path2)).to be true
end
end
end
describe ".new" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when the file doesn't exist" do
expect do
VirtFS::VFile.new("not_a_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - not_a_file"
)
end
it "should raise Errno::ENOENT the file does exist in the native FS" do
expect do
VirtFS::VFile.new(@full_path)
end.to raise_error(
Errno::ENOENT, /No such file or directory -/
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should raise Errno::ENOENT when the file doesn't exist" do
expect do
VirtFS::VFile.new("not_a_file")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should return a File object - given full path" do
expect(VirtFS::VFile.new(@full_path)).to be_kind_of(VirtFS::VFile)
end
it "should return a directory object - given relative path" do
VirtFS::VDir.chdir(@parent_dir)
expect(VirtFS::VFile.new(@rel_path)).to be_kind_of(VirtFS::VFile)
end
end
end
describe ".open" do
context "with no filesystems mounted" do
it "should raise Errno::ENOENT when file doesn't exist" do
expect do
VirtFS::VFile.open("not_a_file")
end.to raise_error(
Errno::ENOENT, "No such file or directory - not_a_file"
)
end
it "should raise Errno::ENOENT file does exist in the native FS" do
expect do
VirtFS::VFile.open(@full_path)
end.to raise_error(
Errno::ENOENT, /No such file or directory -/
)
end
end
context "with FS mounted on '/'" do
before(:each) do
@native_fs = nativefs_class.new
VirtFS.mount(@native_fs, @root)
end
it "should raise Errno::ENOENT when file doesn't exist" do
expect do
VirtFS::VFile.new("not_a_file")
end.to raise_error(
Errno::ENOENT, /No such file or directory/
)
end
it "should return a File object - when no block given" do
expect(VirtFS::VFile.open(@full_path)).to be_kind_of(VirtFS::VFile)
end
it "should yield a file object to the block - when block given" do
VirtFS::VFile.open(@full_path) { |file_obj| expect(file_obj).to be_kind_of(VirtFS::VFile) }
end
it "should return the value of the block - when block given" do
expect(VirtFS::VFile.open(@full_path) { true }).to be true
end
end
end
end
| 31.511628 | 118 | 0.614835 |
e8e32e2641dd7ba7c37ffce8d7c124aa513465ae | 7,514 | # == Schema Information
#
# Table name: feed_versions
#
# id :integer not null, primary key
# feed_id :integer not null
# feed_type :string default("gtfs"), not null
# file :string default(""), not null
# earliest_calendar_date :date not null
# latest_calendar_date :date not null
# sha1 :string not null
# md5 :string
# tags :hstore
# fetched_at :datetime not null
# imported_at :datetime
# created_at :datetime not null
# updated_at :datetime not null
# import_level :integer default(0), not null
# url :string default(""), not null
# file_raw :string
# sha1_raw :string
# md5_raw :string
# file_feedvalidator :string
# deleted_at :datetime
# sha1_dir :string
#
# Indexes
#
# index_feed_versions_on_earliest_calendar_date (earliest_calendar_date)
# index_feed_versions_on_feed_type_and_feed_id (feed_type,feed_id)
# index_feed_versions_on_latest_calendar_date (latest_calendar_date)
#
describe FeedVersion do
context 'calendar scopes' do
before(:each) do
@fv1 = create(:feed_version, earliest_calendar_date: '2016-01-01', latest_calendar_date: '2017-01-01')
@fv2 = create(:feed_version, earliest_calendar_date: '2016-02-01', latest_calendar_date: '2017-02-01')
@fv3 = create(:feed_version, earliest_calendar_date: '2016-03-01', latest_calendar_date: '2017-03-01')
end
context '.where_calendar_coverage_begins_at_or_before' do
it 'finds FeedVersions with coverage before a date' do
expect(FeedVersion.where_calendar_coverage_begins_at_or_before('2016-04-15')).to match_array([@fv1, @fv2, @fv3])
end
it 'finds FeedVersions with coverage on or before a date' do
expect(FeedVersion.where_calendar_coverage_begins_at_or_before('2016-02-01')).to match_array([@fv1, @fv2])
end
it 'finds FeedVersions with coverage on a date' do
expect(FeedVersion.where_calendar_coverage_begins_at_or_before('2016-01-01')).to match_array([@fv1])
end
end
context '.where_calendar_coverage_begins_at_or_after' do
it 'finds FeedVersions with coverage after a date' do
expect(FeedVersion.where_calendar_coverage_begins_at_or_after('2015-12-01')).to match_array([@fv1, @fv2, @fv3])
end
it 'finds FeedVersions with coverage on or after a date' do
expect(FeedVersion.where_calendar_coverage_begins_at_or_after('2016-02-01')).to match_array([@fv2, @fv3])
end
it 'finds FeedVersions with coverage on a date' do
expect(FeedVersion.where_calendar_coverage_begins_at_or_after('2016-03-01')).to match_array([@fv3])
end
end
context '.where_calendar_coverage_includes' do
it 'finds FeedVersions with coverage including a date' do
expect(FeedVersion.where_calendar_coverage_includes('2016-04-01')).to match_array([@fv1, @fv2, @fv3])
end
it 'finds FeedVersions with coverage including, inclusive' do
expect(FeedVersion.where_calendar_coverage_includes('2016-02-01')).to match_array([@fv1, @fv2])
end
it 'excludes FeedVersions outside coverage range' do
expect(FeedVersion.where_calendar_coverage_includes('2017-01-15')).to match_array([@fv2, @fv3])
end
end
end
context '#compute_and_set_hashes' do
it 'computes file hashes' do
feed_version = create(:feed_version_bart)
expect(feed_version.sha1).to eq '2d340d595ec566ba54b0a6a25359f71d94268b5c'
expect(feed_version.md5).to eq '1197a60bab8f685492aa9e50a732b466'
end
end
context '#delete_schedule_stop_pairs' do
before(:each) do
@feed_version = create(:feed_version)
@ssp = create(:schedule_stop_pair, feed: @feed_version.feed, feed_version: @feed_version)
end
it 'deletes ssps' do
@feed_version.delete_schedule_stop_pairs!
expect(ScheduleStopPair.exists?(@ssp.id)).to be false
expect(@feed_version.imported_schedule_stop_pairs.count).to eq(0)
end
end
context '#extend_schedule_stop_pairs_service_end_date' do
before(:each) do
@feed_version = create(:feed_version)
@extend_from = Date.parse('2016-01-01')
@extend_to = Date.parse('2017-01-01')
@ssp1 = create(:schedule_stop_pair, feed: @feed_version.feed, feed_version: @feed_version, service_end_date: @extend_from)
@ssp2 = create(:schedule_stop_pair, feed: @feed_version.feed, feed_version: @feed_version, service_end_date: @extend_from - 1.day)
end
it 'extends ssp service_end_date' do
@feed_version.extend_schedule_stop_pairs_service_end_date(@extend_from, @extend_to)
expect(@ssp1.reload.service_end_date).to eq(@extend_to)
end
it 'does not extend before extend_from' do
service_end_date = @ssp2.service_end_date
@feed_version.extend_schedule_stop_pairs_service_end_date(@extend_from, @extend_to)
expect(@ssp2.reload.service_end_date).to eq(service_end_date)
end
end
context '#is_active_feed_version' do
it 'is active feed version' do
feed = create(:feed)
active_feed_version = create(:feed_version, feed: feed)
inactive_feed_version = create(:feed_version, feed: feed)
feed.update(active_feed_version: active_feed_version)
expect(active_feed_version.is_active_feed_version).to eq true
expect(inactive_feed_version.is_active_feed_version).to eq false
end
end
context '#import_status' do
it 'never_imported' do
feed_version = create(:feed_version)
expect(feed_version.import_status).to eq(:never_imported)
end
it 'in_progress' do
feed_version = create(:feed_version)
create(:feed_version_import, success: true, feed_version: feed_version)
create(:feed_version_import, success: nil, feed_version: feed_version)
expect(feed_version.import_status).to eq(:in_progress)
end
it 'most_recent_failed' do
feed_version = create(:feed_version)
create(:feed_version_import, success: true, feed_version: feed_version)
create(:feed_version_import, success: false, feed_version: feed_version)
expect(feed_version.import_status).to eq(:most_recent_failed)
end
it 'most_recent_succeeded' do
feed_version = create(:feed_version)
create(:feed_version_import, success: false, feed_version: feed_version)
create(:feed_version_import, success: true, feed_version: feed_version)
expect(feed_version.import_status).to eq(:most_recent_succeeded)
end
end
context '#download_url' do
it 'is included by default' do
feed = create(:feed)
feed_version = create(:feed_version, feed: feed)
allow(feed_version).to receive_message_chain(:file, :url).and_return('http://cloudfront.com/file/f-9q9-bart.zip?auth=1')
expect(feed_version.download_url).to eq('http://cloudfront.com/file/f-9q9-bart.zip')
end
it "isn't included for feeds that don't allow redistribution" do
feed = create(:feed, license_redistribute: 'no')
feed_version = create(:feed_version, feed: feed)
allow(feed_version).to receive_message_chain(:file, :url).and_return('http://cloudfront.com/')
expect(feed_version.download_url).to be_nil
end
end
end
| 42.693182 | 136 | 0.690578 |
87bba3e44fc9ccab4bf90fcaf5e12034775973ca | 1,001 | require "redis_lua/version"
require "digest"
require "redis"
require "yaml"
module RedisLua
  class << self
    # config_file_path: YAML file mapping script names to their SHA1 digests.
    # lua_script_path:  directory containing the "<name>.lua" sources.
    attr_accessor :config_file_path, :lua_script_path

    # Lazily loads and memoizes the name => sha1 config hash.
    def config
      @config ||= load_config(config_file_path)
    end

    # Parses the YAML config file at +file_path+ into a Hash.
    def load_config(file_path)
      YAML.load(File.read(file_path))
    end

    # Reads the raw Lua source for +name+ from the script directory.
    # Uses File.join instead of Pathname: this file never requires
    # "pathname", so the previous Pathname.new only worked when some other
    # dependency had loaded it transitively.
    def read_script(name)
      File.read(File.join(lua_script_path, "#{name}.lua"))
    end

    # Loads every script listed in the config into the Redis script cache.
    def load_scripts
      config.each do |key, _|
        load_script(key)
      end
    end

    # Verifies the on-disk script matches the SHA1 recorded in the config,
    # then loads it into the Redis script cache. Raises when they disagree.
    def load_script(name)
      script = read_script(name)
      sha1 = Digest::SHA1.hexdigest(script)
      if sha1 == config[name]
        Redis.current.script(:load, script)
      else
        # BUG FIX: previously interpolated the undefined local `key`,
        # which raised NameError instead of the intended mismatch message.
        raise "sha1 digest mismatch: #{name} #{sha1}"
      end
    end

    # True-ish if Redis already has the script cached under its configured
    # SHA1 (delegates to SCRIPT EXISTS).
    def loaded_script?(name)
      Redis.current.script(:exists, config[name])
    end

    # Executes the cached script by its configured SHA1 via EVALSHA.
    def call_script(name, *args)
      sha1 = config[name]
      Redis.current.evalsha(sha1, *args)
    end
  end
end
| 20.428571 | 63 | 0.633367 |
5d95b649ee88c8d36bc0e3f85f5c242e662d12ff | 339 | class StaticPagesController < ApplicationController
def home
if logged_in?
@micropost = current_user.microposts.build
@feed_items = current_user.feed.paginate(page: params[:page])
@feed_items = current_user.feed.paginate(page: params[:page])
end
end
def help
end
def about
end
def contact
end
end
| 16.142857 | 68 | 0.707965 |
e92d75cf4a5823909dc486d9a57ee2c458b9ed9a | 7,157 | require 'yt/models/resource'
module Yt
module Models
# A channel resource contains information about a YouTube channel.
# @see https://developers.google.com/youtube/v3/docs/channels
class Channel < Resource
# @!attribute [r] videos
# @return [Yt::Collections::Videos] the channel’s videos.
has_many :videos
# @!attribute [r] playlists
# @return [Yt::Collections::Playlists] the channel’s playlists.
has_many :playlists
has_one :content_detail
delegate :related_playlists, to: :content_detail
has_one :branding_setting
delegate :keywords, :banner_image_url, to: :branding_setting
# @macro has_report
has_report :earnings
# @macro has_report
has_report :views
# @macro has_report
has_report :comments
# @macro has_report
has_report :likes
# @macro has_report
has_report :dislikes
# @macro has_report
has_report :shares
# @macro has_report
has_report :impressions
# @macro has_viewer_percentages
has_viewer_percentages
# @!attribute [r] statistics_set
# @return [Yt::Models::StatisticsSet] the statistics for the video.
has_one :statistics_set
delegate :view_count, :comment_count, :video_count, :subscriber_count,
:subscriber_count_visible?, to: :statistics_set
# @!attribute [r] content_owner_detail
# @return [Yt::Models::ContentOwnerDetail] the video’s content owner
# details.
has_one :content_owner_detail
delegate :content_owner, :linked_at, to: :content_owner_detail
# @!attribute [r] subscribed_channels
# @return [Yt::Collections::SubscribedChannels] the channels that the channel is subscribed to.
# @raise [Yt::Errors::Forbidden] if the owner of the channel has
# explicitly select the option to keep all subscriptions private.
has_many :subscribed_channels
# @!attribute [r] subscription
# @return [Yt::Models::Subscription] the channel’s subscription by auth.
# @raise [Yt::Errors::NoItems] if {Resource#auth auth} is not
# subscribed to the channel.
# @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} does not
# return an authenticated account.
has_one :subscription
# Override Resource's new to set statistics as well
# if the response includes them
def initialize(options = {})
super options
if options[:statistics]
@statistics_set = StatisticsSet.new data: options[:statistics]
end
if options[:viewer_percentages]
@viewer_percentages = options[:viewer_percentages]
end
end
# Returns whether the authenticated account is subscribed to the channel.
#
# This method requires {Resource#auth auth} to return an
# authenticated instance of {Yt::Account}.
# @return [Boolean] whether the account is subscribed to the channel.
# @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} does not
# return an authenticated account.
def subscribed?
sleep [(@subscriptions_updated_at || Time.now) - Time.now, 0].max
subscription.exists?
rescue Errors::NoItems
false
end
# Unsubscribes the authenticated account from the channel.
# Raises an error if the account was not subscribed.
#
# This method requires {Resource#auth auth} to return an
# authenticated instance of {Yt::Account}.
# @raise [Yt::Errors::RequestError] if {Resource#auth auth} was not
# subscribed to the channel.
# @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} does not
# return an authenticated account.
def unsubscribe!
subscription.delete.tap{ throttle_subscriptions }
end
# Unsubscribes the authenticated account from the channel.
# Does not raise an error if the account was not subscribed.
#
# This method requires {Resource#auth auth} to return an
# authenticated instance of {Yt::Account}.
# @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} does not
# return an authenticated account.
def unsubscribe
unsubscribe! if subscribed?
end
# Subscribes the authenticated account to the channel.
# Raises an error if the account was already subscribed.
#
# This method requires {Resource#auth auth} to return an
# authenticated instance of {Yt::Account}.
# @raise [Yt::Errors::RequestError] if {Resource#auth auth} was already
# subscribed to the channel.
# @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} does not
# return an authenticated account.
def subscribe!
subscriptions.insert.tap do |subscription|
throttle_subscriptions
@subscription = subscription
end
end
# Subscribes the authenticated account to the channel.
# Does not raise an error if the account was already subscribed.
#
# This method requires {Resource#auth auth} to return an
# authenticated instance of {Yt::Account}.
# @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} does not
# return an authenticated account.
def subscribe
subscriptions.insert(ignore_errors: true).tap do |subscription|
throttle_subscriptions
@subscription = subscription
end
end
def delete_playlists(attrs = {})
playlists.delete_all attrs
end
# @private
# Tells `has_many :videos` that channel.videos should return all the
# videos publicly available on the channel.
def videos_params
{channel_id: id}
end
# @private
# Tells `has_reports` to retrieve the reports from YouTube Analytics API
# either as a Channel or as a Content Owner.
# @see https://developers.google.com/youtube/analytics/v1/reports
def reports_params
{}.tap do |params|
if auth.owner_name
params[:ids] = "contentOwner==#{auth.owner_name}"
params[:filters] = "channel==#{id}"
else
params[:ids] = "channel==#{id}"
end
end
end
# @private
# Tells `has_one :content_owner_detail` to retrieve the content owner
# detail as the Content Owner, it the channel was authorized with one.
# If it was not, the call will fail, since YouTube only allows content
# owners to check who is the content owner of a channel.
def content_owner_details_params
{on_behalf_of_content_owner: auth.owner_name || auth.id}
end
# @private
# @note Google API must have some caching layer by which if we try to
# delete a subscription that we just created, we encounter an error.
# To overcome this, if we have just updated the subscription, we must
# wait some time before requesting it again.
def throttle_subscriptions(seconds = 10)
@subscriptions_updated_at = Time.now + seconds
end
end
end
end | 36.329949 | 103 | 0.654324 |
e9e7a9322dd8ede1d372bc32403153b4ce105470 | 2,159 | # Copyright (c) 2017-present, Facebook, Inc. All rights reserved.
#
# You are hereby granted a non-exclusive, worldwide, royalty-free license to use,
# copy, modify, and distribute this software in source code or binary form for use
# in connection with the web services and APIs provided by Facebook.
#
# As with any software that integrates with the Facebook platform, your use of
# this software is subject to the Facebook Platform Policy
# [http://developers.facebook.com/policy/]. This copyright notice shall be
# included in all copies or substantial portions of the software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# FB:AUTOGEN
module FacebookAds
# This class is auto-generated.
# For any issues or feature requests related to this class, please let us know
# on github and we'll fix in our codegen framework. We'll not be able to accept
# pull request for this class.
class InsightsResult < AdObject
DATE_PRESET = [
"last_14d",
"last_28d",
"last_30d",
"last_3d",
"last_7d",
"last_90d",
"last_month",
"last_quarter",
"last_week_mon_sun",
"last_week_sun_sat",
"last_year",
"lifetime",
"this_month",
"this_quarter",
"this_week_mon_today",
"this_week_sun_today",
"this_year",
"today",
"yesterday",
]
PERIOD = [
"day",
"days_28",
"lifetime",
"month",
"week",
]
field :description, 'string'
field :description_from_api_doc, 'string'
field :id, 'string'
field :name, 'string'
field :period, 'string'
field :title, 'string'
field :values, { list: 'object' }
has_no_get
has_no_post
has_no_delete
end
end
| 29.575342 | 82 | 0.68226 |
01a28586df7eadbf1b263aaf32273b90cc0a2be8 | 1,554 | module Refinery
class UsersController < ::Devise::RegistrationsController
# Protect these actions behind an admin login
before_filter :redirect?, :only => [:new, :create]
layout 'login'
def new
@user = User.new
end
# This method should only be used to create the first Refinery user.
def create
@user = User.new(params[:user])
if @user.create_first
flash[:message] = "<h2>#{t('welcome', :scope => 'refinery.users.create', :who => @user.username).gsub(/\.$/, '')}.</h2>".html_safe
site_name_setting = ::Refinery::Setting.find_or_create_by_name('site_name', :value => "Company Name")
if site_name_setting.value.to_s =~ /^(|Company\ Name)$/ or ::Refinery::Role[:refinery].users.count == 1
flash[:message] << "<p>#{
t('setup_website_name_html', :scope => 'refinery.users',
:link => main_app.edit_refinery_admin_setting_path(site_name_setting, :dialog => true),
:title => t('edit', :scope => 'refinery.admin.settings'))
}</p>".html_safe
end
sign_in(@user)
redirect_back_or_default(main_app.refinery_admin_root_path)
else
render :action => 'new'
end
end
protected
def redirect?
if refinery_user?
redirect_to main_app.refinery_admin_users_path
elsif refinery_users_exist?
redirect_to main_app.new_refinery_user_session_path
end
end
def refinery_users_exist?
::Refinery::Role[:refinery].users.any?
end
end
end
| 30.470588 | 138 | 0.630631 |
ab64f8342088c8b67468a2ad96a448929b0c3128 | 1,846 | module DestroyRestriction
extend ActiveSupport::Concern
module ClassMethods
attr_reader :destroy_restrictions
# Запрещает удалять модель, если выполнится условие из коллбэка
# @param [Proc, Symbol] if
# @param [String, Symbol] attribute название атрибута, который будет добавлен в ошибки вместе с сообщением
# @param [String, Proc] message сообщение, которое будет добавлено в ошибки (строка, либо лямбда, которая будет выполнена в контексте экземпляра класса)
def restricts_destroy(if: -> { true }, attribute: '', message: 'Невозможно удалить объект')
condition_callback = binding.local_variable_get(:if).to_proc
self.destroy_restrictions << { condition: condition_callback, message: message }
before_destroy do
errors.add(attribute, eval_restriction_message(message)) && throw(:abort) if exec_condition(condition_callback)
end
end
def destroy_restrictions
@destroy_restrictions ||= []
end
end
# Может ли объект быть удалён
# @return [Boolean]
def can_be_destroyed?
self.class.destroy_restrictions.none? { |restriction| exec_condition(restriction[:condition]) }
end
# Сообщение об ошибке в случае невозможности удаления
# @param [String] delimiter разделитель между сообщениями
# @return [String]
def destroy_restriction_message(delimiter: ' ')
messages = self.class.destroy_restrictions.map do |restriction|
exec_condition(restriction[:condition]) ? eval_restriction_message(restriction[:message]) : nil
end
messages.compact.join(delimiter)
end
private
def exec_condition(condition)
condition.lambda? ? instance_exec(&condition) : instance_exec(&proc { condition.(self) })
end
def eval_restriction_message(message)
message.respond_to?(:call) ? exec_condition(message) : message
end
end
| 36.92 | 156 | 0.736728 |
91627da5d15b5f1a64ed5e6b3efa1a407d2bd338 | 1,005 | require 'rails_helper'
RSpec.describe Listing, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
# == Schema Information
#
# Table name: listings
#
# id :integer not null, primary key
# source :integer not null
# source_listing_id :text not null
# title :text
# raw_price :text
# raw_location :text
# full_description :text
# listing_attributes :jsonb
# created_at :datetime not null
# updated_at :datetime not null
# latitude :float
# longitude :float
# page_url :string
# listing_type :integer
# bedrooms :integer
# bathrooms :integer
# parsed_price :integer
# cached_image_urls :string is an Array
# expires_at :datetime
# invalid_at :datetime
#
# Indexes
#
# index_listings_on_source_and_source_listing_id (source,source_listing_id) UNIQUE
#
| 27.916667 | 84 | 0.589055 |
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'holy_cow/version'
Gem::Specification.new do |spec|
spec.name = "holy_cow"
spec.version = HolyCow::VERSION
spec.authors = ["Brian Samson"]
spec.email = ["[email protected]"]
spec.summary = "Dead Simple e-mail notification for developers"
spec.description = %q{holy_cow "I can't believe it's this easy to get notification about what's happening in my app."}
spec.homepage = "http://github.com/tenforwardconsulting/holy_cow"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "actionmailer"
spec.add_development_dependency "bundler", "~> 1.5"
spec.add_development_dependency "rake"
end
| 38.538462 | 122 | 0.670659 |
01fcac67efab79d43fd351998e4964eaf37a2018 | 1,592 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2017 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
module CustomActions::Actions::Strategies::Float
include CustomActions::Actions::Strategies::ValidateInRange
def values=(values)
super(Array(values).map { |v| to_float_or_nil(v) }.uniq)
end
def type
:float_property
end
def to_float_or_nil(value)
return nil if value.nil?
Float(value)
rescue TypeError, ArgumentError
nil
end
end
| 31.84 | 91 | 0.749372 |
e9624dc3b6c5e3365c257a896b621b4e70faa435 | 918 | # coding: utf-8
$:.push File.expand_path("../lib", __FILE__)
require 'olap/view/version'
Gem::Specification.new do |spec|
spec.name = "olap-view"
spec.version = Olap::View::VERSION
spec.authors = ["stepanovit"]
spec.email = ["[email protected]"]
spec.summary = %q{OLAP VIEW gem}
spec.description = %q{Ruby On Rails gem to visualize by Google Charts MDX queries on OLAP databases using XMLA connection. Can be used with any XMLA-compliant server, like Olaper or Mondrian.}
spec.homepage = "https://github.com/Wondersoft/olap-view"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.add_development_dependency "bundler", "~> 1.7"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_runtime_dependency "olap-xmla", '~> 0.0', '>= 0.0.10'
end | 43.714286 | 196 | 0.657952 |
180c6d9e420845a75140ba2e2edd000342cc09d2 | 6,826 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::RepositoryImporter do
let(:repository) { double(:repository) }
let(:import_state) { double(:import_state) }
let(:client) { double(:client) }
let(:wiki) do
double(
:wiki,
disk_path: 'foo.wiki',
full_path: 'group/foo.wiki',
repository: wiki_repository
)
end
let(:wiki_repository) do
double(:wiki_repository)
end
let(:project) do
double(
:project,
import_url: 'foo.git',
import_source: 'foo/bar',
repository_storage: 'foo',
disk_path: 'foo',
repository: repository,
create_wiki: true,
import_state: import_state,
full_path: 'group/foo',
lfs_enabled?: true,
wiki: wiki
)
end
let(:importer) { described_class.new(project, client) }
let(:shell_adapter) { Gitlab::Shell.new }
before do
# The method "gitlab_shell" returns a new instance every call, making
# it harder to set expectations. To work around this we'll stub the method
# and return the same instance on every call.
allow(importer).to receive(:gitlab_shell).and_return(shell_adapter)
end
describe '#import_wiki?' do
it 'returns true if the wiki should be imported' do
repo = double(:repo, has_wiki: true)
expect(client)
.to receive(:repository)
.with('foo/bar')
.and_return(repo)
expect(project)
.to receive(:wiki_repository_exists?)
.and_return(false)
expect(Gitlab::GitalyClient::RemoteService)
.to receive(:exists?)
.with("foo.wiki.git")
.and_return(true)
expect(importer.import_wiki?).to be(true)
end
it 'returns false if the GitHub wiki is disabled' do
repo = double(:repo, has_wiki: false)
expect(client)
.to receive(:repository)
.with('foo/bar')
.and_return(repo)
expect(importer.import_wiki?).to eq(false)
end
it 'returns false if the wiki has already been imported' do
repo = double(:repo, has_wiki: true)
expect(client)
.to receive(:repository)
.with('foo/bar')
.and_return(repo)
expect(project)
.to receive(:wiki_repository_exists?)
.and_return(true)
expect(importer.import_wiki?).to eq(false)
end
end
describe '#execute' do
it 'imports the repository and wiki' do
expect(project)
.to receive(:empty_repo?)
.and_return(true)
expect(importer)
.to receive(:import_wiki?)
.and_return(true)
expect(importer)
.to receive(:import_repository)
.and_return(true)
expect(importer)
.to receive(:import_wiki_repository)
.and_return(true)
expect(importer)
.to receive(:update_clone_time)
expect(importer.execute).to eq(true)
end
it 'does not import the repository if it already exists' do
expect(project)
.to receive(:empty_repo?)
.and_return(false)
expect(importer)
.to receive(:import_wiki?)
.and_return(true)
expect(importer)
.not_to receive(:import_repository)
expect(importer)
.to receive(:import_wiki_repository)
.and_return(true)
expect(importer)
.to receive(:update_clone_time)
expect(importer.execute).to eq(true)
end
it 'does not import the wiki if it is disabled' do
expect(project)
.to receive(:empty_repo?)
.and_return(true)
expect(importer)
.to receive(:import_wiki?)
.and_return(false)
expect(importer)
.to receive(:import_repository)
.and_return(true)
expect(importer)
.to receive(:update_clone_time)
expect(importer)
.not_to receive(:import_wiki_repository)
expect(importer.execute).to eq(true)
end
it 'does not import the wiki if the repository could not be imported' do
expect(project)
.to receive(:empty_repo?)
.and_return(true)
expect(importer)
.to receive(:import_wiki?)
.and_return(true)
expect(importer)
.to receive(:import_repository)
.and_return(false)
expect(importer)
.not_to receive(:update_clone_time)
expect(importer)
.not_to receive(:import_wiki_repository)
expect(importer.execute).to eq(false)
end
end
describe '#import_repository' do
it 'imports the repository' do
repo = double(:repo, default_branch: 'develop')
expect(client)
.to receive(:repository)
.with('foo/bar')
.and_return(repo)
expect(project)
.to receive(:change_head)
.with('develop')
expect(project)
.to receive(:ensure_repository)
expect(repository)
.to receive(:fetch_as_mirror)
.with(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true, remote_name: 'github')
service = double
expect(Projects::HousekeepingService)
.to receive(:new).with(project, :gc).and_return(service)
expect(service).to receive(:execute)
expect(importer.import_repository).to eq(true)
end
it 'marks the import as failed when an error was raised' do
expect(project).to receive(:ensure_repository)
.and_raise(Gitlab::Git::Repository::NoRepository)
expect(importer)
.to receive(:fail_import)
.and_return(false)
expect(importer.import_repository).to eq(false)
end
end
describe '#import_wiki_repository' do
it 'imports the wiki repository' do
expect(wiki_repository)
.to receive(:import_repository)
.with(importer.wiki_url)
.and_return(true)
expect(importer.import_wiki_repository).to eq(true)
end
it 'marks the import as failed and creates an empty repo if an error was raised' do
expect(wiki_repository)
.to receive(:import_repository)
.with(importer.wiki_url)
.and_raise(Gitlab::Git::CommandError)
expect(importer)
.to receive(:fail_import)
.and_return(false)
expect(project)
.to receive(:create_wiki)
expect(importer.import_wiki_repository).to eq(false)
end
end
describe '#fail_import' do
it 'marks the import as failed' do
expect(project.import_state).to receive(:mark_as_failed).with('foo')
expect(importer.fail_import('foo')).to eq(false)
end
end
describe '#update_clone_time' do
it 'sets the timestamp for when the cloning process finished' do
freeze_time do
expect(project)
.to receive(:update_column)
.with(:last_repository_updated_at, Time.zone.now)
importer.update_clone_time
end
end
end
end
| 24.912409 | 107 | 0.635804 |
01f1c80781d10681398994876bfa7917bbcfe147 | 4,423 | require 'spec_helper'
describe 'nfs::server' do
%w(5.11 6.8 8).each do |release|
context "on Centos #{release}" do
cached(:chef_run) do
ChefSpec::ServerRunner.new(platform: 'centos', version: release).converge(described_recipe)
end
it 'includes recipe nfs::_common' do
expect(chef_run).to include_recipe('nfs::_common')
end
case release
when '5.11', '6.8'
%w(nfs).each do |svc|
it "starts the #{svc} service" do
expect(chef_run).to start_service(svc)
end
it "enables the #{svc} service" do
expect(chef_run).to enable_service(svc)
end
end
when '8'
%w(nfs-server).each do |svc|
it "starts the #{svc} service" do
expect(chef_run).to start_service(svc)
end
it "enables the #{svc} service" do
expect(chef_run).to enable_service(svc)
end
end
end
end
end
context 'on Amazon 2014.09' do
cached(:chef_run) do
ChefSpec::ServerRunner.new(platform: 'amazon', version: '2015.03').converge(described_recipe)
end
it 'includes recipe nfs::_common' do
expect(chef_run).to include_recipe('nfs::_common')
end
%w(nfs).each do |svc|
it "starts the #{svc} service" do
expect(chef_run).to start_service(svc)
end
it "enables the #{svc} service" do
expect(chef_run).to enable_service(svc)
end
end
end
context 'on FreeBSD' do
cached(:chef_run) do
ChefSpec::ServerRunner.new(platform: 'freebsd', version: '11.1').converge(described_recipe)
end
it 'includes recipe nfs::_common' do
expect(chef_run).to include_recipe('nfs::_common')
end
%w(nfsd).each do |svc|
it "starts the #{svc} service" do
expect(chef_run).to start_service(svc)
end
it "enables the #{svc} service" do
expect(chef_run).to enable_service(svc)
end
end
it 'creates /etc/rc.conf.d/mountd with mountd flags -r -p 32767' do
expect(chef_run).to render_file('/etc/rc.conf.d/mountd').with_content(/mountd_flags="?-r +-p +32767"?/)
end
it 'creates /etc/rc.conf.d/nfsd with server flags -u -t -n 24' do
expect(chef_run).to render_file('/etc/rc.conf.d/nfsd').with_content(/server_flags="?-u +-t +-n +24"?/)
end
end
%w(18.04 16.04).each do |release|
# Submit Ubuntu Fauxhai to https://github.com/customink/fauxhai for better Ubuntu coverage
context "on Ubuntu #{release}" do
cached(:chef_run) do
ChefSpec::ServerRunner.new(platform: 'ubuntu', version: release).converge(described_recipe)
end
it 'includes recipe nfs::_common' do
expect(chef_run).to include_recipe('nfs::_common')
end
it 'creates file /etc/default/nfs-kernel-server with: RPCMOUNTDOPTS="-p 32767"' do
expect(chef_run).to render_file('/etc/default/nfs-kernel-server').with_content(/RPCMOUNTDOPTS="-p +32767"/)
end
%w(nfs-kernel-server).each do |nfs|
it "installs package #{nfs}" do
expect(chef_run).to install_package(nfs)
end
it "starts the #{nfs} service" do
expect(chef_run).to start_service(nfs)
end
it "enables the #{nfs} service" do
expect(chef_run).to enable_service(nfs)
end
end
end
end
context 'on Debian 8.9' do
cached(:chef_run) do
ChefSpec::ServerRunner.new(platform: 'debian', version: '8.9').converge(described_recipe)
end
it 'includes recipe nfs::_common' do
expect(chef_run).to include_recipe('nfs::_common')
end
it 'creates file /etc/default/nfs-kernel-server with: RPCMOUNTDOPTS="-p 32767"' do
expect(chef_run).to render_file('/etc/default/nfs-kernel-server').with_content(/RPCMOUNTDOPTS="-p +32767"/)
end
it 'creates file /etc/default/nfs-kernel-server with: RPCNFSDCOUNT="8"' do
expect(chef_run).to render_file('/etc/default/nfs-kernel-server').with_content(/RPCNFSDCOUNT="8"/)
end
%w(nfs-kernel-server).each do |nfs|
it "installs package #{nfs}" do
expect(chef_run).to install_package(nfs)
end
it "starts the #{nfs} service" do
expect(chef_run).to start_service(nfs)
end
it "enables the #{nfs} service" do
expect(chef_run).to enable_service(nfs)
end
end
end
end
| 29.486667 | 115 | 0.624237 |
01fd4e752e0563ed5895d79b38579128c1c8ed8d | 144 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_packing-list_session'
| 36 | 82 | 0.805556 |
require 'test_helper'
class OrdersControllerTest < ActionController::TestCase
test "new order path should create new order with id set to service_id parameter" do
get "new"
assert_response :success
assert_not_nil assigns(:order)
assert_not_nil :order.service_id
end
test 'A new order should trigger confirmation email' do
# Test that ActionMailer deliveries receives new email when post sent
assert_difference 'ActionMailer::Base.deliveries.size', +1, 'email not delivered' do
post :create, order: {email: '[email protected]',
customer: 'beau davenport',
customer_info: '2/24/1988, St. Louis, MO 3:40',
session_id: 1}
end
test_confirmation_email = ActionMailer::Base.deliveries.last
assert_equal 'Thank you for your order!', test_confirmation_email.subject, 'Subject mismatch'
assert_equal '[email protected]', test_confirmation_email.to[0], 'to address mismatch'
assert_equal '[email protected]', test_confirmation_email.from[0], 'from address mismatch'
end
end
| 36.103448 | 97 | 0.729704 |
7ab315cbd7a241e8bdd13dcfa0a21fb72e54226a | 2,562 | require "administrate/base_dashboard"
class UserDashboard < Administrate::BaseDashboard
# ATTRIBUTE_TYPES
# a hash that describes the type of each of the model's fields.
#
# Each different type represents an Administrate::Field object,
# which determines how the attribute is displayed
# on pages throughout the dashboard.
ATTRIBUTE_TYPES = {
id: Field::Number,
email: Field::String,
encrypted_password: Field::String,
reset_password_token: Field::String,
reset_password_sent_at: Field::DateTime,
remember_created_at: Field::DateTime,
sign_in_count: Field::Number,
current_sign_in_at: Field::DateTime,
last_sign_in_at: Field::DateTime,
current_sign_in_ip: Field::String,
last_sign_in_ip: Field::String,
created_at: Field::DateTime,
updated_at: Field::DateTime,
name: Field::String,
confirmation_token: Field::String,
confirmed_at: Field::DateTime,
confirmation_sent_at: Field::DateTime,
unconfirmed_email: Field::String,
}.freeze
# COLLECTION_ATTRIBUTES
# an array of attributes that will be displayed on the model's index page.
#
# By default, it's limited to four items to reduce clutter on index pages.
# Feel free to add, remove, or rearrange items.
COLLECTION_ATTRIBUTES = [
:id,
:email,
:encrypted_password,
:reset_password_token,
].freeze
# SHOW_PAGE_ATTRIBUTES
# an array of attributes that will be displayed on the model's show page.
SHOW_PAGE_ATTRIBUTES = [
:id,
:email,
:encrypted_password,
:reset_password_token,
:reset_password_sent_at,
:remember_created_at,
:sign_in_count,
:current_sign_in_at,
:last_sign_in_at,
:current_sign_in_ip,
:last_sign_in_ip,
:created_at,
:updated_at,
:name,
:confirmation_token,
:confirmed_at,
:confirmation_sent_at,
:unconfirmed_email,
].freeze
# FORM_ATTRIBUTES
# an array of attributes that will be displayed
# on the model's form (`new` and `edit`) pages.
FORM_ATTRIBUTES = [
:email,
:encrypted_password,
:reset_password_token,
:reset_password_sent_at,
:remember_created_at,
:sign_in_count,
:current_sign_in_at,
:last_sign_in_at,
:current_sign_in_ip,
:last_sign_in_ip,
:name,
:confirmation_token,
:confirmed_at,
:confirmation_sent_at,
:unconfirmed_email,
].freeze
# Overwrite this method to customize how users are displayed
# across all pages of the admin dashboard.
#
# def display_resource(user)
# "User ##{user.id}"
# end
end
| 27.255319 | 76 | 0.708041 |
186eb81ac50237caeda5ffa66afcd1f7a1fcd4f0 | 53 | class Token
def name
"jonathan colby"
end
end | 10.6 | 20 | 0.679245 |
876e41f1a704093a6522a89e7261584225f4c148 | 9,165 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_09_01
#
# AvailableDelegations
#
class AvailableDelegations
include MsRestAzure
#
# Creates and initializes a new instance of the AvailableDelegations class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [NetworkManagementClient] reference to the NetworkManagementClient
attr_reader :client
#
# Gets all of the available subnet delegations for this subscription in this
# region.
#
# @param location [String] The location of the subnet.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<AvailableDelegation>] operation results.
#
def list(location, custom_headers:nil)
first_page = list_as_lazy(location, custom_headers:custom_headers)
first_page.get_all_items
end
#
# Gets all of the available subnet delegations for this subscription in this
# region.
#
# @param location [String] The location of the subnet.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_with_http_info(location, custom_headers:nil)
list_async(location, custom_headers:custom_headers).value!
end
#
# Gets all of the available subnet delegations for this subscription in this
# region.
#
# @param location [String] The location of the subnet.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_async(location, custom_headers:nil)
fail ArgumentError, 'location is nil' if location.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/availableDelegations'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'location' => location,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2019_09_01::Models::AvailableDelegationsResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets all of the available subnet delegations for this subscription in this
# region.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AvailableDelegationsResult] operation results.
#
def list_next(next_page_link, custom_headers:nil)
response = list_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets all of the available subnet delegations for this subscription in this
# region.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_next_with_http_info(next_page_link, custom_headers:nil)
list_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Gets all of the available subnet delegations for this subscription in this
# region.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2019_09_01::Models::AvailableDelegationsResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets all of the available subnet delegations for this subscription in this
# region.
#
# @param location [String] The location of the subnet.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [AvailableDelegationsResult] which provide lazy access to pages of
# the response.
#
def list_as_lazy(location, custom_headers:nil)
response = list_async(location, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 39.166667 | 141 | 0.688489 |
87adb56ef5b55e034a004a83cb9a65986afcf3d8 | 1,249 | require_relative '../spec_helper'
describe 'ValidatesType' do
  # Macro form: `validates_type :attribute, :type`
  context 'validates_type :attribute' do
    subject { ActiveModel::TypeValidationTestClass.set_accessor_and_long_validator(:string) }

    it 'adds a validator to the subject' do
      klass = subject.class
      expect(klass.validators).to_not be_empty
      expect(klass.validators).to include(ActiveModel::Validations::TypeValidator)
    end

    it 'adds the correct validator to the subject' do
      validator = subject.class.validators.find { |v| v.is_a?(ActiveModel::Validations::TypeValidator) }

      # The registered validator must carry the requested type option.
      expect(validator.options[:type]).to eq(:string)
    end
  end

  # Standard-validation form: `validates :attribute, type: { type: ... }`
  context 'validates :attribute, type: { type: type }.merge(other_options)' do
    subject { ActiveModel::TypeValidationTestClass.set_accessor_and_validator(:string) }

    it 'adds a validator to the subject' do
      klass = subject.class
      expect(klass.validators).to_not be_empty
      expect(klass.validators).to include(ActiveModel::Validations::TypeValidator)
    end

    it 'adds the correct validator to the subject' do
      validator = subject.class.validators.find { |v| v.is_a?(ActiveModel::Validations::TypeValidator) }

      # The registered validator must carry the requested type option.
      expect(validator.options[:type]).to eq(:string)
    end
  end
end
| 36.735294 | 104 | 0.726181 |
260caef616c3789828a24a186488b616d02495ff | 231 | #---
# Excerpted from "Ruby on Rails, 2nd Ed."
# We make no guarantees that this code is fit for any purpose.
# Visit http://www.editions-eyrolles.com/Livre/9782212120790/ for more book information.
#---
module LanguageHelper
  # Intentionally empty: view helpers for the language pages would be added
  # here (presumably generated alongside a LanguageController — confirm).
end
| 28.875 | 88 | 0.74026 |
18e92d38166db4f6e2699d8d649b804fa4116102 | 1,780 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
module API
  module Utilities
    # Mixin that memoizes a representer's JSON rendering in the application
    # cache. Including classes must supply #json_cache_key and may override
    # #json_cacheable? to opt out of caching.
    module RepresenterToJsonCache
      # Render JSON, serving a cached copy when the representer allows it.
      def to_json(*)
        return super unless json_cacheable?

        OpenProject::Cache.fetch(*json_representer_name_cache_key, *json_cache_key) do
          super
        end
      end

      # Whether the rendering may be cached at all; override to disable.
      def json_cacheable?
        true
      end

      # Cache key parts identifying the represented payload; must be
      # provided by the including representer.
      def json_cache_key
        raise NotImplementedError
      end

      private

      # Namespaces the cache entry by representer class, format and locale,
      # so different locales never share a cached rendering.
      def json_representer_name_cache_key
        [*self.class.name.to_s.split('::'), 'json', I18n.locale]
      end
    end
  end
end
| 29.666667 | 91 | 0.705618 |
f88c5b68a630f8202abe3df8d3374bc773f091aa | 1,688 | module Issues
class ExportCsvService
include Gitlab::Routing.url_helpers
include GitlabRoutingHelper
# Target attachment size before base64 encoding
TARGET_FILESIZE = 15000000
def initialize(issues_relation)
@issues = issues_relation
@labels = @issues.labels_hash
end
def csv_data
csv_builder.render(TARGET_FILESIZE)
end
def email(user, project)
Notify.issues_csv_email(user, project, csv_data, csv_builder.status).deliver_now
end
def csv_builder
@csv_builder ||=
CsvBuilder.new(@issues.includes(:author, :assignees), header_to_value_hash)
end
private
def header_to_value_hash
{
'Issue ID' => 'iid',
'URL' => -> (issue) { issue_url(issue) },
'Title' => 'title',
'State' => -> (issue) { issue.closed? ? 'Closed' : 'Open' },
'Description' => 'description',
'Author' => 'author_name',
'Author Username' => -> (issue) { issue.author&.username },
'Assignee' => -> (issue) { issue.assignees.map(&:name).join(', ') },
'Assignee Username' => -> (issue) { issue.assignees.map(&:username).join(', ') },
'Confidential' => -> (issue) { issue.confidential? ? 'Yes' : 'No' },
'Due Date' => -> (issue) { issue.due_date&.to_s(:csv) },
'Created At (UTC)' => -> (issue) { issue.created_at&.to_s(:csv) },
'Updated At (UTC)' => -> (issue) { issue.updated_at&.to_s(:csv) },
'Closed At (UTC)' => -> (issue) { issue.closed_at&.to_s(:csv) },
'Milestone' => -> (issue) { issue.milestone&.title },
'Labels' => -> (issue) { @labels[issue.id].sort.join(',').presence }
}
end
end
end
| 33.098039 | 88 | 0.587085 |
33bc25bf297559837ae62b04786f96a44b715489 | 1,330 | require 'fileutils'
cask '[email protected]' do
version '2017.1.2f1,cc85bf6a8a04'
sha256 '1f4159eef6a588cc26ce3317574258f39928fcd04f244e0748be13a384a19d8f'
url "http://download.unity3d.com/download_unity/#{version.after_comma}/MacEditorInstaller/Unity.pkg"
name 'Unity Editor'
homepage 'https://unity3d.com/unity/'
pkg 'Unity.pkg'
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
end
postflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity-#{@cask.version.before_comma}"
end
if File.exist? "/Applications/Unity.temp"
FileUtils.move "/Applications/Unity.temp", "/Applications/Unity"
end
end
uninstall_preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-#{@cask.version.before_comma}"
FileUtils.move "/Applications/Unity-#{@cask.version.before_comma}", "/Applications/Unity"
end
end
uninstall_postflight do
if File.exist? "/Applications/Unity.temp"
FileUtils.move "/Applications/Unity.temp", "/Applications/Unity"
end
end
uninstall quit: '',
pkgutil: ''
end
| 27.708333 | 102 | 0.7 |
01df70824befe14e64f3321d696584952c311645 | 4,772 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "react_api_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end | 45.447619 | 114 | 0.762364 |
bbf4067b8e98dfac522047de7da8117feed1e692 | 1,336 | # frozen_string_literal: true
module Sourcescrub
  # Models
  module Models
    # Wraps a "company items" API response: pagination metadata plus a typed
    # collection of items (people, tags, ...).
    class CompanyItems < Entity
      attr_accessor :domain, :total, :items, :type

      # Populates this object from a raw API +response+ hash for +domain+,
      # materializing each item as an instance of +kclass_name+.
      # Returns self.
      def parse_response_items(domain, kclass_name, response)
        headers = response.dig('headers')

        # Expose every response header as a reader method; dashes in header
        # names become underscores. Each reader closes over +headers+.
        headers&.each_key do |header_name|
          reader_name = header_name.gsub('-', '_').to_sym
          self.class.send(:define_method, reader_name) do
            headers[header_name]
          end
        end

        dynamic_define_method(self, 'domain', domain)
        dynamic_define_method(self, 'type', kclass_name)
        dynamic_define_method(self, 'total', response.dig(total_key) || 0)
        dynamic_define_method(self, 'items', company_items(kclass_name, response.dig(items_key) || []))
        self
      end

      private

      # Instantiates one +kclass_name+ object per raw item hash.
      def company_items(kclass_name, items)
        items.map { |raw_item| kclass_name.new.parse_response(raw_item) }
      end

      # Person payloads use different pagination keys than other item types.
      def total_key
        type_name == 'Person' ? 'totalPeople' : 'total'
      end

      def items_key
        type_name == 'Person' ? 'peopleAllocations' : 'items'
      end

      # Demodulized name of the item class (e.g. "Person").
      def type_name
        type.name.split('::').last
      end
    end
  end
end
| 23.438596 | 103 | 0.584581 |
4a70424ec01568acd93427dbc70403c8f44d70df | 325 | # frozen_string_literal: true
class Projects::DeployTokensController < Projects::ApplicationController
  # Only project admins may manage deploy tokens.
  before_action :authorize_admin_project!

  # Revokes a single deploy token, then returns to the CI/CD settings page
  # anchored at the deploy-tokens section.
  def revoke
    @token = @project.deploy_tokens.find(params[:id])
    @token.revoke!

    redirect_to project_settings_ci_cd_path(project, anchor: 'js-deploy-tokens')
  end
end
| 25 | 80 | 0.778462 |
bbc4d3c1db941999220cf9fa9fb68a4827c5cec2 | 14,900 | # -*- coding: utf-8 -*-
# Redmine - project management software
# Copyright (C) 2006-2014 Jean-Philippe Lang
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
require File.expand_path('../../test_helper', __FILE__)
# Functional tests for TimelogController#report: report rendering at
# project/issue/global scope, grouping criteria, and CSV export (including
# Big5 and ISO-8859-1 encoding behavior).
class TimeEntryReportsControllerTest < ActionController::TestCase
  tests TimelogController

  fixtures :projects, :enabled_modules, :roles, :members, :member_roles,
           :issues, :time_entries, :users, :trackers, :enumerations,
           :issue_statuses, :custom_fields, :custom_values,
           :projects_trackers, :custom_fields_trackers,
           :custom_fields_projects

  include Redmine::I18n

  def setup
    # Most expectations below assume English labels and CSV conventions.
    Setting.default_language = "en"
  end

  # --- Report rendering at different scopes -------------------------------

  def test_report_at_project_level
    get :report, :project_id => 'ecookbook'
    assert_response :success
    assert_template 'report'
    assert_tag :form,
      :attributes => {:action => "/projects/ecookbook/time_entries/report", :id => 'query_form'}
  end

  def test_report_all_projects
    get :report
    assert_response :success
    assert_template 'report'
    assert_tag :form,
      :attributes => {:action => "/time_entries/report", :id => 'query_form'}
  end

  def test_report_all_projects_denied
    # Without the view_time_entries permission, anonymous users are sent to login.
    r = Role.anonymous
    r.permissions.delete(:view_time_entries)
    r.permissions_will_change!
    r.save
    get :report
    assert_redirected_to '/login?back_url=http%3A%2F%2Ftest.host%2Ftime_entries%2Freport'
  end

  # --- Grouping criteria ---------------------------------------------------

  def test_report_all_projects_one_criteria
    get :report, :columns => 'week', :from => "2007-04-01", :to => "2007-04-30", :criteria => ['project']
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal "8.65", "%.2f" % assigns(:report).total_hours
  end

  def test_report_all_time
    get :report, :project_id => 1, :criteria => ['project', 'issue']
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal "162.90", "%.2f" % assigns(:report).total_hours
  end

  def test_report_all_time_by_day
    get :report, :project_id => 1, :criteria => ['project', 'issue'], :columns => 'day'
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal "162.90", "%.2f" % assigns(:report).total_hours
    assert_tag :tag => 'th', :content => '2007-03-12'
  end

  def test_report_one_criteria
    get :report, :project_id => 1, :columns => 'week', :from => "2007-04-01", :to => "2007-04-30", :criteria => ['project']
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal "8.65", "%.2f" % assigns(:report).total_hours
  end

  def test_report_two_criteria
    get :report, :project_id => 1, :columns => 'month', :from => "2007-01-01", :to => "2007-12-31", :criteria => ["user", "activity"]
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal "162.90", "%.2f" % assigns(:report).total_hours
  end

  def test_report_custom_field_criteria_with_multiple_values_on_single_value_custom_field_should_not_fail
    # Regression: two values stored for a single-value custom field must not
    # break the report.
    field = TimeEntryCustomField.create!(:name => 'multi', :field_format => 'list', :possible_values => ['value1', 'value2'])
    entry = TimeEntry.create!(:project => Project.find(1), :hours => 1, :activity_id => 10, :user => User.find(2), :spent_on => Date.today)
    CustomValue.create!(:customized => entry, :custom_field => field, :value => 'value1')
    CustomValue.create!(:customized => entry, :custom_field => field, :value => 'value2')

    get :report, :project_id => 1, :columns => 'day', :criteria => ["cf_#{field.id}"]
    assert_response :success
  end

  def test_report_multiple_values_custom_fields_should_not_be_proposed
    TimeEntryCustomField.create!(:name => 'Single', :field_format => 'list', :possible_values => ['value1', 'value2'])
    TimeEntryCustomField.create!(:name => 'Multi', :field_format => 'list', :multiple => true, :possible_values => ['value1', 'value2'])

    get :report, :project_id => 1
    assert_response :success
    assert_select 'select[name=?]', 'criteria[]' do
      assert_select 'option', :text => 'Single'
      assert_select 'option', :text => 'Multi', :count => 0
    end
  end

  def test_report_one_day
    get :report, :project_id => 1, :columns => 'day', :from => "2007-03-23", :to => "2007-03-23", :criteria => ["user", "activity"]
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal "4.25", "%.2f" % assigns(:report).total_hours
  end

  def test_report_at_issue_level
    get :report, :issue_id => 1, :columns => 'month', :from => "2007-01-01", :to => "2007-12-31", :criteria => ["user", "activity"]
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal "154.25", "%.2f" % assigns(:report).total_hours
    assert_tag :form,
      :attributes => {:action => "/issues/1/time_entries/report", :id => 'query_form'}
  end

  def test_report_by_week_should_use_commercial_year
    # Week columns follow ISO/commercial weeks, so Jan 1st 2010 still falls
    # into week 2009-53.
    TimeEntry.delete_all
    TimeEntry.generate!(:hours => '2', :spent_on => '2009-12-25') # 2009-52
    TimeEntry.generate!(:hours => '4', :spent_on => '2009-12-31') # 2009-53
    TimeEntry.generate!(:hours => '8', :spent_on => '2010-01-01') # 2009-53
    TimeEntry.generate!(:hours => '16', :spent_on => '2010-01-05') # 2010-1

    get :report, :columns => 'week', :from => "2009-12-25", :to => "2010-01-05", :criteria => ["project"]
    assert_response :success

    assert_select '#time-report thead tr' do
      assert_select 'th:nth-child(1)', :text => 'Project'
      assert_select 'th:nth-child(2)', :text => '2009-52'
      assert_select 'th:nth-child(3)', :text => '2009-53'
      assert_select 'th:nth-child(4)', :text => '2010-1'
      assert_select 'th:nth-child(5)', :text => 'Total time'
    end
    assert_select '#time-report tbody tr' do
      assert_select 'td:nth-child(1)', :text => 'eCookbook'
      assert_select 'td:nth-child(2)', :text => '2.00'
      assert_select 'td:nth-child(3)', :text => '12.00'
      assert_select 'td:nth-child(4)', :text => '16.00'
      assert_select 'td:nth-child(5)', :text => '30.00' # Total
    end
  end

  def test_report_should_propose_association_custom_fields
    get :report
    assert_response :success
    assert_template 'report'

    assert_select 'select[name=?]', 'criteria[]' do
      assert_select 'option[value=cf_1]', {:text => 'Database'}, 'Issue custom field not found'
      assert_select 'option[value=cf_3]', {:text => 'Development status'}, 'Project custom field not found'
      assert_select 'option[value=cf_7]', {:text => 'Billable'}, 'TimeEntryActivity custom field not found'
    end
  end

  def test_report_with_association_custom_fields
    get :report, :criteria => ['cf_1', 'cf_3', 'cf_7']
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal 3, assigns(:report).criteria.size
    assert_equal "162.90", "%.2f" % assigns(:report).total_hours

    # Custom fields columns
    assert_select 'th', :text => 'Database'
    assert_select 'th', :text => 'Development status'
    assert_select 'th', :text => 'Billable'

    # Custom field row
    assert_select 'tr' do
      assert_select 'td', :text => 'MySQL'
      assert_select 'td.hours', :text => '1.00'
    end
  end

  def test_report_one_criteria_no_result
    get :report, :project_id => 1, :columns => 'week', :from => "1998-04-01", :to => "1998-04-30", :criteria => ['project']
    assert_response :success
    assert_template 'report'
    assert_not_nil assigns(:report)
    assert_equal "0.00", "%.2f" % assigns(:report).total_hours
  end

  def test_report_status_criterion
    get :report, :project_id => 1, :criteria => ['status']
    assert_response :success
    assert_template 'report'
    assert_tag :tag => 'th', :content => 'Status'
    assert_tag :tag => 'td', :content => 'New'
  end

  # --- CSV export ----------------------------------------------------------

  def test_report_all_projects_csv_export
    get :report, :columns => 'month', :from => "2007-01-01", :to => "2007-06-30",
        :criteria => ["project", "user", "activity"], :format => "csv"
    assert_response :success
    assert_equal 'text/csv; header=present', @response.content_type
    lines = @response.body.chomp.split("\n")
    # Headers
    assert_equal 'Project,User,Activity,2007-3,2007-4,Total time', lines.first
    # Total row
    assert_equal 'Total time,"","",154.25,8.65,162.90', lines.last
  end

  def test_report_csv_export
    get :report, :project_id => 1, :columns => 'month',
        :from => "2007-01-01", :to => "2007-06-30",
        :criteria => ["project", "user", "activity"], :format => "csv"
    assert_response :success
    assert_equal 'text/csv; header=present', @response.content_type
    lines = @response.body.chomp.split("\n")
    # Headers
    assert_equal 'Project,User,Activity,2007-3,2007-4,Total time', lines.first
    # Total row
    assert_equal 'Total time,"","",154.25,8.65,162.90', lines.last
  end

  # CSV export in zh-TW: UTF-8 data must come back encoded as Big5.
  def test_csv_big_5
    Setting.default_language = "zh-TW"
    str_utf8  = "\xe4\xb8\x80\xe6\x9c\x88"
    str_big5  = "\xa4@\xa4\xeb"
    if str_utf8.respond_to?(:force_encoding)
      str_utf8.force_encoding('UTF-8')
      str_big5.force_encoding('Big5')
    end
    user = User.find_by_id(3)
    user.firstname = str_utf8
    user.lastname  = "test-lastname"
    assert user.save
    comments = "test_csv_big_5"
    te1 = TimeEntry.create(:spent_on => '2011-11-11',
                           :hours    => 7.3,
                           :project  => Project.find(1),
                           :user     => user,
                           :activity => TimeEntryActivity.find_by_name('Design'),
                           :comments => comments)

    te2 = TimeEntry.find_by_comments(comments)
    assert_not_nil te2
    assert_equal 7.3, te2.hours
    assert_equal 3, te2.user_id

    get :report, :project_id => 1, :columns => 'day',
        :from => "2011-11-11", :to => "2011-11-11",
        :criteria => ["user"], :format => "csv"
    assert_response :success
    assert_equal 'text/csv; header=present', @response.content_type
    lines = @response.body.chomp.split("\n")
    # Headers
    s1 = "\xa5\xce\xa4\xe1,2011-11-11,\xa4u\xae\xc9\xc1`\xadp"
    s2 = "\xa4u\xae\xc9\xc1`\xadp"
    if s1.respond_to?(:force_encoding)
      s1.force_encoding('Big5')
      s2.force_encoding('Big5')
    end
    assert_equal s1, lines.first
    # Total row
    assert_equal "#{str_big5} #{user.lastname},7.30,7.30", lines[1]
    assert_equal "#{s2},7.30,7.30", lines[2]

    str_tw = "Traditional Chinese (\xe7\xb9\x81\xe9\xab\x94\xe4\xb8\xad\xe6\x96\x87)"
    if str_tw.respond_to?(:force_encoding)
      str_tw.force_encoding('UTF-8')
    end
    assert_equal str_tw, l(:general_lang_name)
    assert_equal 'Big5', l(:general_csv_encoding)
    assert_equal ',', l(:general_csv_separator)
    assert_equal '.', l(:general_csv_decimal_separator)
  end

  # Characters with no Big5 equivalent must be replaced, not crash the export.
  def test_csv_cannot_convert_should_be_replaced_big_5
    Setting.default_language = "zh-TW"
    str_utf8  = "\xe4\xbb\xa5\xe5\x86\x85"
    if str_utf8.respond_to?(:force_encoding)
      str_utf8.force_encoding('UTF-8')
    end
    user = User.find_by_id(3)
    user.firstname = str_utf8
    user.lastname  = "test-lastname"
    assert user.save
    comments = "test_replaced"
    te1 = TimeEntry.create(:spent_on => '2011-11-11',
                           :hours    => 7.3,
                           :project  => Project.find(1),
                           :user     => user,
                           :activity => TimeEntryActivity.find_by_name('Design'),
                           :comments => comments)

    te2 = TimeEntry.find_by_comments(comments)
    assert_not_nil te2
    assert_equal 7.3, te2.hours
    assert_equal 3, te2.user_id

    get :report, :project_id => 1, :columns => 'day',
        :from => "2011-11-11", :to => "2011-11-11",
        :criteria => ["user"], :format => "csv"
    assert_response :success
    assert_equal 'text/csv; header=present', @response.content_type
    lines = @response.body.chomp.split("\n")
    # Headers
    s1 = "\xa5\xce\xa4\xe1,2011-11-11,\xa4u\xae\xc9\xc1`\xadp"
    if s1.respond_to?(:force_encoding)
      s1.force_encoding('Big5')
    end
    assert_equal s1, lines.first
    # Total row
    # Replacement output differs per Ruby implementation/encoding support.
    s2 = ""
    if s2.respond_to?(:force_encoding)
      s2 = "\xa5H?"
      s2.force_encoding('Big5')
    elsif RUBY_PLATFORM == 'java'
      s2 = "??"
    else
      s2 = "\xa5H???"
    end
    assert_equal "#{s2} #{user.lastname},7.30,7.30", lines[1]
  end

  # CSV export in fr: ISO-8859-1 encoding, ';' separator, ',' decimal mark.
  def test_csv_fr
    with_settings :default_language => "fr" do
      str1  = "test_csv_fr"
      user = User.find_by_id(3)
      te1 = TimeEntry.create(:spent_on => '2011-11-11',
                             :hours    => 7.3,
                             :project  => Project.find(1),
                             :user     => user,
                             :activity => TimeEntryActivity.find_by_name('Design'),
                             :comments => str1)

      te2 = TimeEntry.find_by_comments(str1)
      assert_not_nil te2
      assert_equal 7.3, te2.hours
      assert_equal 3, te2.user_id

      get :report, :project_id => 1, :columns => 'day',
          :from => "2011-11-11", :to => "2011-11-11",
          :criteria => ["user"], :format => "csv"
      assert_response :success
      assert_equal 'text/csv; header=present', @response.content_type
      lines = @response.body.chomp.split("\n")
      # Headers
      s1 = "Utilisateur;2011-11-11;Temps total"
      s2 = "Temps total"
      if s1.respond_to?(:force_encoding)
        s1.force_encoding('ISO-8859-1')
        s2.force_encoding('ISO-8859-1')
      end
      assert_equal s1, lines.first
      # Total row
      assert_equal "#{user.firstname} #{user.lastname};7,30;7,30", lines[1]
      assert_equal "#{s2};7,30;7,30", lines[2]

      str_fr = "Fran\xc3\xa7ais"
      if str_fr.respond_to?(:force_encoding)
        str_fr.force_encoding('UTF-8')
      end
      assert_equal str_fr, l(:general_lang_name)
      assert_equal 'ISO-8859-1', l(:general_csv_encoding)
      assert_equal ';', l(:general_csv_separator)
      assert_equal ',', l(:general_csv_decimal_separator)
    end
  end
end
| 38.501292 | 139 | 0.640537 |
4a1b92b6fb17c641daf1ddb338bf09f796a0f275 | 380 | module AnswersHelper
# def user_answer_edit(current_user, answer)
# if current_user == answer.user
# link_to "Edit Answer", edit_answer_path(answer)
# end
#
# end
def user_answer_delete(current_user, question, answer)
if current_user == answer.user
link_to "Delete", question_answer_path(question, answer), :method => 'delete'
end
end
end
| 22.352941 | 83 | 0.694737 |
21d71a2f487413aa1f272d789847dea5e54d12a5 | 664 | require 'spec_helper'
RSpec.describe 'news_items/latest_news.html.erb', type: :view do
  # Rendering the partial is the subject, so each one-liner below matches
  # directly against the produced markup.
  subject { render 'news_items/latest_news', news_item: news_item }

  # Content carries the [[SERVICE_NAME]] placeholder, which the view is
  # expected to substitute with the configured service name.
  let(:news_item) { build :news_item, content: "[[SERVICE_NAME]]\n\nFirst para" }

  it { is_expected.to have_css '.latest-news-banner.govuk-notification-banner' }
  it { is_expected.to have_css '.latest-news-banner h2', text: news_item.title }
  it { is_expected.to have_css '.latest-news-banner .tariff-markdown p' }
  it { is_expected.to have_css '.latest-news-banner p', text: /#{I18n.t('title.service_name.uk')}/ }
  it { is_expected.to have_link 'Show more ...', href: news_item_path(news_item) }
end
| 47.428571 | 100 | 0.725904 |
611a886d25231e514a61e3209803fbd651d4022e | 22,796 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::CiConfiguration::SastBuildAction do
let(:default_sast_values) do
{ global:
[
{ field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'registry.gitlab.com/security-products' }
],
pipeline:
[
{ field: 'stage', default_value: 'test', value: 'test' },
{ field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 4 },
{ field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec, test, tests, tmp' }
] }
end
let(:params) do
{ global:
[
{ field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'new_registry' }
],
pipeline:
[
{ field: 'stage', default_value: 'test', value: 'security' },
{ field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 1 },
{ field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec,docs' }
] }
end
let(:params_with_analyzer_info) do
params.merge( { analyzers:
[
{
name: "bandit",
enabled: false
},
{
name: "brakeman",
enabled: true,
variables: [
{ field: "SAST_BRAKEMAN_LEVEL",
default_value: "1",
value: "2" }
]
},
{
name: "flawfinder",
enabled: true,
variables: [
{ field: "SAST_FLAWFINDER_LEVEL",
default_value: "1",
value: "1" }
]
}
] }
)
end
let(:params_with_all_analyzers_enabled) do
params.merge( { analyzers:
[
{
name: "flawfinder",
enabled: true
},
{
name: "brakeman",
enabled: true
}
] }
)
end
context 'with existing .gitlab-ci.yml' do
let(:auto_devops_enabled) { false }
context 'sast has not been included' do
context 'template includes are array' do
let(:gitlab_ci_content) { existing_gitlab_ci_and_template_array_without_sast }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:action]).to eq('update')
expect(result[:content]).to eq(sast_yaml_two_includes)
end
end
context 'template include is not an array' do
let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_without_sast }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:action]).to eq('update')
expect(result[:content]).to eq(sast_yaml_two_includes)
end
it 'reports defaults have been overwritten' do
expect(result[:default_values_overwritten]).to eq(true)
end
end
end
context 'sast template include is not an array' do
let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:action]).to eq('update')
expect(result[:content]).to eq(sast_yaml_all_params)
end
end
context 'with default values' do
let(:params) { default_sast_values }
let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:content]).to eq(sast_yaml_with_no_variables_set)
end
it 'reports defaults have not been overwritten' do
expect(result[:default_values_overwritten]).to eq(false)
end
context 'analyzer section' do
let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
subject(:result) { described_class.new(auto_devops_enabled, params_with_analyzer_info, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers)
end
context 'analyzers are disabled' do
let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
subject(:result) { described_class.new(auto_devops_enabled, params_with_analyzer_info, gitlab_ci_content).generate }
it 'writes SAST_EXCLUDED_ANALYZERS' do
expect(result[:content]).to eq(sast_yaml_with_no_variables_set_but_analyzers)
end
end
context 'all analyzers are enabled' do
let(:gitlab_ci_content) { existing_gitlab_ci_and_single_template_with_sast_and_default_stage }
subject(:result) { described_class.new(auto_devops_enabled, params_with_all_analyzers_enabled, gitlab_ci_content).generate }
it 'does not write SAST_EXCLUDED_ANALYZERS' do
expect(result[:content]).to eq(sast_yaml_with_no_variables_set)
end
end
end
end
context 'with update stage and SEARCH_MAX_DEPTH and set SECURE_ANALYZERS_PREFIX to default' do
let(:params) do
{ global:
[
{ field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: 'registry.gitlab.com/security-products' }
],
pipeline:
[
{ field: 'stage', default_value: 'test', value: 'brand_new_stage' },
{ field: 'SEARCH_MAX_DEPTH', default_value: 4, value: 5 },
{ field: 'SAST_EXCLUDED_PATHS', default_value: 'spec, test, tests, tmp', value: 'spec,docs' }
] }
end
let(:gitlab_ci_content) { existing_gitlab_ci }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:action]).to eq('update')
expect(result[:content]).to eq(sast_yaml_updated_stage)
end
end
context 'with no existing variables' do
let(:gitlab_ci_content) { existing_gitlab_ci_with_no_variables }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:action]).to eq('update')
expect(result[:content]).to eq(sast_yaml_variable_section_added)
end
end
context 'with no existing sast config' do
let(:gitlab_ci_content) { existing_gitlab_ci_with_no_sast_section }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:action]).to eq('update')
expect(result[:content]).to eq(sast_yaml_sast_section_added)
end
end
context 'with no existing sast variables' do
let(:gitlab_ci_content) { existing_gitlab_ci_with_no_sast_variables }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:action]).to eq('update')
expect(result[:content]).to eq(sast_yaml_sast_variables_section_added)
end
end
def existing_gitlab_ci_and_template_array_without_sast
{ "stages" => %w(test security),
"variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
"sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" },
"include" => [{ "template" => "existing.yml" }] }
end
def existing_gitlab_ci_and_single_template_with_sast_and_default_stage
{ "stages" => %w(test),
"variables" => { "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
"sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "test" },
"include" => { "template" => "Security/SAST.gitlab-ci.yml" } }
end
def existing_gitlab_ci_and_single_template_without_sast
{ "stages" => %w(test security),
"variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
"sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" },
"include" => { "template" => "existing.yml" } }
end
def existing_gitlab_ci_with_no_variables
{ "stages" => %w(test security),
"sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" },
"include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] }
end
def existing_gitlab_ci_with_no_sast_section
{ "stages" => %w(test security),
"variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
"include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] }
end
def existing_gitlab_ci_with_no_sast_variables
{ "stages" => %w(test security),
"variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "localhost:5000/analyzers" },
"sast" => { "stage" => "security" },
"include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] }
end
def existing_gitlab_ci
{ "stages" => %w(test security),
"variables" => { "RANDOM" => "make sure this persists", "SECURE_ANALYZERS_PREFIX" => "bad_prefix" },
"sast" => { "variables" => { "SEARCH_MAX_DEPTH" => 1 }, "stage" => "security" },
"include" => [{ "template" => "Security/SAST.gitlab-ci.yml" }] }
end
end
context 'with no .gitlab-ci.yml' do
let(:gitlab_ci_content) { nil }
context 'autodevops disabled' do
let(:auto_devops_enabled) { false }
context 'with one empty parameter' do
let(:params) do
{ global:
[
{ field: 'SECURE_ANALYZERS_PREFIX', default_value: 'registry.gitlab.com/security-products', value: '' }
] }
end
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:content]).to eq(sast_yaml_with_no_variables_set)
end
end
context 'with all parameters' do
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
it 'generates the correct YML' do
expect(result[:content]).to eq(sast_yaml_all_params)
end
end
end
context 'with autodevops enabled' do
let(:auto_devops_enabled) { true }
subject(:result) { described_class.new(auto_devops_enabled, params, gitlab_ci_content).generate }
before do
allow_next_instance_of(described_class) do |sast_build_action|
allow(sast_build_action).to receive(:auto_devops_stages).and_return(fast_auto_devops_stages)
end
end
it 'generates the correct YML' do
expect(result[:content]).to eq(auto_devops_with_custom_stage)
end
end
end
# stubbing this method allows this spec file to use fast_spec_helper
def fast_auto_devops_stages
auto_devops_template = YAML.safe_load( File.read('lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml') )
auto_devops_template['stages']
end
def sast_yaml_with_no_variables_set_but_analyzers
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- test
sast:
variables:
SAST_EXCLUDED_ANALYZERS: bandit
SAST_BRAKEMAN_LEVEL: '2'
stage: test
include:
- template: Security/SAST.gitlab-ci.yml
CI_YML
end
def sast_yaml_with_no_variables_set
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- test
sast:
stage: test
include:
- template: Security/SAST.gitlab-ci.yml
CI_YML
end
def sast_yaml_all_params
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- test
- security
variables:
SECURE_ANALYZERS_PREFIX: new_registry
sast:
variables:
SAST_EXCLUDED_PATHS: spec,docs
SEARCH_MAX_DEPTH: 1
stage: security
include:
- template: Security/SAST.gitlab-ci.yml
CI_YML
end
def auto_devops_with_custom_stage
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- build
- test
- deploy
- review
- dast
- staging
- canary
- production
- incremental rollout 10%
- incremental rollout 25%
- incremental rollout 50%
- incremental rollout 100%
- performance
- cleanup
- security
variables:
SECURE_ANALYZERS_PREFIX: new_registry
sast:
variables:
SAST_EXCLUDED_PATHS: spec,docs
SEARCH_MAX_DEPTH: 1
stage: security
include:
- template: Auto-DevOps.gitlab-ci.yml
CI_YML
end
def sast_yaml_two_includes
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- test
- security
variables:
RANDOM: make sure this persists
SECURE_ANALYZERS_PREFIX: new_registry
sast:
variables:
SAST_EXCLUDED_PATHS: spec,docs
SEARCH_MAX_DEPTH: 1
stage: security
include:
- template: existing.yml
- template: Security/SAST.gitlab-ci.yml
CI_YML
end
def sast_yaml_variable_section_added
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- test
- security
sast:
variables:
SAST_EXCLUDED_PATHS: spec,docs
SEARCH_MAX_DEPTH: 1
stage: security
include:
- template: Security/SAST.gitlab-ci.yml
variables:
SECURE_ANALYZERS_PREFIX: new_registry
CI_YML
end
def sast_yaml_sast_section_added
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- test
- security
variables:
RANDOM: make sure this persists
SECURE_ANALYZERS_PREFIX: new_registry
include:
- template: Security/SAST.gitlab-ci.yml
sast:
variables:
SAST_EXCLUDED_PATHS: spec,docs
SEARCH_MAX_DEPTH: 1
stage: security
CI_YML
end
def sast_yaml_sast_variables_section_added
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- test
- security
variables:
RANDOM: make sure this persists
SECURE_ANALYZERS_PREFIX: new_registry
sast:
stage: security
variables:
SAST_EXCLUDED_PATHS: spec,docs
SEARCH_MAX_DEPTH: 1
include:
- template: Security/SAST.gitlab-ci.yml
CI_YML
end
def sast_yaml_updated_stage
<<-CI_YML.strip_heredoc
# You can override the included template(s) by including variable overrides
# SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings
# Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings
# Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings
# Container Scanning customization: https://docs.gitlab.com/ee/user/application_security/container_scanning/#customizing-the-container-scanning-settings
# Note that environment variables can be set in several places
# See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence
stages:
- test
- security
- brand_new_stage
variables:
RANDOM: make sure this persists
sast:
variables:
SAST_EXCLUDED_PATHS: spec,docs
SEARCH_MAX_DEPTH: 5
stage: brand_new_stage
include:
- template: Security/SAST.gitlab-ci.yml
CI_YML
end
end
| 41.750916 | 161 | 0.670951 |
626e5d54ae94be9ce47302f19f7ded23b043ab84 | 109 | class UsersController < Muck::UsersController
def signup_complete
redirect_to profiles_path
end
end | 15.571429 | 45 | 0.798165 |
876391c9a1936446b206a2a8b796e97f9981c3fe | 3,072 | # The 0.9.0 tag in homebrew repository isn't working in OSX Mavericks
# this version fixes Cpp11 problems about syntax and tr1 headers
# All patches and this file can be found in this public gist:
# https://gist.githubusercontent.com/rafaelverger/58b6eeafaae7d28b06cc
class Thrift090 < Formula
homepage "http://thrift.apache.org"
url "https://archive.apache.org/dist/thrift/0.9.0/thrift-0.9.0.tar.gz"
sha256 "71d129c49a2616069d9e7a93268cdba59518f77b3c41e763e09537cb3f3f0aac"
bottle do
root_url "https://homebrew.bintray.com/bottles-versions"
cellar :any
sha256 "dea7ccc0fb965a709abddfad87d5ecb886e0b5d2f4618622d320f259bccf7aed" => :yosemite
sha256 "a9b9bf0bb4039b83d80b617d20f9a185b2f771c4e72284843c29253bd3fbbdcb" => :mavericks
sha256 "9c83d58ec3c0b0dc1477dc5c3496ba793893441790e7827e5020a5e775e993d5" => :mountain_lion
end
# These patches are 0.9.0-specific and can go away once a newer version is release
[
# patch-tsocket.patch
%w[ca4565122f0a1365f2409bce85dc0b8942459b18 a1dc9e54ffacf04c6ba6d1e37b734684ff09d149a88ca7425a4237267f674829],
# patch-cxx11-compat.patch
%w[8ab0d22b3df198e6b7a14e9da6fd34d2d6218cbf 74fd5282f159bf4d7ee5ca977b36534e2182709fe4c17cc5907c6bd615cfe0ef],
# patch-use-boost-cpp-client-server.patch
%w[50629b8ac1fb3d606185f39cfd7b6a4848e3a93d 2ea5a69c5358a56ef945d4fb127c11a7797afc751743b20f58dfff0955a68117],
# patch-remove-tr1-dependency.patch
%w[7bf1cd9deb7b483845458e901c37ad4d8404a8e7 c4419ce40b7fda9ffd58a5dad7856b64ee84e3c1b820f3a64fed0b01b4bc9c42],
].each do |hash, sha|
patch do
url "https://gist.githubusercontent.com/rafaelverger/58b6eeafaae7d28b06cc/raw/#{hash}"
sha256 sha
end
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
depends_on "boost"
depends_on "openssl"
depends_on :python => :optional
option "with-haskell", "Install Haskell binding"
option "with-erlang", "Install Erlang binding"
option "with-java", "Install Java binding"
option "with-perl", "Install Perl binding"
option "with-php", "Install Php binding"
def install
args = ["--without-ruby", "--without-tests", "--without-php_extension"]
args << "--without-python" if build.without? "python"
args << "--without-haskell" if build.without? "haskell"
args << "--without-java" if build.without? "java"
args << "--without-perl" if build.without? "perl"
args << "--without-php" if build.without? "php"
args << "--without-erlang" if build.without? "erlang"
ENV.cxx11 if MacOS.version >= :mavericks && ENV.compiler == :clang
# Don't install extensions to /usr:
ENV["PY_PREFIX"] = prefix
ENV["PHP_PREFIX"] = prefix
system "./configure", "--disable-debug",
"--prefix=#{prefix}",
"--libdir=#{lib}",
*args
ENV.j1
system "make", "install"
end
test do
assert_match /Thrift/, shell_output("#{bin}/thrift --version", 1)
end
end
| 38.886076 | 114 | 0.719076 |
6af201161bc28d6ff905dbc80baa0cd6bdb3f56b | 1,791 | class Foma < Formula
desc "Finite-state compiler and C library"
homepage "https://code.google.com/p/foma/"
url "https://bitbucket.org/mhulden/foma/downloads/foma-0.9.18.tar.gz"
sha256 "cb380f43e86fc7b3d4e43186db3e7cff8f2417e18ea69cc991e466a3907d8cbd"
license "GPL-2.0-only"
revision 1 unless OS.mac?
livecheck do
url :stable
end
bottle do
cellar :any
rebuild 1
sha256 "cdf3b3105f0207ddea3f5b0ba458b650cab22b1ac3db85896631ec5304cc5bf1" => :big_sur
sha256 "8cac09b69356887a31f4d2314b9eb7a193ad21858b0cc43ade7d48a485e4b55d" => :arm64_big_sur
sha256 "dc0a238f67280d9e15e50bc7064669f1715170c9a59d608537ed195801db0c9e" => :catalina
sha256 "a3b11300d427959a0ca8aa908d6c43369a8c17889a63f56d7772c6c4fdaeee04" => :mojave
sha256 "d223eaa3a2f821d24b5f3b5486494a1a029f96e1640d4fe6f3633e6ad53e14a9" => :high_sierra
sha256 "1f30a342aeb7b5e51d1238a36ee9021f3264ac502b2544a2e1a5b259c1cbc68f" => :x86_64_linux
end
uses_from_macos "zlib"
on_linux do
depends_on "readline"
end
conflicts_with "freeling", because: "freeling ships its own copy of foma"
def install
system "make"
system "make", "install", "prefix=#{prefix}"
end
test do
# Source: https://code.google.com/p/foma/wiki/ExampleScripts
(testpath/"toysyllabify.script").write <<~EOS
define V [a|e|i|o|u];
define Gli [w|y];
define Liq [r|l];
define Nas [m|n];
define Obs [p|t|k|b|d|g|f|v|s|z];
define Onset (Obs) (Nas) (Liq) (Gli); # Each element is optional.
define Coda Onset.r; # Is mirror image of onset.
define Syllable Onset V Coda;
regex Syllable @> ... "." || _ Syllable;
apply down> abrakadabra
EOS
system "#{bin}/foma", "-f", "toysyllabify.script"
end
end
| 30.87931 | 95 | 0.706868 |
e95696f1b448d18f356f2a5e8190ab32b9de52cc | 2,309 | require 'spec_helper'
describe 'OutCommand' do
def in_dir
Dir.mktmpdir do |working_dir|
yield working_dir
end
end
def add_manifest(manifest)
File.open(manifest, 'w') { |file| file.write('your text') }
end
def add_key(key)
File.open(key, 'w') { |file| file.write('your text') }
end
let(:request) do
{
'source' => {
'access_key_id' => 'test_key',
'secret_access_key' => 'secret_access_key',
'bucket_name' => 'Test',
'region' => 'us-west-1'
},
'params' => {
'stats_file_key' => 'test-manifest-state.json',
'manifest_file' => 'manifest.yml'
}
}
end
let(:bosh) { instance_double(BoshInitDeploymentResource::BoshInit) }
let(:response) { StringIO.new }
let(:stats) { instance_double(BoshInitDeploymentResource::BoshStats) }
let(:command_runner) do
instance_double(BoshInitDeploymentResource::CommandRunner)
end
let(:out_command) do
BoshInitDeploymentResource::OutCommand
.new(bosh, response, stub_responses: true)
end
context 'with valid inputs' do
it 'get stats' do
in_dir do |working_dir|
add_manifest("#{working_dir}/manifest.yml")
add_key("#{working_dir}/microbosh.pem")
allow(command_runner).to receive(:run)
expect(stats).to receive(:status).and_return('Test')
expect(stats).to receive(:status=).with('Test')
expect(stats).to receive(:save)
# expect(bosh).to receive(:setup_environment)
# .with("#{working_dir}/manifest.yml", 'Test')
# expect(bosh).to receive(:deploy).and_return('Deployed')
out = BoshInitDeploymentResource::OutCommand.new(
BoshInitDeploymentResource::BoshInit.new(command_runner), response,
stub_responses: true)
out.run(working_dir, request, stats)
end
end
it 'run deployment' do
in_dir do |working_dir|
add_manifest("#{working_dir}/manifest.yml")
add_key("#{working_dir}/microbosh.pem")
expect(bosh).to receive(:setup_environment)
.with("#{working_dir}/manifest.yml",
"#{working_dir}/microbosh.pem", '')
expect(bosh).to receive(:deploy).and_return('Deployed')
out_command.run(working_dir, request)
end
end
end
end
| 29.602564 | 77 | 0.631442 |
e208ebfccf36f248d5080b3fdd14d1d11a2aca36 | 200 | =begin
Marketplace
API Cloud Loyalty LTM - Webpremios
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end
class VendorOrders < ApplicationRecord
end
| 12.5 | 64 | 0.78 |
189e3a735cf0b4f94da46fae2e220aca5fc33520 | 6,141 | require 'spec_helper'
describe Paperclip::Validators do
context "using the helper" do
before do
rebuild_class
Dummy.validates_attachment :avatar, presence: true, content_type: { content_type: "image/jpeg" }, size: { in: 0..10240 }
end
it "adds the attachment_presence validator to the class" do
assert Dummy.validators_on(:avatar).any?{ |validator| validator.kind == :attachment_presence }
end
it "adds the attachment_content_type validator to the class" do
assert Dummy.validators_on(:avatar).any?{ |validator| validator.kind == :attachment_content_type }
end
it "adds the attachment_size validator to the class" do
assert Dummy.validators_on(:avatar).any?{ |validator| validator.kind == :attachment_size }
end
it 'prevents you from attaching a file that violates that validation' do
Dummy.class_eval{ validate(:name) { raise "DO NOT RUN THIS" } }
dummy = Dummy.new(avatar: File.new(fixture_file("12k.png")))
expect(dummy.errors.keys).to match_array [:avatar_content_type, :avatar, :avatar_file_size]
assert_raises(RuntimeError){ dummy.valid? }
end
end
context 'using the helper with array of validations' do
before do
rebuild_class
Dummy.validates_attachment :avatar, file_type_ignorance: true, file_name: [
{ matches: /\A.*\.jpe?g\z/i, message: :invalid_extension },
{ matches: /\A.{,8}\..+\z/i, message: [:too_long, count: 8] },
]
end
it 'adds the attachment_file_name validator to the class' do
assert Dummy.validators_on(:avatar).any?{ |validator| validator.kind == :attachment_file_name }
end
it 'adds the attachment_file_name validator with two validations' do
assert_equal 2, Dummy.validators_on(:avatar).select{ |validator| validator.kind == :attachment_file_name }.size
end
it 'prevents you from attaching a file that violates all of these validations' do
Dummy.class_eval{ validate(:name) { raise 'DO NOT RUN THIS' } }
dummy = Dummy.new(avatar: File.new(fixture_file('spaced file.png')))
expect(dummy.errors.keys).to match_array [:avatar, :avatar_file_name]
assert_raises(RuntimeError){ dummy.valid? }
end
it 'prevents you from attaching a file that violates only first of these validations' do
Dummy.class_eval{ validate(:name) { raise 'DO NOT RUN THIS' } }
dummy = Dummy.new(avatar: File.new(fixture_file('5k.png')))
expect(dummy.errors.keys).to match_array [:avatar, :avatar_file_name]
assert_raises(RuntimeError){ dummy.valid? }
end
it 'prevents you from attaching a file that violates only second of these validations' do
Dummy.class_eval{ validate(:name) { raise 'DO NOT RUN THIS' } }
dummy = Dummy.new(avatar: File.new(fixture_file('spaced file.jpg')))
expect(dummy.errors.keys).to match_array [:avatar, :avatar_file_name]
assert_raises(RuntimeError){ dummy.valid? }
end
it 'allows you to attach a file that does not violate these validations' do
dummy = Dummy.new(avatar: File.new(fixture_file('rotated.jpg')))
expect(dummy.errors.full_messages).to be_empty
assert dummy.valid?
end
end
context "using the helper with a conditional" do
before do
rebuild_class
Dummy.validates_attachment :avatar, presence: true,
content_type: { content_type: "image/jpeg" },
size: { in: 0..10240 },
if: :title_present?
end
it "validates the attachment if title is present" do
Dummy.class_eval do
def title_present?
true
end
end
dummy = Dummy.new(avatar: File.new(fixture_file("12k.png")))
expect(dummy.errors.keys).to match_array [:avatar_content_type, :avatar, :avatar_file_size]
end
it "does not validate attachment if title is not present" do
Dummy.class_eval do
def title_present?
false
end
end
dummy = Dummy.new(avatar: File.new(fixture_file("12k.png")))
assert_equal [], dummy.errors.keys
end
end
context 'with no other validations on the Dummy#avatar attachment' do
  before do
    reset_class("Dummy")
    Dummy.has_attached_file :avatar
    # Reset the bookkeeping Paperclip uses to track content-type validator
    # registration between examples.
    Paperclip.reset_duplicate_clash_check!
  end

  # Paperclip refuses attachment assignment unless the content type or file
  # name is validated, or the check is explicitly opted out of.
  it 'raises an error when no content_type validation exists' do
    assert_raises(Paperclip::Errors::MissingRequiredValidatorError) do
      Dummy.new(avatar: File.new(fixture_file("12k.png")))
    end
  end

  it 'does not raise an error when a content_type validation exists' do
    Dummy.validates_attachment :avatar, content_type: { content_type: "image/jpeg" }
    assert_nothing_raised do
      Dummy.new(avatar: File.new(fixture_file("12k.png")))
    end
  end

  it 'does not raise an error when a content_type validation exists using validates_with' do
    Dummy.validates_with Paperclip::Validators::AttachmentContentTypeValidator, attributes: :attachment, content_type: 'images/jpeg'
    assert_nothing_raised do
      Dummy.new(avatar: File.new(fixture_file("12k.png")))
    end
  end

  it 'does not raise an error when an inherited validator is used' do
    # A subclass of the content-type validator must also satisfy the
    # required-validator check.
    class MyValidator < Paperclip::Validators::AttachmentContentTypeValidator
      def initialize(options)
        options[:content_type] = "images/jpeg" unless options.has_key?(:content_type)
        super
      end
    end

    Dummy.validates_with MyValidator, attributes: :attachment
    assert_nothing_raised do
      Dummy.new(avatar: File.new(fixture_file("12k.png")))
    end
  end

  it 'does not raise an error when a file_name validation exists' do
    Dummy.validates_attachment :avatar, file_name: { matches: /png$/ }
    assert_nothing_raised do
      Dummy.new(avatar: File.new(fixture_file("12k.png")))
    end
  end

  it 'does not raise an error when a the validation has been explicitly rejected' do
    Dummy.validates_attachment :avatar, file_type_ignorance: true
    assert_nothing_raised do
      Dummy.new(avatar: File.new(fixture_file("12k.png")))
    end
  end
end
end
| 37.218182 | 134 | 0.68751 |
1a7896eec8678631e69fcca013c86668be48fcb8 | 6,731 | #
# Copyright 2011-2013, Dell
# Copyright 2013-2014, SUSE LINUX Products GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
# Controller specs for the machines JSON API.
describe MachinesController do
  before do
    # Stub Node#system so no example shells out to the real host.
    allow_any_instance_of(Node).to receive(:system).and_return(true)
  end

  describe "GET index" do
    before do
      # Pretend every file exists except the 8-to-9 upgrade progress marker,
      # so the action does not take an upgrade-in-progress code path.
      allow(File).to receive(:exist?).and_return(true)
      allow(File).to receive(:exist?).with(
        "/var/lib/crowbar/upgrade/8-to-9-progress.yml"
      ).and_return(false)
    end

    it "is successful" do
      get :index, format: "json"
      expect(response).to have_http_status(:ok)
    end

    context "with some nodes" do
      it "renders json" do
        get :index, format: "json"
        expect(JSON.parse(response.body)).to be_a(Hash)
      end

      it "results in filled nodes hash" do
        get :index, format: "json"
        json = JSON.parse(response.body)
        expect(json["nodes"]).to be_a(Array)
        expect(json["nodes"]).not_to be_empty
      end

      # Each node entry is expected to carry at least a name and an alias.
      it "contains name keys" do
        get :index, format: "json"
        json = JSON.parse(response.body)
        node = json["nodes"].first
        expect(node).to have_key("name")
      end

      it "contains alias keys" do
        get :index, format: "json"
        json = JSON.parse(response.body)
        node = json["nodes"].first
        expect(node).to have_key("alias")
      end
    end

    context "without nodes" do
      before do
        # Simulate an empty deployment.
        allow(Node).to receive(:find_all_nodes).and_return([])
      end

      it "renders json" do
        get :index, format: "json"
        expect(JSON.parse(response.body)).to be_a(Hash)
      end

      it "results in empty nodes hash" do
        get :index, format: "json"
        json = JSON.parse(response.body)
        expect(json["nodes"]).to be_a(Array)
        expect(json["nodes"]).to be_empty
      end
    end
  end

  describe "GET show" do
    it "is successful" do
      get :show, name: "testing.crowbar.com", format: "json"
      expect(response).to have_http_status(:ok)
    end

    it "renders json" do
      get :show, name: "testing.crowbar.com", format: "json"
      expect(JSON.parse(response.body)).to be_a(Hash)
    end

    context "for existent node" do
      it "fetches with name" do
        get :show, name: "testing.crowbar.com", format: "json"
        json = JSON.parse(response.body)
        expect(json["name"]).to eq("testing.crowbar.com")
      end

      it "works with fqdn" do
        get :show, name: "testing.crowbar.com", format: "json"
        json = JSON.parse(response.body)
        expect(json["name"]).to eq("testing.crowbar.com")
      end
    end

    context "for non-existent node" do
      it "renders 404" do
        get :show, name: "nonexistent", format: "json"
        expect(response).to have_http_status(:not_found)
      end
    end
  end

  describe "POST role" do
    context "for existent node" do
      it "assignes role compute" do
        expect_any_instance_of(Node).to receive(:intended_role=).with("compute")
        post :role, name: "testing.crowbar.com", role: "compute", format: "json"
        expect(response).to have_http_status(:ok)
      end
    end

    it "return 404 (not found) http status when node does not exists" do
      post :role, name: "nonexistent", format: "json"
      expect(response).to have_http_status(:not_found)
    end

    it "return 422 (unprocessable_entity) http status when save fails" do
      allow_any_instance_of(Node).to receive(:save).and_return(false)
      expect_any_instance_of(Node).to receive(:intended_role=).with("compute")
      post :role, name: "testing.crowbar.com", role: "compute", format: "json"
      expect(response).to have_http_status(:unprocessable_entity)
    end
  end

  describe "POST rename" do
    context "for existent node" do
      it "renames a node to tester" do
        expect_any_instance_of(Node).to receive(:alias=).with("tester")
        post :rename, name: "testing.crowbar.com", alias: "tester", format: "json"
        expect(response).to have_http_status(:ok)
      end
    end

    it "return 404 (not found) http status when node does not exists" do
      post :rename, name: "nonexistent", format: "json"
      expect(response).to have_http_status(:not_found)
    end

    it "return 422 (unprocessable_entity) http status when save fails" do
      allow_any_instance_of(Node).to receive(:save).and_return(false)
      expect_any_instance_of(Node).to receive(:alias=).with("tester")
      post :rename, name: "testing.crowbar.com", alias: "tester", format: "json"
      expect(response).to have_http_status(:unprocessable_entity)
    end
  end

  # Maintenance actions: shared examples generated per action; these have no
  # admin-node restriction in the specs below.
  [
    :confupdate,
    :identify
  ].each do |action|
    describe "POST #{action}" do
      context "for existent node" do
        it "invokes #{action}" do
          expect_any_instance_of(Node).to receive(action)
          post action, name: "testing.crowbar.com", format: "json"
          expect(response).to have_http_status(:ok)
        end
      end

      context "for non-existent node" do
        it "return 404 (not found) http status" do
          post action, name: "nonexistent", format: "json"
          expect(response).to have_http_status(:not_found)
        end
      end
    end
  end

  # Lifecycle/power actions: also generated per action, and additionally
  # expected to be forbidden for the admin node.
  [
    :reinstall,
    :reset,
    :shutdown,
    :reboot,
    :poweron,
    :powercycle,
    :poweroff,
    :allocate,
    :delete
  ].each do |action|
    describe "POST #{action}" do
      context "for existent node" do
        it "invokes #{action}" do
          expect_any_instance_of(Node).to receive(action)
          post action, name: "testing.crowbar.com", format: "json"
          expect(response).to have_http_status(:ok)
        end

        it "return 403 (forbidden) http status for admin node" do
          allow_any_instance_of(Node).to receive(:admin?).and_return(true)
          post action, name: "testing.crowbar.com", format: "json"
          expect(response).to have_http_status(:forbidden)
        end
      end

      context "for non-existent node" do
        it "renders 404" do
          post action, name: "nonexistent", format: "json"
          expect(response).to have_http_status(:not_found)
        end
      end
    end
  end
end
| 28.888412 | 82 | 0.635121 |
module RedPandas
  # A typed, one-dimensional sequence of values. Every element is cast to
  # the series' resolved type on construction, so @data is homogeneous.
  class Series
    attr_reader :type, :data

    # data: raw values to store (each cast via the resolved type).
    # type: a type identifier accepted by Type.lookup (default Type::String).
    def initialize(data = [], type: Type::String)
      @type = Type.lookup(type)
      @data = data.map { |raw| @type.cast(raw) }
    end

    # Human-readable name of this series' element type.
    def type_name
      type.type_name
    end

    # Number of values held.
    def size
      @data.size
    end

    # True when the series holds no values.
    def empty?
      size.zero?
    end

    # Positional indexing, delegated straight to the underlying array.
    def [](position)
      @data[position]
    end

    # Positional selection:
    # - Range      -> new Series with the sliced values
    # - Enumerable -> new Series with the values at the given indices
    # - otherwise  -> the single value at that position
    def select_by_position(selector)
      if selector.is_a?(Range)
        self.class.new(@data[selector], type: type)
      elsif selector.is_a?(Enumerable)
        picked = selector.map { |index| @data[index] }
        self.class.new(picked, type: type)
      else
        @data[selector]
      end
    end
  end
end
| 17.525 | 52 | 0.560628 |
f8dbd589d10a363b8cdf4131bd5735c24d9ce373 | 803 | # frozen_string_literal: true
require "randrizer/types/base_type"
require "randrizer/types/type_expansion"
module Randrizer
  module Types
    # Wrapper type that yields nil with probability +null_prob+ and
    # otherwise delegates value generation to +inner_type+.
    class Nullable
      include BaseType

      class << self
        # Convenience constructor, also aliased as [] so callers can write
        # Nullable[null_prob: 0.1, inner_type: ...].
        def build(null_prob:, inner_type:)
          new(null_prob: null_prob, inner_type: inner_type)
        end
        alias [] build
      end

      # null_prob:: probability in 0.0..1.0 of producing nil
      # inner_type:: the type expanded and evaluated when a value is produced
      def initialize(null_prob:, inner_type:)
        @null_prob = null_prob
        @inner_type = inner_type
      end

      # Validates the configured probability.
      #
      # Bug fix: the original wrote `raise ValidationError("...")`, which
      # invokes a (nonexistent) ValidationError *method* and would fail with
      # NoMethodError instead of raising the intended error. The messages
      # are also aligned with the actual checks (0.0 and 1.0 are allowed).
      def validate!
        raise ValidationError, "null_prob must be <= 1.0" if @null_prob > 1.0
        raise ValidationError, "null_prob must be >= 0.0" if @null_prob < 0.0
      end

      # Returns nil with probability @null_prob, otherwise a value from the
      # expanded inner type. Kernel#rand is uniform over [0, 1), so
      # P(rand > 1 - p) == p.
      def eval
        return nil if rand > (1.0 - @null_prob)

        TypeExpansion.expand_for(@inner_type).eval
      end
    end
  end
end
| 21.702703 | 76 | 0.630137 |
# Exercises helper-method scoping in the test DSL: helpers defined in a
# testcase are visible to its own tests and inherited by nested contexts,
# while case-level DSL methods are not reachable from inside a test body.
testcase "Test Scope" do
  def helper_method
    "helped!"
  end

  test "can use helper method" do
    helper_method.assert == "helped!"
  end

  context "sub-case inherits helpers" do
    test "can use helper method" do
      helper_method.assert == "helped!"
    end
  end

  # `ok` is presumably defined on the case scope rather than the test
  # scope — looking it up from within a test should raise NameError.
  test "test can't access case methods" do
    expect NameError do
      method(:ok)
    end
  end
end
| 14.769231 | 42 | 0.643229 |
f70292b37dbfd92f9ca76ff964bc2ee1b86043be | 624 | # frozen_string_literal: true
module AcaEntities
  module MagiMedicaid
    module Mitc
      module Contracts
        # Schema and validation rules for {AcaEntities::MagiMedicaid::Mitc::Determination}
        class DeterminationContract < Dry::Validation::Contract
          # @!method call(opts)
          #   @param [Hash] opts the parameters to validate using this contract
          #   @option opts [Integer] :dependent_age required
          #   @return [Dry::Monads::Result]
          params do
            # Nested validation is delegated to DependentAgeContract's
            # params schema.
            required(:dependent_age).filled(DependentAgeContract.params)
          end
        end
      end
    end
  end
end
| 29.714286 | 90 | 0.647436 |
e24d837703b9292848411c7f5a571cec965bb3f5 | 1,875 | require 'test_helper'
# Remote (live test-endpoint) tests for the SoEasyPay gateway. Credentials
# come from fixtures(:so_easy_pay), so these only run with valid fixtures.
class RemoteSoEasyPayTest < Test::Unit::TestCase
  def setup
    @gateway = SoEasyPayGateway.new(fixtures(:so_easy_pay))
    # Amount is in the gateway's minor units (cents, per ActiveMerchant
    # convention — confirm against gateway docs).
    @amount = 100
    @credit_card = credit_card('4111111111111111', {:verification_value => '000', :month => '12', :year => '2015'})
    # Card number the test endpoint is expected to decline.
    @declined_card = credit_card('4000300011112220')
    @options = {
      :currency => 'EUR',
      :ip => '192.168.19.123',
      :email => '[email protected]',
      :order_id => generate_unique_id,
      :billing_address => address,
      :description => 'Store Purchase'
    }
  end

  def test_successful_purchase
    assert response = @gateway.purchase(@amount, @credit_card, @options)
    assert_success response
    assert_equal 'Transaction successful', response.message
  end

  def test_unsuccessful_purchase
    assert response = @gateway.purchase(@amount, @declined_card, @options)
    assert_failure response
  end

  # Two-step flow: authorize returns a token that capture then settles.
  def test_authorize_and_capture
    amount = @amount
    assert auth = @gateway.authorize(amount, @credit_card, @options)
    assert_success auth
    assert auth.authorization
    assert capture = @gateway.capture(amount, auth.authorization)
    assert_success capture
  end

  # Capturing with a blank authorization token must fail.
  def test_failed_capture
    assert response = @gateway.capture(@amount, '')
    assert_failure response
  end

  def test_successful_void
    assert response = @gateway.purchase(@amount, @credit_card, @options)
    assert_success response
    assert response = @gateway.void(response.authorization)
    assert_success response
  end

  # Bad credentials should be rejected with the gateway's verification error.
  def test_invalid_login
    gateway = SoEasyPayGateway.new(
      :login => 'one',
      :password => 'wrong'
    )
    assert response = gateway.purchase(@amount, @credit_card, @options)
    assert_failure response
    assert_equal 'Website verification failed, wrong websiteID or password', response.message
  end
end
| 28.846154 | 115 | 0.693333 |
08bcd88136c40db4874f5866c9a21cbd79319174 | 628 | require 'roar/representer'
require 'roar/json'
require 'roar/json/hal'
require 'poto/representers/file_representer'
module Poto
  # HAL+JSON representer for a paginated collection of files.
  module FileCollectionRepresenter
    include Roar::JSON::HAL
    include Roar::Hypermedia
    include Grape::Roar::Representer

    # Each file is rendered through FileRepresenter inside _embedded.
    collection :files, extend: FileRepresenter, embedded: true

    # Self link for the current page; the helpers default to the Grape
    # endpoint pulled from the Rack env.
    link :self do |opts, helpers = opts[:env]['api.endpoint']|
      helpers.url_for opts, '/files', page: helpers.page
    end

    # Next-page link; omitted when next_page is falsy. NOTE(review):
    # next_page is presumably supplied by the represented object — confirm.
    link :next do |opts, helpers = opts[:env]['api.endpoint']|
      helpers.url_for opts, '/files', page: next_page, per_page: helpers.per_page if next_page
    end
  end
end
| 27.304348 | 94 | 0.711783 |
# Homebrew formula: installs direnv from a source tarball, building with Go.
class Direnv < Formula
  desc "Load/unload environment variables based on $PWD"
  homepage "https://direnv.net/"
  url "https://github.com/direnv/direnv/archive/v2.14.0.tar.gz"
  sha256 "917838827cb753153b91cb2d10c0d7c20cbaa85aa2dde520ee23653a74268ccd"
  head "https://github.com/direnv/direnv.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "1dd24f6c9cb5082091f62a9a98aef11305e7a5f5d545d8368de421f0179318d4" => :high_sierra
    sha256 "642d28694bda5a4471919a5b73709eb8da6655e188f58640b13d95da3aecb973" => :sierra
    sha256 "257cba635f99eb52ba20be9558fe20fd67040cef64b758106a48ea241d500b72" => :el_capitan
  end

  depends_on "go" => :build

  def install
    # Lay the source out under a throwaway GOPATH the way the Go toolchain
    # expects (src/github.com/direnv/direnv), then defer to the Makefile.
    ENV["GOPATH"] = buildpath
    (buildpath/"src/github.com/direnv/direnv").install buildpath.children
    cd "src/github.com/direnv/direnv" do
      system "make", "install", "DESTDIR=#{prefix}"
      prefix.install_metafiles
    end
  end

  test do
    # Smoke test: `direnv status` must exit successfully.
    system bin/"direnv", "status"
  end
end
| 32.233333 | 93 | 0.752844 |
# Creates the nobel_prize_winners table and seeds it with an initial set of
# laureates.
class CreateNobelPrizeWinners < ActiveRecord::Migration
  # Seed rows inserted right after the table is created.
  # Data fix: the original misspelled Einstein as "Eintein".
  SEED_WINNERS = [
    {first_name: 'Norman', last_name: 'Borlaug', category: 'Peace', year: 1970},
    {first_name: 'Paul', last_name: 'Flory', category: 'Chemistry', year: 1974},
    {first_name: 'Albert', last_name: 'Einstein', category: 'Physics', year: 1921},
    {first_name: 'Samuel', last_name: 'Beckett', category: 'Literature', year: 1969},
    {first_name: 'Niels', last_name: 'Bohr', category: 'Physics', year: 1922},
    {first_name: 'Erwin', last_name: 'Schrodinger', category: 'Physics', year: 1933},
    {first_name: 'Paul', last_name: 'Dirac', category: 'Physics', year: 1933},
    {first_name: 'Enrico', last_name: 'Fermi', category: 'Physics', year: 1938},
    {first_name: 'Richard', last_name: 'Feynman', category: 'Physics', year: 1965},
    {first_name: 'Marie', last_name: 'Curie', category: 'Chemistry', year: 1911},
    {first_name: 'James', last_name: 'Watson', category: 'Physiology or Medicine', year: 1962},
    {first_name: 'Bertrand', last_name: 'Russell', category: 'Literature', year: 1950},
    {first_name: 'John', last_name: 'Steinbeck', category: 'Literature', year: 1962},
    {first_name: 'Nelson', last_name: 'Mandela', category: 'Peace', year: 1993},
    {first_name: 'Jacques', last_name: 'Monod', category: 'Physiology or Medicine', year: 1965}
  ].freeze

  def change
    create_table :nobel_prize_winners do |t|
      t.string :first_name
      t.string :last_name
      t.string :category
      t.integer :year
    end

    # Only seed while migrating up. The original ran the model calls
    # directly inside `change`, so they also executed while the migration
    # was being reverted (just before the table was dropped).
    reversible do |dir|
      dir.up do
        # Make sure the model sees the freshly created columns.
        NobelPrizeWinner.reset_column_information
        SEED_WINNERS.each { |winner| NobelPrizeWinner.create(winner) }
      end
    end
  end
end
| 61.483871 | 105 | 0.550892 |
module Vagrant
  module SshFS
    # Base error class for the vagrant-sshfs plugin; message text is looked
    # up from the "vagrant.config.sshfs.error" I18n namespace.
    class Error < Errors::VagrantError
      error_namespace("vagrant.config.sshfs.error")
    end
  end
end
| 17.375 | 51 | 0.71223 |
class ReferencesController < ApplicationController
  # Lists every reference.
  def index
    @references = Reference.all
  end

  # Renders the form for a new reference.
  def new
    @reference = Reference.new
  end

  # Creates a reference from the submitted parameters.
  #
  # Bug fix: the original rendered nothing when the save failed, letting
  # Rails fall through to implicit template lookup for `create`; a failed
  # save now re-renders the :new form so validation errors can be shown.
  def create
    # Loaded for views that list existing references alongside the form.
    @references = Reference.all
    @reference = Reference.new(reference_params)
    if @reference.save
      respond_to do |format|
        format.html { redirect_to root_path }
        format.js
      end
    else
      render :new
    end
  end

  private

  # Strong-parameters whitelist for reference attributes.
  def reference_params
    params.require(:reference).permit(:name, :email, :address, :phone, :job_title, :employer)
  end
end
| 18.892857 | 95 | 0.671078 |
# Enable code-coverage reporting for local runs; CI skips SimpleCov.
unless ENV['CI']
  require 'simplecov'
  SimpleCov.start
end

require 'seeclickfix'
require 'rspec'
require 'webmock/rspec'
# --- WebMock request-expectation helpers ------------------------------------
# Each a_<verb> helper builds an expectation that a request of that HTTP verb
# was made against the SeeClickFix API.

def a_delete(path)
  a_request(:delete, seeclickfix_url(path))
end

def a_get(path)
  a_request(:get, seeclickfix_url(path))
end

def a_post(path)
  a_request(:post, seeclickfix_url(path))
end

def a_put(path)
  a_request(:put, seeclickfix_url(path))
end

# --- WebMock stubbing helpers -----------------------------------------------
# Each stub_<verb> helper registers a canned response for that HTTP verb.

def stub_delete(path)
  stub_request(:delete, seeclickfix_url(path))
end

def stub_get(path)
  stub_request(:get, seeclickfix_url(path))
end

def stub_post(path)
  stub_request(:post, seeclickfix_url(path))
end

def stub_put(path)
  stub_request(:put, seeclickfix_url(path))
end

# Absolute path of the spec fixtures directory.
def fixture_path
  File.expand_path('fixtures', __dir__)
end

# Opens the named fixture file and returns the File object.
def fixture(file)
  File.new("#{fixture_path}/#{file}")
end

# Expands a relative API path into a full SeeClickFix URL.
def seeclickfix_url(path)
  "https://seeclickfix.com/#{path}"
end
| 16.057692 | 45 | 0.742515 |