hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
5dca5e784da760c80800d9dd38cb4f2856615e85 | 3,572 | require 'spec_helper'
# Specs for Banzai::ReferenceParser::CommitRangeParser, which resolves
# data-commit-range references embedded in rendered HTML links.
describe Banzai::ReferenceParser::CommitRangeParser, lib: true do
  include ReferenceParserHelpers

  let(:project) { create(:empty_project, :public) }
  let(:user) { create(:user) }
  subject { described_class.new(project, user) }
  let(:link) { empty_html_link }

  describe '#nodes_visible_to_user' do
    # Fixed description: this context sets data-commit-range (the attribute
    # this parser handles), not data-issue as previously described.
    context 'when the link has a data-commit-range attribute' do
      before do
        link['data-commit-range'] = '123..456'
      end

      it_behaves_like "referenced feature visibility", "repository"
    end
  end

  describe '#referenced_by' do
    context 'when the link has a data-project attribute' do
      before do
        link['data-project'] = project.id.to_s
      end

      # Fixed typo in description: "link as a" -> "link has a".
      context 'when the link has a data-commit-range attribute' do
        before do
          link['data-commit-range'] = '123..456'
        end

        it 'returns an Array of commit ranges' do
          range = double(:range)

          expect(subject).to receive(:find_object)
            .with(project, '123..456')
            .and_return(range)

          expect(subject.referenced_by([link])).to eq([range])
        end

        it 'returns an empty Array when the commit range could not be found' do
          expect(subject).to receive(:find_object)
            .with(project, '123..456')
            .and_return(nil)

          expect(subject.referenced_by([link])).to eq([])
        end
      end

      context 'when the link does not have a data-commit-range attribute' do
        it 'returns an empty Array' do
          expect(subject.referenced_by([link])).to eq([])
        end
      end
    end

    context 'when the link does not have a data-project attribute' do
      it 'returns an empty Array' do
        expect(subject.referenced_by([link])).to eq([])
      end
    end
  end

  describe '#commit_range_ids_per_project' do
    before do
      link['data-project'] = project.id.to_s
    end

    it 'returns a Hash containing range IDs per project' do
      link['data-commit-range'] = '123..456'

      hash = subject.commit_range_ids_per_project([link])

      expect(hash).to be_an_instance_of(Hash)
      expect(hash[project.id].to_a).to eq(['123..456'])
    end

    it 'does not add a project when the data-commit-range attribute is empty' do
      hash = subject.commit_range_ids_per_project([link])

      expect(hash).to be_empty
    end
  end

  describe '#find_ranges' do
    it 'returns an Array of range objects' do
      range = double(:commit)

      expect(subject).to receive(:find_object)
        .with(project, '123..456')
        .and_return(range)

      expect(subject.find_ranges(project, ['123..456'])).to eq([range])
    end

    it 'skips ranges that could not be found' do
      expect(subject).to receive(:find_object)
        .with(project, '123..456')
        .and_return(nil)

      expect(subject.find_ranges(project, ['123..456'])).to eq([])
    end
  end

  describe '#find_object' do
    let(:range) { double(:range) }

    before do
      expect(CommitRange).to receive(:new).and_return(range)
    end

    context 'when the range has valid commits' do
      it 'returns the commit range' do
        expect(range).to receive(:valid_commits?).and_return(true)

        expect(subject.find_object(project, '123..456')).to eq(range)
      end
    end

    context 'when the range does not have any valid commits' do
      it 'returns nil' do
        expect(range).to receive(:valid_commits?).and_return(false)

        expect(subject.find_object(project, '123..456')).to be_nil
      end
    end
  end
end
| 27.267176 | 80 | 0.632139 |
e93e199f107c921e472ecdbacbc50989be00e98d | 1,203 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
# NOTE: auto-generated file — make changes via migrations, not by editing here.
ActiveRecord::Schema.define(version: 2020_09_12_011355) do
  # Rental listings displayed to users.
  create_table "apartments", force: :cascade do |t|
    t.string "address"
    # NOTE(review): float for a price is lossy; consider a decimal column
    # via a migration if this represents money.
    t.float "price"
    t.text "description"
    t.string "image_url"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
  # Simple title/description posts.
  create_table "posts", force: :cascade do |t|
    t.string "title"
    t.text "description"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
end
| 37.59375 | 86 | 0.746467 |
38e68df212e0fbd555f498cd7223fb084daec42f | 176 | module Bitex
module Resources
  module Merchants
    # This resource allows mapping Payments data to JSON:API.
    # Inherits all behaviour from Private; exists to give the resource a name.
    class BitcoinAddress < Private
    end
  end
end
end
| 17.6 | 57 | 0.681818 |
62d605eb1dbcc4b058022cbeb0ee614de0b70857 | 1,925 | @version = "2.0.1"
Pod::Spec.new do |s|
s.name = "DZNPhotoPickerController"
s.version = @version
s.summary = "A photo search/picker for iOS using popular providers like 500px, Flickr, Intagram, Google Images, etc."
s.description = "This framework tries to mimic as close as possible the native UIImagePickerController API for iOS7, in terms of features, appearance and behaviour."
s.homepage = "https://github.com/dzenbot/DZNPhotoPickerController"
s.screenshots = "https://raw.githubusercontent.com/dzenbot/DZNPhotoPickerController/master/Docs/screenshots.png"
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { "Ignacio Romero Z." => "[email protected]" }
s.source = { :git => "https://github.com/dzenbot/UIPhotoPickerController.git", :tag => "v#{s.version}" }
s.default_subspec = 'Core'
s.resources = 'Resources', 'Source/Resources/**/*.*'
s.requires_arc = true
s.platform = :ios, '8.0'
s.public_header_files = 'Source/Classes/*/*.h'
s.subspec 'Core' do |ss|
ss.source_files = 'Source/Classes/Core/*.{h,m}'
ss.dependency 'DZNPhotoPickerController/Services'
ss.dependency 'DZNPhotoPickerController/Editor'
ss.dependency 'SDWebImage', '~> 5.0'
ss.dependency 'DZNEmptyDataSet', '~> 1.7'
end
s.subspec 'Services' do |ss|
ss.source_files = 'Source/Classes/Services/*.{h,m}',
'Source/Classes/Core/DZNPhotoPickerControllerConstants.{h,m}'
ss.dependency 'AFNetworking', '~> 3.0'
ss.prefix_header_contents = '#import <MobileCoreServices/MobileCoreServices.h>',
'#import <SystemConfiguration/SystemConfiguration.h>'
end
s.subspec 'Editor' do |ss|
ss.source_files = 'Source/Classes/Editor/*.{h,m}',
'Source/Classes/Core/DZNPhotoPickerControllerConstants.{h,m}'
end
end
| 42.777778 | 171 | 0.645195 |
621dbed1fbca3c97df19cbef768857f5588b0c40 | 1,976 | class ContinueBox
attr_accessor :x, :y, :w, :h, :text, :lines, :page, :line_height
# Builds a paging text box from another box-like object (anything exposing
# x, y, w, h and font).
#
# box  - source of geometry and the default font.
# font - optional font override; falls back to box.font when nil.
#        (Bug fix: this parameter was previously accepted but ignored.)
def initialize box, font=nil
  @x = box.x
  @y = box.y
  # W and H are in characters, not pixels
  # Make sure the H you set leaves room for the "..." at the bottom
  @w = box.w
  @h = box.h
  @font = font || box.font
  @text = ""
  @lines = []
  # TODO: page currently isn't used, but could be added to the "..." line for context,
  # especially if you add a way to calculate the total number of pages.
  @page = 0
  @line_number = 0
  @line_height = 26
  @more = "..."
end
# True when the window currently shows the final page of @lines.
def on_last_page
  (@line_number + @h) >= @lines.length
end

# Replace the box contents with +story+, re-wrapping it to @w characters.
# NOTE(review): relies on the DragonRuby $args global being available at
# call time — confirm this is only called inside a tick.
def << story
  @text = story
  @lines = $args.string.wrapped_lines @text, @w
end
# Move the view forward by one page (@h lines).
# Returns false when already on the last page; otherwise bumps the page
# counter and repositions the window to the new first line.
def advance
  candidate = @line_number + @h
  return false if candidate >= @lines.length

  @page += 1
  @line_number = candidate
end
# Move the view back one page. Returns false when already at the top;
# otherwise rewinds @line_number (clamped at zero) and decrements @page
# (also clamped at zero).
def back
  return false if @line_number.zero?

  @line_number -= @h
  @line_number = 0 if @line_number.negative?
  @page -= 1 if @page.positive?
end
# Rewind the box to the first page.
def reset
  @page = 0
  @line_number = 0
end
# Build the renderable label primitives for the currently visible page.
# Returns an Array of label hashes ({x:, y:, text:, font:}); when more text
# remains below the window, a trailing "..." indicator is appended.
# NOTE(review): the "..." indicator is pushed as an Array primitive while
# the text lines are Hashes — DragonRuby accepts both forms, but confirm
# the inconsistency is deliberate.
def labels
  res = []
  @h.times do |disp_row|
    next_line = @line_number + disp_row
    if(next_line < @lines.length)
      # Each successive display row is drawn @line_height pixels lower.
      anchor_y = @y - (@line_height * disp_row)
      anchor_x = @x
      line = @lines[next_line].strip
      res << { x: anchor_x, y: anchor_y, text: line, font: @font }
    end
  end
  # Show the "more" marker below the last visible row when pages remain.
  if @line_number + @h < @lines.length
    res << [ @x, @y - (@line_height * @h), @more, @font ]
  end
  return res
end
# Render every label for the current page onto +surface+ (anything that
# exposes a labels collection, e.g. DragonRuby outputs).
def draw surface
  labels.each do |s|
    surface.labels << s
  end
end

# Snapshot of the box state; used by to_s/inspect for console debugging.
def serialize
  {
    x: @x,
    y: @y,
    w: @w,
    h: @h,
    text: @text,
    page: @page,
    line_number: @line_number
  }
end

def to_s
  serialize.to_s
end

def inspect
  serialize.to_s
end
end | 19.372549 | 88 | 0.544534 |
bfa529f758e8dc7533d379f6ee9d60952d1c215d | 3,393 | module Hancock
class Recipient < Hancock::Base
class Recreator
attr_reader :docusign_recipient, :tabs
IS_INVALID_RECIPIENT_ERROR = ->(e) { e.docusign_status == 'INVALID_RECIPIENT_ID' }
def initialize(docusign_recipient)
@docusign_recipient = docusign_recipient
begin
@tabs = docusign_recipient.tabs
rescue Hancock::RequestError => e
if IS_INVALID_RECIPIENT_ERROR[e]
# We deleted the recipient without recreating it previously.
# Probably got an error response from DocuSign.
# Let this slide to recover.
Hancock.logger.error("RECIPIENT RECREATION FAILED PREVIOUSLY, TABS LOST: #{e.message}. RECIPIENT: #{recipient_params}")
@tabs = {} # tabs are gone
else
Hancock.logger.error("ERROR FETCHING RECIPIENT TABS: #{e.message}. RECIPIENT: #{recipient_params}")
raise e
end
end
end
# Deleting a recipient from an envelope can cause the envelope's status to
# change. For example, if all other recipients had signed except this one,
# the envelope status will change to 'complete' when this recipient is
# deleted, and we will no longer be able to add the recipient back onto the
# envelope. Hence the placeholder recipient.
def recreate_with_tabs
tries ||= 3
placeholder_docusign_recipient.create
docusign_recipient.delete
docusign_recipient.create
docusign_recipient.create_tabs(tabs) unless tabs.empty?
delete_existing_placholders
rescue Timeout::Error => e
retry if (tries -= 1) > 0
Hancock.logger.error("TIMEOUT WHILE RECREATING RECIPIENT: #{e.message}. RECIPIENT: #{recipient_params}")
raise e
rescue => e
Hancock.logger.error("ERROR RECREATING RECIPIENT: #{e.message}. RECIPIENT: #{recipient_params}")
raise e
end
def delete_existing_placholders
Recipient.fetch_for_envelope(docusign_recipient.envelope_identifier).select do |recipient|
recipient.email == placeholder_recipient.email
end.each do |placeholder|
placeholder.docusign_recipient.delete
end
end
def placeholder_docusign_recipient
@placeholder_docusign_recipient ||= DocusignRecipient.new(placeholder_recipient)
end
private
def placeholder_recipient
Recipient.new(
:client_user_id => placeholder_identifier, # Don't send an email
:identifier => placeholder_identifier,
:email => Hancock.placeholder_email,
:name => 'Placeholder while recreating recipient',
:envelope_identifier => docusign_recipient.envelope_identifier,
:recipient_type => docusign_recipient.recipient_type,
:routing_order => docusign_recipient.routing_order,
:embedded_start_url => nil # No really, don't send an email
)
end
def placeholder_identifier
@placeholder_identifier ||= SecureRandom.uuid
end
def recipient_params
docusign_recipient.to_hash.merge(
# Envelope identifier is not in the #to_hash call as it is not sent to DocuSign
envelopeId: docusign_recipient.envelope_identifier
)
end
end
end
end
| 37.7 | 131 | 0.655172 |
bb1296271f7a326e0dfc8ad233a9519551bc58f3 | 2,676 | # frozen_string_literal: true
require 'spec_helper'
require 'webmock/rspec'
# Specs for the gitlab_ci_runner::register_to_file Puppet function, which
# registers a runner against GitLab and caches the auth token in a file.
describe 'gitlab_ci_runner::register_to_file' do
  let(:url) { 'https://gitlab.example.org' }
  let(:regtoken) { 'registration-token' }
  let(:runner_name) { 'testrunner' }
  # Path where the function caches the runner's auth token.
  let(:filename) { "/etc/gitlab-runner/auth-token-#{runner_name}" }
  let(:return_hash) do
    {
      'id' => 1234,
      'token' => 'auth-token'
    }
  end

  # Argument validation.
  it { is_expected.not_to eq(nil) }
  it { is_expected.to run.with_params.and_raise_error(ArgumentError) }
  it { is_expected.to run.with_params('').and_raise_error(ArgumentError) }
  it { is_expected.to run.with_params('ftp://foooo.bar').and_raise_error(ArgumentError) }
  it { is_expected.to run.with_params('https://gitlab.com', 1234).and_raise_error(ArgumentError) }
  it { is_expected.to run.with_params('https://gitlab.com', 'registration-token', project: 1234).and_raise_error(ArgumentError) }

  context 'uses an existing auth token from file' do
    before do
      allow(File).to receive(:exist?).and_call_original
      allow(File).to receive(:exist?).with(filename).and_return(true)
      allow(File).to receive(:read).and_call_original
      allow(File).to receive(:read).with(filename).and_return(return_hash['token'])
    end

    it { is_expected.to run.with_params(url, regtoken, runner_name).and_return(return_hash['token']) }
  end

  context "retrieves from Gitlab and writes auth token to file if it doesn't exist" do
    before do
      allow(PuppetX::Gitlab::Runner).to receive(:register).with(url, { 'token' => regtoken }, nil, nil).and_return(return_hash)
      allow(File).to receive(:exist?).and_call_original
      allow(File).to receive(:exist?).with(File.dirname(filename)).and_return(true)
      allow(File).to receive(:write).with(filename, return_hash['token'])
      # Token files are written read-only for the owner.
      allow(File).to receive(:chmod).with(0o400, filename)
    end

    it { is_expected.to run.with_params(url, regtoken, runner_name).and_return(return_hash['token']) }

    context 'with ca_file option' do
      before do
        allow(PuppetX::Gitlab::Runner).to receive(:register).with(url, { 'token' => regtoken }, nil, '/path/to/ca_file').and_return(return_hash)
      end

      it { is_expected.to run.with_params(url, regtoken, runner_name, {}, nil, '/path/to/ca_file').and_return(return_hash['token']) }
    end
  end

  context 'noop does not register runner and returns dummy token' do
    before do
      allow(Puppet.settings).to receive(:[]).and_call_original
      allow(Puppet.settings).to receive(:[]).with(:noop).and_return(true)
    end

    it { is_expected.to run.with_params(url, regtoken, runner_name).and_return('DUMMY-NOOP-TOKEN') }
  end
end
| 41.169231 | 144 | 0.701046 |
ff1c63ee8d8bc9a1ee0943e912b0677cce9fe01e | 1,015 | cask "podolski" do
  # version is "marketing,build": before_comma is used for naming, after_comma
  # is the installer build number.
  version "1.2.2,9724"
  sha256 "36c3e778a3aaa82dcdb18459b9e01ba490884b52cc200941c299362024523994"

  # uhedownloads-heckmannaudiogmb.netdna-ssl.com/ was verified as official when first introduced to the cask
  url "https://uhedownloads-heckmannaudiogmb.netdna-ssl.com/releases/Podolski_#{version.before_comma.no_dots}_#{version.after_comma}_Mac.zip"
  appcast "https://u-he.com/products/podolski/releasenotes.html"
  name "Podolski"
  desc "Virtual analogue synthesizer"
  homepage "https://u-he.com/products/podolski/"

  pkg "Podolski_#{version.after_comma}_Mac/Podolski #{version.before_comma} Installer.pkg"

  # Component packages written by the bundled installer; all must be removed
  # on uninstall.
  uninstall pkgutil: [
    "com.u-he.Podolski.aax.pkg",
    "com.u-he.Podolski.au.pkg",
    "com.u-he.Podolski.data.pkg",
    "com.u-he.Podolski.documentation.pkg",
    "com.u-he.Podolski.nks.pkg",
    "com.u-he.Podolski.presets.pkg",
    "com.u-he.Podolski.tuningFiles.pkg",
    "com.u-he.Podolski.vst.pkg",
    "com.u-he.Podolski.vst3.pkg",
  ]

  caveats do
    reboot
  end
end
| 33.833333 | 141 | 0.733005 |
87c74ea6f6c4734c800e135c9b5ce0172ae50273 | 1,153 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'eversign/version'
Gem::Specification.new do |spec|
spec.name = "eversign"
spec.version = Eversign::VERSION
spec.authors = ["eversign"]
spec.email = ["[email protected]"]
spec.summary = %q{Gem for Eversign API Client.}
spec.description = %q{Gem for Eversign API SDK.}
spec.homepage = "https://github.com/workatbest/eversign-ruby-sdk"
spec.license = "MIT"
spec.files = Dir['lib/**/*.rb', 'READ']
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_dependency 'rails', '>= 4'
spec.add_dependency 'faraday', '~> 0.13'
spec.add_dependency 'addressable', '~> 2.5'
spec.add_dependency 'kartograph', '~> 0.2.3'
spec.add_dependency 'configurations', '~> 2.2'
spec.add_development_dependency 'bundler', '~> 1.12'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'rspec', '~> 3.0'
spec.add_development_dependency 'simplecov'
end
| 36.03125 | 74 | 0.653079 |
e914cf7d22f87ac5ba9dc0336f2d2ffe304fe804 | 1,072 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
# NOTE: auto-generated file — make changes via migrations, not by editing here.
ActiveRecord::Schema.define(version: 2019_01_09_221741) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  # Simple title/body posts.
  create_table "posts", force: :cascade do |t|
    t.string "title"
    t.text "body"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
end
| 41.230769 | 86 | 0.768657 |
f7cf97140dd58d609ad11d120d9445fb5bb924ad | 4,488 | class Grafana < Formula
desc "Gorgeous metric visualizations and dashboards for timeseries databases"
homepage "https://grafana.com"
url "https://github.com/grafana/grafana/archive/v6.4.4.tar.gz"
sha256 "fad788bb4f8c4cef776a906603e5277955b6f63e150d34928980892abc5cc651"
head "https://github.com/grafana/grafana.git"
bottle do
cellar :any_skip_relocation
sha256 "43d7922f548665368c27aa56c8fa1a6b3698e3be8502876b5d448c8d15cb0fc2" => :catalina
sha256 "f62579607d03134a4ed8dbceff747edc99a3f8d1b166d19c16d5f28c55ce9a63" => :mojave
sha256 "7dcfbda62970e8effcbe83a3246603ae1d98024f1cfeaeeeff82877d69a9aac8" => :high_sierra
end
depends_on "[email protected]" => :build
depends_on "node@10" => :build
depends_on "yarn" => :build
unless OS.mac?
depends_on "fontconfig"
depends_on "freetype"
depends_on "zlib"
end
def install
ENV["GOPATH"] = buildpath
grafana_path = buildpath/"src/github.com/grafana/grafana"
grafana_path.install buildpath.children
cd grafana_path do
system "go", "run", "build.go", "build"
system "yarn", "install", "--ignore-engines"
system "node_modules/grunt-cli/bin/grunt", "build"
bin.install "bin/darwin-amd64/grafana-cli"
bin.install "bin/darwin-amd64/grafana-server"
(etc/"grafana").mkpath
cp("conf/sample.ini", "conf/grafana.ini.example")
etc.install "conf/sample.ini" => "grafana/grafana.ini"
etc.install "conf/grafana.ini.example" => "grafana/grafana.ini.example"
pkgshare.install "conf", "public", "tools", "vendor"
prefix.install_metafiles
end
end
def post_install
(var/"log/grafana").mkpath
(var/"lib/grafana/plugins").mkpath
end
plist_options :manual => "grafana-server --config=#{HOMEBREW_PREFIX}/etc/grafana/grafana.ini --homepath #{HOMEBREW_PREFIX}/share/grafana --packaging=brew cfg:default.paths.logs=#{HOMEBREW_PREFIX}/var/log/grafana cfg:default.paths.data=#{HOMEBREW_PREFIX}/var/lib/grafana cfg:default.paths.plugins=#{HOMEBREW_PREFIX}/var/lib/grafana/plugins"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false/>
</dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/grafana-server</string>
<string>--config</string>
<string>#{etc}/grafana/grafana.ini</string>
<string>--homepath</string>
<string>#{opt_pkgshare}</string>
<string>--packaging=brew</string>
<string>cfg:default.paths.logs=#{var}/log/grafana</string>
<string>cfg:default.paths.data=#{var}/lib/grafana</string>
<string>cfg:default.paths.plugins=#{var}/lib/grafana/plugins</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}/lib/grafana</string>
<key>StandardErrorPath</key>
<string>#{var}/log/grafana/grafana-stderr.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/grafana/grafana-stdout.log</string>
<key>SoftResourceLimits</key>
<dict>
<key>NumberOfFiles</key>
<integer>10240</integer>
</dict>
</dict>
</plist>
EOS
end
  # Integration smoke test: start grafana-server against a scratch directory
  # and watch its output for the HTTP server initialization banner.
  test do
    require "pty"
    require "timeout"

    # first test
    system bin/"grafana-server", "-v"

    # avoid stepping on anything that may be present in this directory
    tdir = File.join(Dir.pwd, "grafana-test")
    Dir.mkdir(tdir)
    logdir = File.join(tdir, "log")
    datadir = File.join(tdir, "data")
    plugdir = File.join(tdir, "plugins")
    [logdir, datadir, plugdir].each do |d|
      Dir.mkdir(d)
    end
    Dir.chdir(pkgshare)
    # Spawn under a PTY so the server's stdout is unbuffered and scannable.
    res = PTY.spawn(bin/"grafana-server",
                    "cfg:default.paths.logs=#{logdir}",
                    "cfg:default.paths.data=#{datadir}",
                    "cfg:default.paths.plugins=#{plugdir}",
                    "cfg:default.server.http_port=50100")
    r = res[0]
    w = res[1]
    pid = res[2]
    # Scan output for up to 5 seconds for the HTTP server banner.
    listening = Timeout.timeout(5) do
      li = false
      r.each do |l|
        if /Initializing HTTPServer/.match?(l)
          li = true
          break
        end
      end
      li
    end
    Process.kill("TERM", pid)
    w.close
    r.close
    # NOTE(review): `listening` is only the block's value — it is never
    # asserted, so a false result would not fail this test; confirm intended.
    listening
  end
| 32.057143 | 341 | 0.64082 |
1db250be842570144550a6f2612260afa71e7ba2 | 2,093 | require File.dirname(__FILE__) + "/../spec_helper"
describe CartsController do
include SpecControllerHelper
before :each do
scenario :shop_with_active_product
scenario :order_with_order_lines
scenario :cart_with_cart_items
set_resource_paths :cart, '/shops/1/'
end
it "should be an BaseController" do
controller.should be_kind_of(BaseController)
end
describe "routing" do
with_options :path_prefix => '/shops/1/', :section_id => "1" do |route|
route.it_maps :get, "carts/1", :show, :id => '1'
route.it_maps :post, "carts", :create
route.it_maps :put, "carts/1", :update, :id => '1'
route.it_maps :delete, "carts/1", :destroy, :id => '1'
end
end
describe "GET to :show" do
act! { request_to :get, @member_path }
it_assigns :product
it "should find cart items" do
@cart.cart_items.should_receive(:find).any_number_of_times.and_return [@cart_item]
act!
end
end
describe "GET to :create" do
act! { request_to :post, @collection_path }
it "should add the new cart item" do
@cart.cart_items.stub!(:build).and_return @cart_item
act!
end
end
describe "GET to :create" do
act! { request_to :post, @collection_path }
it "should add the new cart item" do
@cart.cart_items.stub!(:build).and_return @cart_item
act!
end
end
describe "PUT to :update" do
it "fetches a cart item from cart.cart_items" do
@cart.cart_items.should_receive(:find).any_number_of_times.and_return @cart_item
end
it "should update the cart item" do
@cart_item.stub!(:attributes=).and_return true
end
end
describe "DELETE to :destroy" do
it "fetches a cart item from cart.cart_items" do
@cart.cart_items.should_receive(:find).any_number_of_times.and_return @cart_item
end
it "should remove the cart item" do
@cart_item.stub!(:destroy).and_return true
end
end
end | 27.906667 | 89 | 0.62924 |
d5eb69cb5f0451bde47d3f6029091de5133b2be7 | 461 | class Launchcontrol < Cask
  # Legacy-style Homebrew Cask definition for LaunchControl.
  version '1.16.1'
  sha256 '1475d384d1bfdb76e49a288b96a310b23a150946c568e045db0c0597b6c0b5a2'

  url "http://www.soma-zone.com/download/files/LaunchControl_#{version}.tar.bz2"
  # Appcast checksum pins the expected release-feed contents.
  appcast 'http://www.soma-zone.com/LaunchControl/a/appcast.xml',
          :sha256 => 'a82b64b1b68b0fc013145825eee9b18930378caa24a664d93d5777848c6fcc15'
  homepage 'http://www.soma-zone.com/LaunchControl/'
  # NOTE(review): license is :unknown — confirm upstream licensing.
  license :unknown

  app 'LaunchControl.app'
end
| 35.461538 | 87 | 0.776573 |
7aac946a93a84bccdd8a2fbfb0497ee04b080333 | 5,803 | =begin
#Ory Kratos
#Welcome to the ORY Kratos HTTP API documentation!
The version of the OpenAPI document: v0.4.6-alpha.1
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'cgi'
module OryHydraClient
class HealthApi
attr_accessor :api_client
def initialize(api_client = ApiClient.default)
@api_client = api_client
end
# Check alive status
# This endpoint returns a 200 status code when the HTTP server is up running. This status does currently not include checks whether the database connection is working. If the service supports TLS Edge Termination, this endpoint does not require the `X-Forwarded-Proto` header to be set. Be aware that if you are running multiple nodes of this service, the health status will never refer to the cluster state, only to a single instance.
# @param [Hash] opts the optional parameters
# @return [HealthStatus]
def is_instance_alive(opts = {})
data, _status_code, _headers = is_instance_alive_with_http_info(opts)
data
end
# Check alive status
# This endpoint returns a 200 status code when the HTTP server is up running. This status does currently not include checks whether the database connection is working. If the service supports TLS Edge Termination, this endpoint does not require the `X-Forwarded-Proto` header to be set. Be aware that if you are running multiple nodes of this service, the health status will never refer to the cluster state, only to a single instance.
# @param [Hash] opts the optional parameters
# @return [Array<(HealthStatus, Integer, Hash)>] HealthStatus data, response status code and response headers
def is_instance_alive_with_http_info(opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: HealthApi.is_instance_alive ...'
end
# resource path
local_var_path = '/health/alive'
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:body]
# return_type
return_type = opts[:return_type] || 'HealthStatus'
# auth_names
auth_names = opts[:auth_names] || []
new_options = opts.merge(
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: HealthApi#is_instance_alive\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
# Check readiness status
# This endpoint returns a 200 status code when the HTTP server is up running and the environment dependencies (e.g. the database) are responsive as well. If the service supports TLS Edge Termination, this endpoint does not require the `X-Forwarded-Proto` header to be set. Be aware that if you are running multiple nodes of this service, the health status will never refer to the cluster state, only to a single instance.
# @param [Hash] opts the optional parameters
# @return [HealthStatus]
def is_instance_ready(opts = {})
data, _status_code, _headers = is_instance_ready_with_http_info(opts)
data
end
# Check readiness status
# This endpoint returns a 200 status code when the HTTP server is up running and the environment dependencies (e.g. the database) are responsive as well. If the service supports TLS Edge Termination, this endpoint does not require the `X-Forwarded-Proto` header to be set. Be aware that if you are running multiple nodes of this service, the health status will never refer to the cluster state, only to a single instance.
# @param [Hash] opts the optional parameters
# @return [Array<(HealthStatus, Integer, Hash)>] HealthStatus data, response status code and response headers
def is_instance_ready_with_http_info(opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: HealthApi.is_instance_ready ...'
end
# resource path
local_var_path = '/health/ready'
# query parameters
query_params = opts[:query_params] || {}
# header parameters
header_params = opts[:header_params] || {}
# HTTP header 'Accept' (if needed)
header_params['Accept'] = @api_client.select_header_accept(['application/json'])
# form parameters
form_params = opts[:form_params] || {}
# http body (model)
post_body = opts[:body]
# return_type
return_type = opts[:return_type] || 'HealthStatus'
# auth_names
auth_names = opts[:auth_names] || []
new_options = opts.merge(
:header_params => header_params,
:query_params => query_params,
:form_params => form_params,
:body => post_body,
:auth_names => auth_names,
:return_type => return_type
)
data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
if @api_client.config.debugging
@api_client.config.logger.debug "API called: HealthApi#is_instance_ready\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
end
return data, status_code, headers
end
end
end
| 42.985185 | 451 | 0.699466 |
d583b506107c2eda1dda6f033f0487620eac17ec | 540 | Pod::Spec.new do |s|
s.name = "SimpleLogger"
s.version = "2.4.2"
s.summary = "Simple logging tool"
s.homepage = "https://github.com/thinkaboutiter/SimpleLogger"
s.license = 'MIT'
s.author = { "Thinka Boutiter" => "[email protected]" }
s.source = {
:git => "https://github.com/thinkaboutiter/SimpleLogger.git",
:tag => s.version }
s.platforms = {
:ios => "10.0",
:osx => "10.10",
:watchos => "3.0",
:tvos => "10.0" }
s.requires_arc = true
s.source_files = 'Sources/SimpleLogger/**/*'
end
| 28.421053 | 65 | 0.6 |
61753a1e433475707cddf3728b64ffe0adde6052 | 1,558 | class Tomcat < Formula
desc "Implementation of Java Servlet and JavaServer Pages"
homepage "https://tomcat.apache.org/"
url "https://www.apache.org/dyn/closer.cgi?path=/tomcat/tomcat-9/v9.0.13/bin/apache-tomcat-9.0.13.tar.gz"
sha256 "b8f7b7ac0cb6c53ae6524086efc69c6ea6933344908b294d91f9af3c67b559e7"
bottle :unneeded
depends_on :java => "1.8+"
def install
# Remove Windows scripts
rm_rf Dir["bin/*.bat"]
# Install files
prefix.install %w[NOTICE LICENSE RELEASE-NOTES RUNNING.txt]
libexec.install Dir["*"]
bin.install_symlink "#{libexec}/bin/catalina.sh" => "catalina"
end
plist_options :manual => "catalina run"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Disabled</key>
<false/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/catalina</string>
<string>run</string>
</array>
<key>KeepAlive</key>
<true/>
</dict>
</plist>
EOS
end
test do
ENV["CATALINA_BASE"] = testpath
cp_r Dir["#{libexec}/*"], testpath
rm Dir["#{libexec}/logs/*"]
pid = fork do
exec bin/"catalina", "start"
end
sleep 3
begin
system bin/"catalina", "stop"
ensure
Process.wait pid
end
assert_predicate testpath/"logs/catalina.out", :exist?
end
end
| 25.540984 | 107 | 0.6181 |
e94b3237bea3435236033761b0683ac822bce16b | 3,210 | module Cucumber
class StepMatch #:nodoc:
  attr_reader :step_definition

  # Creates a new StepMatch. The +name_to_report+ argument is what gets
  # reported to formatters; when it is nil, the formatted +name_to_match+
  # is used instead (see #format_args).
  #
  def initialize(step_definition, name_to_match, name_to_report, step_arguments)
    raise "name_to_match can't be nil" if name_to_match.nil?
    raise "step_arguments can't be nil (but it can be an empty array)" if step_arguments.nil?
    @step_definition, @name_to_match, @name_to_report, @step_arguments = step_definition, name_to_match, name_to_report, step_arguments
  end

  # The values captured from the step name by the step definition.
  def args
    @step_arguments.map{|g| g.val}
  end

  # The name to report (may be nil when no special name was given).
  def name
    @name_to_report
  end

  # Invokes the matched step definition with the captured arguments,
  # appending +multiline_arg+ (converted to a PyString when given as a
  # plain String) when one is present.
  def invoke(multiline_arg)
    multiline_arg = Ast::PyString.new(multiline_arg) if String === multiline_arg
    all_args = args
    all_args << multiline_arg.to_step_definition_arg if multiline_arg
    @step_definition.invoke(all_args)
  end

  # Formats the matched arguments of the associated Step. This method
  # is usually called from visitors, which render output.
  #
  # The +format+ can either be a String or a Proc.
  #
  # If it is a String it should be a format string according to
  # <tt>Kernel#sprintf</tt>, for example:
  #
  #   '<span class="param">%s</span></tt>'
  #
  # If it is a Proc, it should take one argument and return the formatted
  # argument, for example:
  #
  #   lambda { |param| "[#{param}]" }
  #
  def format_args(format = lambda{|a| a}, &proc)
    # When an explicit name to report was supplied it wins and no argument
    # formatting is applied.
    @name_to_report || replace_arguments(@name_to_match, @step_arguments, format, &proc)
  end

  def file_colon_line
    @step_definition.file_colon_line
  end

  # Synthetic backtrace entry pointing at the matched step definition.
  def backtrace_line
    "#{file_colon_line}:in `#{@step_definition.regexp_source}'"
  end

  # Length of the step definition's regexp source in codepoints
  # (unpack('U*') counts Unicode codepoints, not bytes).
  def text_length
    @step_definition.regexp_source.unpack('U*').length
  end

  # Returns +string+ with each captured argument replaced by its formatted
  # form. Arguments are processed in order; ones without a recorded offset,
  # or that start inside an already-replaced region, are skipped.
  def replace_arguments(string, step_arguments, format, &proc)
    s = string.dup
    offset = past_offset = 0
    step_arguments.each do |step_argument|
      next if step_argument.byte_offset.nil? || step_argument.byte_offset < past_offset
      # An explicit block takes precedence over +format+, which may be a
      # Proc or a sprintf-style format string.
      replacement = if block_given?
        proc.call(step_argument.val)
      elsif Proc === format
        format.call(step_argument.val)
      else
        format % step_argument.val
      end

      # +offset+ accumulates how much earlier replacements have shifted
      # later positions, measured in codepoints via unpack('U*').
      # NOTE(review): byte_offset is combined with character lengths here —
      # presumably fine for the encodings in use, but worth confirming.
      s[step_argument.byte_offset + offset, step_argument.val.length] = replacement
      offset += replacement.unpack('U*').length - step_argument.val.unpack('U*').length
      past_offset = step_argument.byte_offset + step_argument.val.length
    end
    s
  end

  def inspect #:nodoc:
    sprintf("#<%s:0x%x>", self.class, self.object_id)
  end
end
# Null-object counterpart of StepMatch, used when a step has no matching
# step definition. It mirrors the StepMatch interface so visitors can treat
# matched and unmatched steps uniformly; +step_definition+ is always nil.
class NoStepMatch #:nodoc:
  attr_reader :step_definition, :name

  def initialize(step, name)
    @step, @name = step, name
  end

  # The raw step name. The +format+ parameter is accepted only for
  # interface compatibility with StepMatch#format_args and is ignored.
  def format_args(format)
    @name
  end

  # Location of the step in the feature source; raises when the step
  # cannot provide one.
  def file_colon_line
    raise "No file:line for #{@step}" unless @step.file_colon_line
    @step.file_colon_line
  end

  # Delegates to the underlying step.
  def backtrace_line
    @step.backtrace_line
  end

  # Delegates to the underlying step.
  def text_length
    @step.text_length
  end
end
end
| 28.660714 | 137 | 0.660125 |
91a58253fdfb104b486e9dd4ae3cf78273c4039b | 355 | # coding: utf-8
# vim: set expandtab tabstop=2 shiftwidth=2 softtabstop=2 autoindent:
# Movie Extends Header box ("mehd"). Carries the fragment duration of a
# fragmented movie; its width depends on the box version.
class BMFF::Box::MovieExtendsHeader < BMFF::Box::Full
  attr_accessor :fragment_duration
  register_box "mehd"

  # Reads the payload after the Full-box header: a 64-bit duration for
  # version 1 boxes, 32-bit otherwise.
  def parse_data
    super
    @fragment_duration = version == 1 ? io.get_uint64 : io.get_uint32
  end
end
| 20.882353 | 69 | 0.709859 |
bb76488846f06df3b1a1bf7d6c540f57d359db2b | 144 | class User < ActiveRecord::Base
# bcrypt-backed password handling: adds the virtual password /
# password_confirmation attributes and an #authenticate method.
has_secure_password
has_many :vacations
# NOTE(review): validating :password presence here is partly redundant with
# has_secure_password, which already requires a password on create — confirm.
validates_presence_of :username, :email, :password
end
| 20.571429 | 54 | 0.736111 |
e221a4da729b935b35195c4d13f2513a64ef9aee | 7,772 | #
# Author:: Tyler Cloke (<[email protected]>)
# Copyright:: Copyright 2015-2016, Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
require "chef/knife/user_key_list"
require "chef/knife/client_key_list"
require "chef/knife/key_list"
require "chef/key"
# Behaviour shared by `knife user key list` and `knife client key list`:
# both commands must delegate the actual work to Chef::Knife::KeyList.
describe "key list commands that inherit knife" do
  shared_examples_for "a key list command" do
    let(:stderr) { StringIO.new }
    let(:params) { [] }
    let(:service_object) { instance_double(Chef::Knife::KeyList) }
    let(:command) do
      c = described_class.new([])
      c.ui.config[:disable_editing] = true
      # capture all UI output so the examples run silently
      allow(c.ui).to receive(:stderr).and_return(stderr)
      allow(c.ui).to receive(:stdout).and_return(stderr)
      allow(c).to receive(:show_usage)
      c
    end

    context "after apply_params! is called with valid args" do
      let(:params) { ["charmander"] }
      before do
        command.apply_params!(params)
      end

      context "when the service object is called" do
        it "creates a new instance of Chef::Knife::KeyList with the correct args" do
          expect(Chef::Knife::KeyList).to receive(:new)
            .with("charmander", command.list_method, command.ui, command.config)
            .and_return(service_object)
          command.service_object
        end
      end # when the service object is called
    end # after apply_params! is called with valid args
  end # a key list command

  describe Chef::Knife::UserKeyList do
    it_should_behave_like "a key list command"
    # defined in key_helpers.rb
    it_should_behave_like "a knife key command" do
      let(:service_object) { instance_double(Chef::Knife::KeyList) }
      let(:params) { ["charmander"] }
    end
  end

  describe Chef::Knife::ClientKeyList do
    it_should_behave_like "a key list command"
    # defined in key_helpers.rb
    it_should_behave_like "a knife key command" do
      let(:service_object) { instance_double(Chef::Knife::KeyList) }
      let(:params) { ["charmander"] }
    end
  end
end
# Unit tests for the KeyList service object itself, exercised once per list
# method (user keys vs client keys) via the shared example group below.
describe Chef::Knife::KeyList do
  let(:config) { Hash.new }
  let(:actor) { "charmander" }
  let(:ui) { instance_double("Chef::Knife::UI") }

  shared_examples_for "key list run command" do
    let(:key_list_object) do
      described_class.new(actor, list_method, ui, config)
    end

    before do
      # stub the HTTP layer and all terminal output
      allow(Chef::Key).to receive(list_method).and_return(http_response)
      allow(key_list_object).to receive(:display_info)
      # simply pass the string though that colorize takes in
      allow(key_list_object).to receive(:colorize).with(kind_of(String)) do |input|
        input
      end
    end

    context "when only_expired and only_non_expired were both passed" do
      before do
        key_list_object.config[:only_expired] = true
        key_list_object.config[:only_non_expired] = true
      end

      it "raises a Chef::Exceptions::KeyCommandInputError with the proper error message" do
        expect { key_list_object.run }.to raise_error(Chef::Exceptions::KeyCommandInputError, key_list_object.expired_and_non_expired_msg)
      end
    end

    context "when the command is run" do
      before do
        key_list_object.config[:only_expired] = false
        key_list_object.config[:only_non_expired] = false
        key_list_object.config[:with_details] = false
      end

      it "calls Chef::Key with the proper list command and input" do
        expect(Chef::Key).to receive(list_method).with(actor)
        key_list_object.run
      end

      # the fixture http_response contains two non-expired and one expired key
      it "displays all the keys" do
        expect(key_list_object).to receive(:display_info).with(/non-expired/).twice
        expect(key_list_object).to receive(:display_info).with(/out-of-date/).once
        key_list_object.run
      end

      context "when only_expired is called" do
        before do
          key_list_object.config[:only_expired] = true
        end

        it "excludes displaying non-expired keys" do
          expect(key_list_object).to receive(:display_info).with(/non-expired/).exactly(0).times
          key_list_object.run
        end

        it "displays the expired keys" do
          expect(key_list_object).to receive(:display_info).with(/out-of-date/).once
          key_list_object.run
        end
      end # when only_expired is called

      context "when only_non_expired is called" do
        before do
          key_list_object.config[:only_non_expired] = true
        end

        it "excludes displaying expired keys" do
          expect(key_list_object).to receive(:display_info).with(/out-of-date/).exactly(0).times
          key_list_object.run
        end

        it "displays the non-expired keys" do
          expect(key_list_object).to receive(:display_info).with(/non-expired/).twice
          key_list_object.run
        end
      end # when only_expired is called

      context "when with_details is false" do
        before do
          key_list_object.config[:with_details] = false
        end

        it "does not display the uri" do
          expect(key_list_object).to receive(:display_info).with(/https/).exactly(0).times
          key_list_object.run
        end

        it "does not display the expired status" do
          expect(key_list_object).to receive(:display_info).with(/\(expired\)/).exactly(0).times
          key_list_object.run
        end
      end # when with_details is false

      context "when with_details is true" do
        before do
          key_list_object.config[:with_details] = true
        end

        it "displays the uri" do
          expect(key_list_object).to receive(:display_info).with(/https/).exactly(3).times
          key_list_object.run
        end

        it "displays the expired status" do
          expect(key_list_object).to receive(:display_info).with(/\(expired\)/).once
          key_list_object.run
        end
      end # when with_details is true
    end # when the command is run
  end # key list run command

  context "when list_method is :list_by_user" do
    it_should_behave_like "key list run command" do
      let(:list_method) { :list_by_user }
      let(:http_response) do
        [
          { "uri" => "https://api.opscode.piab/users/charmander/keys/non-expired1", "name" => "non-expired1", "expired" => false },
          { "uri" => "https://api.opscode.piab/users/charmander/keys/non-expired2", "name" => "non-expired2", "expired" => false },
          { "uri" => "https://api.opscode.piab/users/mary/keys/out-of-date", "name" => "out-of-date", "expired" => true },
        ]
      end
    end
  end

  context "when list_method is :list_by_client" do
    it_should_behave_like "key list run command" do
      let(:list_method) { :list_by_client }
      let(:http_response) do
        [
          { "uri" => "https://api.opscode.piab/organizations/pokemon/clients/charmander/keys/non-expired1", "name" => "non-expired1", "expired" => false },
          { "uri" => "https://api.opscode.piab/organizations/pokemon/clients/charmander/keys/non-expired2", "name" => "non-expired2", "expired" => false },
          { "uri" => "https://api.opscode.piab/organizations/pokemon/clients/mary/keys/out-of-date", "name" => "out-of-date", "expired" => true },
        ]
      end
    end
  end
end
| 35.815668 | 155 | 0.659676 |
ffa6a1d9b8ebb5edeafa8f088d6f2ceeb83446ce | 2,377 | class User < ApplicationRecord
# Virtual (non-persisted) tokens; only their digests are stored in the DB.
attr_accessor :remember_token, :activation_token, :reset_token
before_save :downcase_email
before_create :create_activation_digest
validates :name, presence: true, length: { maximum: 50 }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i
validates :email, presence: true, length: { maximum: 255 },
                  format: { with: VALID_EMAIL_REGEX },
                  uniqueness: { case_sensitive: false }
has_secure_password
# allow_nil lets profile updates skip the password field; creation is still
# protected by has_secure_password's own presence requirement.
validates :password, presence: true, length: { minimum: 6 }, allow_nil: true

# Returns the hash digest of the given string.
def User.digest(string)
  # use the cheap cost in tests, the full cost elsewhere
  cost = ActiveModel::SecurePassword.min_cost ? BCrypt::Engine::MIN_COST :
                                                BCrypt::Engine.cost
  BCrypt::Password.create(string, cost: cost)
end

# Returns a random token.
def User.new_token
  SecureRandom.urlsafe_base64
end

# Remembers a user in the database for use in persistent sessions.
def remember
  self.remember_token = User.new_token
  update_attribute(:remember_digest, User.digest(remember_token))
end

# Returns true if the given token matches the digest.
# +attribute+ selects which digest column to check (:remember,
# :activation or :reset).
def authenticated?(attribute, token)
  digest = send("#{attribute}_digest")
  return false if digest.nil?
  BCrypt::Password.new(digest).is_password?(token)
end

# Forgets a user.
def forget
  update_attribute(:remember_digest, nil)
end

# Activates an account.
def activate
  update_columns(activated: true, activated_at: Time.zone.now)
end

# Sends activation email.
def send_activation_email
  UserMailer.account_activation(self).deliver_now
end

# Sets the password reset attributes.
def create_reset_digest
  self.reset_token = User.new_token
  update_columns(reset_digest: User.digest(reset_token), reset_sent_at: Time.zone.now)
end

# Sends password reset email.
def send_password_reset_email
  UserMailer.password_reset(self).deliver_now
end

# Returns true if a password reset has expired.
def password_reset_expired?
  reset_sent_at < 2.hours.ago
end

private

  # Converts email to all lower-case.
  def downcase_email
    self.email.downcase!
  end

  # Creates and assigns the activation token and digest.
  def create_activation_digest
    self.activation_token = User.new_token
    self.activation_digest = User.digest(activation_token)
  end
end
| 28.638554 | 88 | 0.701304 |
39042f1656f13f3cf6c98579e75d7239cd6919ba | 1,645 | require_migration
# Verifies the migration that splits digest-style tags ("sha256:abc") out of
# ContainerImage#tag into a dedicated #digest column, and its rollback.
describe AddContainerImageDigest do
  let(:container_image_stub) { migration_stub(:ContainerImage) }
  # Each entry maps a pre-migration tag to its expected post-migration
  # (tag, digest) pair; only "shaNNN:..." values move to :digest.
  let(:row_entries) do
    [
      {:tag_in => 'sha256', :tag_out => 'sha256', :digest => nil},
      {:tag_in => 'sha384', :tag_out => 'sha384', :digest => nil},
      {:tag_in => 'sha512', :tag_out => 'sha512', :digest => nil},
      {:tag_in => 'sha256abc', :tag_out => 'sha256abc', :digest => nil},
      {:tag_in => 'sha384abc', :tag_out => 'sha384abc', :digest => nil},
      {:tag_in => 'sha512abc', :tag_out => 'sha512abc', :digest => nil},
      {:tag_in => 'sha256:abc', :tag_out => nil, :digest => 'sha256:abc'},
      {:tag_in => 'sha384:abc', :tag_out => nil, :digest => 'sha384:abc'},
      {:tag_in => 'sha512:abc', :tag_out => nil, :digest => 'sha512:abc'}
    ]
  end

  migration_context :up do
    it "migrates a series of representative row" do
      row_entries.each do |x|
        x[:image] = container_image_stub.create!(:tag => x[:tag_in])
      end

      migrate

      row_entries.each do |x|
        expect(x[:image].reload).to have_attributes(
          :tag    => x[:tag_out],
          :digest => x[:digest]
        )
      end
    end
  end

  migration_context :down do
    it "migrates a series of representative row" do
      row_entries.each do |x|
        x[:image] = container_image_stub.create!(
          :tag    => x[:tag_out],
          :digest => x[:digest]
        )
      end

      migrate

      # rolling back re-merges the digest into the tag column
      row_entries.each do |x|
        expect(x[:image].reload).to have_attributes(:tag => x[:tag_in])
      end
    end
  end
end
| 30.462963 | 83 | 0.545289 |
03b7834e893b8054342d0a8b2ccc2145c01541dd | 1,691 | class Mkclean < Formula
desc "Optimizes Matroska and WebM files"
homepage "https://www.matroska.org/downloads/mkclean.html"
url "https://downloads.sourceforge.net/project/matroska/mkclean/mkclean-0.8.10.tar.bz2"
sha256 "96773e72903b00d73e68ba9d5f19744a91ed46d27acd511a10eb23533589777d"

bottle do
  cellar :any_skip_relocation
  sha256 "eb519c8f3fb9b2773529d5e7a9751cec7e2a7a67a76af92cab0e6b48449dc6de" => :high_sierra
  sha256 "73e502b5331d28da40fc3b94763f6ea30a141e48329bede7eddf3e396991671b" => :sierra
  sha256 "a5db5b2309de19ea395efaafcf828c253e38133464faca623545a221f2b0ba52" => :el_capitan
  sha256 "d6842897a02e0fc4d4e7ca046e1f41aaefebb13bceb324aabd2fbf1d96f6c024" => :x86_64_linux
end

# Fixes compile error with Xcode-4.3+, a hardcoded /Developer. Reported as:
# https://sourceforge.net/p/matroska/bugs/9/
patch :DATA if MacOS.prefer_64_bit?

def install
  # the configure script lives in the mkclean/ subdirectory of the tarball
  system "./mkclean/configure"
  system "make", "mkclean"
  # coremake prints the platform-specific output directory name
  bindir = `corec/tools/coremake/system_output.sh`.chomp
  bin.install Dir["release/#{bindir}/mk*"]
end

test do
  # --version exits non-zero (255), so pass the expected status explicitly
  output = shell_output("#{bin}/mkclean --version 2>&1", 255)
  assert_match version.to_s, output
end
end
__END__
--- a/corec/tools/coremake/gcc_osx_x64.build 2017-08-22 06:38:25.000000000 -0700
+++ b/corec/tools/coremake/gcc_osx_x64.build 2017-11-18 22:53:56.000000000 -0800
@@ -4,11 +4,10 @@
PLATFORMLIB = osx_x86
SVNDIR = osx_x86
-SDK = /Developer/SDKs/MacOSX10.5.sdk
//CC = xcrun --sdk macosx clang
-CCFLAGS=%(CCFLAGS) -arch x86_64 -mdynamic-no-pic -mmacosx-version-min=10.5
+CCFLAGS=%(CCFLAGS) -arch x86_64 -mdynamic-no-pic
ASMFLAGS = -f macho64 -D_MACHO -D_HIDDEN
#include "gcc_osx.inc"
| 35.229167 | 94 | 0.755174 |
791293a039f4f6bbc80f80e3cf90f5da84b0989a | 17,567 | # encoding: UTF-8
# This file contains data derived from the IANA Time Zone Database
# (http://www.iana.org/time-zones).
module TZInfo
module Data
module Definitions
module Atlantic
module Azores
include TimezoneDefinition
timezone 'Atlantic/Azores' do |tz|
tz.offset :o0, -6160, 0, :LMT
tz.offset :o1, -6872, 0, :HMT
tz.offset :o2, -7200, 0, :'-02'
tz.offset :o3, -7200, 3600, :'-01'
tz.offset :o4, -7200, 7200, :'+00'
tz.offset :o5, -3600, 0, :'-01'
tz.offset :o6, -3600, 3600, :'+00'
tz.offset :o7, 0, 0, :WET
tz.transition 1884, 1, :o1, -2713904240, 2601910697, 1080
tz.transition 1912, 1, :o2, -1830376800, 29032831, 12
tz.transition 1916, 6, :o3, -1689548400, 58104781, 24
tz.transition 1916, 11, :o2, -1677794400, 29054023, 12
tz.transition 1917, 3, :o3, -1667430000, 58110925, 24
tz.transition 1917, 10, :o2, -1647730800, 58116397, 24
tz.transition 1918, 3, :o3, -1635807600, 58119709, 24
tz.transition 1918, 10, :o2, -1616194800, 58125157, 24
tz.transition 1919, 3, :o3, -1604358000, 58128445, 24
tz.transition 1919, 10, :o2, -1584658800, 58133917, 24
tz.transition 1920, 3, :o3, -1572735600, 58137229, 24
tz.transition 1920, 10, :o2, -1553036400, 58142701, 24
tz.transition 1921, 3, :o3, -1541199600, 58145989, 24
tz.transition 1921, 10, :o2, -1521500400, 58151461, 24
tz.transition 1924, 4, :o3, -1442444400, 58173421, 24
tz.transition 1924, 10, :o2, -1426806000, 58177765, 24
tz.transition 1926, 4, :o3, -1379286000, 58190965, 24
tz.transition 1926, 10, :o2, -1364770800, 58194997, 24
tz.transition 1927, 4, :o3, -1348441200, 58199533, 24
tz.transition 1927, 10, :o2, -1333321200, 58203733, 24
tz.transition 1928, 4, :o3, -1316386800, 58208437, 24
tz.transition 1928, 10, :o2, -1301266800, 58212637, 24
tz.transition 1929, 4, :o3, -1284332400, 58217341, 24
tz.transition 1929, 10, :o2, -1269817200, 58221373, 24
tz.transition 1931, 4, :o3, -1221433200, 58234813, 24
tz.transition 1931, 10, :o2, -1206918000, 58238845, 24
tz.transition 1932, 4, :o3, -1191193200, 58243213, 24
tz.transition 1932, 10, :o2, -1175468400, 58247581, 24
tz.transition 1934, 4, :o3, -1127689200, 58260853, 24
tz.transition 1934, 10, :o2, -1111964400, 58265221, 24
tz.transition 1935, 3, :o3, -1096844400, 58269421, 24
tz.transition 1935, 10, :o2, -1080514800, 58273957, 24
tz.transition 1936, 4, :o3, -1063580400, 58278661, 24
tz.transition 1936, 10, :o2, -1049065200, 58282693, 24
tz.transition 1937, 4, :o3, -1033340400, 58287061, 24
tz.transition 1937, 10, :o2, -1017615600, 58291429, 24
tz.transition 1938, 3, :o3, -1002495600, 58295629, 24
tz.transition 1938, 10, :o2, -986166000, 58300165, 24
tz.transition 1939, 4, :o3, -969231600, 58304869, 24
tz.transition 1939, 11, :o2, -950482800, 58310077, 24
tz.transition 1940, 2, :o3, -942015600, 58312429, 24
tz.transition 1940, 10, :o2, -922662000, 58317805, 24
tz.transition 1941, 4, :o3, -906937200, 58322173, 24
tz.transition 1941, 10, :o2, -891126000, 58326565, 24
tz.transition 1942, 3, :o3, -877302000, 58330405, 24
tz.transition 1942, 4, :o4, -873676800, 4860951, 2
tz.transition 1942, 8, :o3, -864000000, 4861175, 2
tz.transition 1942, 10, :o2, -857948400, 58335781, 24
tz.transition 1943, 3, :o3, -845852400, 58339141, 24
tz.transition 1943, 4, :o4, -842832000, 4861665, 2
tz.transition 1943, 8, :o3, -831340800, 4861931, 2
tz.transition 1943, 10, :o2, -825894000, 58344685, 24
tz.transition 1944, 3, :o3, -814402800, 58347877, 24
tz.transition 1944, 4, :o4, -810777600, 4862407, 2
tz.transition 1944, 8, :o3, -799891200, 4862659, 2
tz.transition 1944, 10, :o2, -794444400, 58353421, 24
tz.transition 1945, 3, :o3, -782953200, 58356613, 24
tz.transition 1945, 4, :o4, -779328000, 4863135, 2
tz.transition 1945, 8, :o3, -768441600, 4863387, 2
tz.transition 1945, 10, :o2, -762994800, 58362157, 24
tz.transition 1946, 4, :o3, -749084400, 58366021, 24
tz.transition 1946, 10, :o2, -733359600, 58370389, 24
tz.transition 1947, 4, :o3, -717624000, 7296845, 3
tz.transition 1947, 10, :o2, -701899200, 7297391, 3
tz.transition 1948, 4, :o3, -686174400, 7297937, 3
tz.transition 1948, 10, :o2, -670449600, 7298483, 3
tz.transition 1949, 4, :o3, -654724800, 7299029, 3
tz.transition 1949, 10, :o2, -639000000, 7299575, 3
tz.transition 1951, 4, :o3, -591825600, 7301213, 3
tz.transition 1951, 10, :o2, -575496000, 7301780, 3
tz.transition 1952, 4, :o3, -559771200, 7302326, 3
tz.transition 1952, 10, :o2, -544046400, 7302872, 3
tz.transition 1953, 4, :o3, -528321600, 7303418, 3
tz.transition 1953, 10, :o2, -512596800, 7303964, 3
tz.transition 1954, 4, :o3, -496872000, 7304510, 3
tz.transition 1954, 10, :o2, -481147200, 7305056, 3
tz.transition 1955, 4, :o3, -465422400, 7305602, 3
tz.transition 1955, 10, :o2, -449697600, 7306148, 3
tz.transition 1956, 4, :o3, -433972800, 7306694, 3
tz.transition 1956, 10, :o2, -417643200, 7307261, 3
tz.transition 1957, 4, :o3, -401918400, 7307807, 3
tz.transition 1957, 10, :o2, -386193600, 7308353, 3
tz.transition 1958, 4, :o3, -370468800, 7308899, 3
tz.transition 1958, 10, :o2, -354744000, 7309445, 3
tz.transition 1959, 4, :o3, -339019200, 7309991, 3
tz.transition 1959, 10, :o2, -323294400, 7310537, 3
tz.transition 1960, 4, :o3, -307569600, 7311083, 3
tz.transition 1960, 10, :o2, -291844800, 7311629, 3
tz.transition 1961, 4, :o3, -276120000, 7312175, 3
tz.transition 1961, 10, :o2, -260395200, 7312721, 3
tz.transition 1962, 4, :o3, -244670400, 7313267, 3
tz.transition 1962, 10, :o2, -228340800, 7313834, 3
tz.transition 1963, 4, :o3, -212616000, 7314380, 3
tz.transition 1963, 10, :o2, -196891200, 7314926, 3
tz.transition 1964, 4, :o3, -181166400, 7315472, 3
tz.transition 1964, 10, :o2, -165441600, 7316018, 3
tz.transition 1965, 4, :o3, -149716800, 7316564, 3
tz.transition 1965, 10, :o2, -133992000, 7317110, 3
tz.transition 1966, 4, :o5, -118267200, 7317656, 3
tz.transition 1977, 3, :o6, 228272400
tz.transition 1977, 9, :o5, 243997200
tz.transition 1978, 4, :o6, 260326800
tz.transition 1978, 10, :o5, 276051600
tz.transition 1979, 4, :o6, 291776400
tz.transition 1979, 9, :o5, 307504800
tz.transition 1980, 3, :o6, 323226000
tz.transition 1980, 9, :o5, 338954400
tz.transition 1981, 3, :o6, 354679200
tz.transition 1981, 9, :o5, 370404000
tz.transition 1982, 3, :o6, 386128800
tz.transition 1982, 9, :o5, 401853600
tz.transition 1983, 3, :o6, 417582000
tz.transition 1983, 9, :o5, 433303200
tz.transition 1984, 3, :o6, 449028000
tz.transition 1984, 9, :o5, 465357600
tz.transition 1985, 3, :o6, 481082400
tz.transition 1985, 9, :o5, 496807200
tz.transition 1986, 3, :o6, 512532000
tz.transition 1986, 9, :o5, 528256800
tz.transition 1987, 3, :o6, 543981600
tz.transition 1987, 9, :o5, 559706400
tz.transition 1988, 3, :o6, 575431200
tz.transition 1988, 9, :o5, 591156000
tz.transition 1989, 3, :o6, 606880800
tz.transition 1989, 9, :o5, 622605600
tz.transition 1990, 3, :o6, 638330400
tz.transition 1990, 9, :o5, 654660000
tz.transition 1991, 3, :o6, 670384800
tz.transition 1991, 9, :o5, 686109600
tz.transition 1992, 3, :o6, 701834400
tz.transition 1992, 9, :o7, 717559200
tz.transition 1993, 3, :o6, 733280400
tz.transition 1993, 9, :o5, 749005200
tz.transition 1994, 3, :o6, 764730000
tz.transition 1994, 9, :o5, 780454800
tz.transition 1995, 3, :o6, 796179600
tz.transition 1995, 9, :o5, 811904400
tz.transition 1996, 3, :o6, 828234000
tz.transition 1996, 10, :o5, 846378000
tz.transition 1997, 3, :o6, 859683600
tz.transition 1997, 10, :o5, 877827600
tz.transition 1998, 3, :o6, 891133200
tz.transition 1998, 10, :o5, 909277200
tz.transition 1999, 3, :o6, 922582800
tz.transition 1999, 10, :o5, 941331600
tz.transition 2000, 3, :o6, 954032400
tz.transition 2000, 10, :o5, 972781200
tz.transition 2001, 3, :o6, 985482000
tz.transition 2001, 10, :o5, 1004230800
tz.transition 2002, 3, :o6, 1017536400
tz.transition 2002, 10, :o5, 1035680400
tz.transition 2003, 3, :o6, 1048986000
tz.transition 2003, 10, :o5, 1067130000
tz.transition 2004, 3, :o6, 1080435600
tz.transition 2004, 10, :o5, 1099184400
tz.transition 2005, 3, :o6, 1111885200
tz.transition 2005, 10, :o5, 1130634000
tz.transition 2006, 3, :o6, 1143334800
tz.transition 2006, 10, :o5, 1162083600
tz.transition 2007, 3, :o6, 1174784400
tz.transition 2007, 10, :o5, 1193533200
tz.transition 2008, 3, :o6, 1206838800
tz.transition 2008, 10, :o5, 1224982800
tz.transition 2009, 3, :o6, 1238288400
tz.transition 2009, 10, :o5, 1256432400
tz.transition 2010, 3, :o6, 1269738000
tz.transition 2010, 10, :o5, 1288486800
tz.transition 2011, 3, :o6, 1301187600
tz.transition 2011, 10, :o5, 1319936400
tz.transition 2012, 3, :o6, 1332637200
tz.transition 2012, 10, :o5, 1351386000
tz.transition 2013, 3, :o6, 1364691600
tz.transition 2013, 10, :o5, 1382835600
tz.transition 2014, 3, :o6, 1396141200
tz.transition 2014, 10, :o5, 1414285200
tz.transition 2015, 3, :o6, 1427590800
tz.transition 2015, 10, :o5, 1445734800
tz.transition 2016, 3, :o6, 1459040400
tz.transition 2016, 10, :o5, 1477789200
tz.transition 2017, 3, :o6, 1490490000
tz.transition 2017, 10, :o5, 1509238800
tz.transition 2018, 3, :o6, 1521939600
tz.transition 2018, 10, :o5, 1540688400
tz.transition 2019, 3, :o6, 1553994000
tz.transition 2019, 10, :o5, 1572138000
tz.transition 2020, 3, :o6, 1585443600
tz.transition 2020, 10, :o5, 1603587600
tz.transition 2021, 3, :o6, 1616893200
tz.transition 2021, 10, :o5, 1635642000
tz.transition 2022, 3, :o6, 1648342800
tz.transition 2022, 10, :o5, 1667091600
tz.transition 2023, 3, :o6, 1679792400
tz.transition 2023, 10, :o5, 1698541200
tz.transition 2024, 3, :o6, 1711846800
tz.transition 2024, 10, :o5, 1729990800
tz.transition 2025, 3, :o6, 1743296400
tz.transition 2025, 10, :o5, 1761440400
tz.transition 2026, 3, :o6, 1774746000
tz.transition 2026, 10, :o5, 1792890000
tz.transition 2027, 3, :o6, 1806195600
tz.transition 2027, 10, :o5, 1824944400
tz.transition 2028, 3, :o6, 1837645200
tz.transition 2028, 10, :o5, 1856394000
tz.transition 2029, 3, :o6, 1869094800
tz.transition 2029, 10, :o5, 1887843600
tz.transition 2030, 3, :o6, 1901149200
tz.transition 2030, 10, :o5, 1919293200
tz.transition 2031, 3, :o6, 1932598800
tz.transition 2031, 10, :o5, 1950742800
tz.transition 2032, 3, :o6, 1964048400
tz.transition 2032, 10, :o5, 1982797200
tz.transition 2033, 3, :o6, 1995498000
tz.transition 2033, 10, :o5, 2014246800
tz.transition 2034, 3, :o6, 2026947600
tz.transition 2034, 10, :o5, 2045696400
tz.transition 2035, 3, :o6, 2058397200
tz.transition 2035, 10, :o5, 2077146000
tz.transition 2036, 3, :o6, 2090451600
tz.transition 2036, 10, :o5, 2108595600
tz.transition 2037, 3, :o6, 2121901200
tz.transition 2037, 10, :o5, 2140045200
tz.transition 2038, 3, :o6, 2153350800, 59172253, 24
tz.transition 2038, 10, :o5, 2172099600, 59177461, 24
tz.transition 2039, 3, :o6, 2184800400, 59180989, 24
tz.transition 2039, 10, :o5, 2203549200, 59186197, 24
tz.transition 2040, 3, :o6, 2216250000, 59189725, 24
tz.transition 2040, 10, :o5, 2234998800, 59194933, 24
tz.transition 2041, 3, :o6, 2248304400, 59198629, 24
tz.transition 2041, 10, :o5, 2266448400, 59203669, 24
tz.transition 2042, 3, :o6, 2279754000, 59207365, 24
tz.transition 2042, 10, :o5, 2297898000, 59212405, 24
tz.transition 2043, 3, :o6, 2311203600, 59216101, 24
tz.transition 2043, 10, :o5, 2329347600, 59221141, 24
tz.transition 2044, 3, :o6, 2342653200, 59224837, 24
tz.transition 2044, 10, :o5, 2361402000, 59230045, 24
tz.transition 2045, 3, :o6, 2374102800, 59233573, 24
tz.transition 2045, 10, :o5, 2392851600, 59238781, 24
tz.transition 2046, 3, :o6, 2405552400, 59242309, 24
tz.transition 2046, 10, :o5, 2424301200, 59247517, 24
tz.transition 2047, 3, :o6, 2437606800, 59251213, 24
tz.transition 2047, 10, :o5, 2455750800, 59256253, 24
tz.transition 2048, 3, :o6, 2469056400, 59259949, 24
tz.transition 2048, 10, :o5, 2487200400, 59264989, 24
tz.transition 2049, 3, :o6, 2500506000, 59268685, 24
tz.transition 2049, 10, :o5, 2519254800, 59273893, 24
tz.transition 2050, 3, :o6, 2531955600, 59277421, 24
tz.transition 2050, 10, :o5, 2550704400, 59282629, 24
tz.transition 2051, 3, :o6, 2563405200, 59286157, 24
tz.transition 2051, 10, :o5, 2582154000, 59291365, 24
tz.transition 2052, 3, :o6, 2595459600, 59295061, 24
tz.transition 2052, 10, :o5, 2613603600, 59300101, 24
tz.transition 2053, 3, :o6, 2626909200, 59303797, 24
tz.transition 2053, 10, :o5, 2645053200, 59308837, 24
tz.transition 2054, 3, :o6, 2658358800, 59312533, 24
tz.transition 2054, 10, :o5, 2676502800, 59317573, 24
tz.transition 2055, 3, :o6, 2689808400, 59321269, 24
tz.transition 2055, 10, :o5, 2708557200, 59326477, 24
tz.transition 2056, 3, :o6, 2721258000, 59330005, 24
tz.transition 2056, 10, :o5, 2740006800, 59335213, 24
tz.transition 2057, 3, :o6, 2752707600, 59338741, 24
tz.transition 2057, 10, :o5, 2771456400, 59343949, 24
tz.transition 2058, 3, :o6, 2784762000, 59347645, 24
tz.transition 2058, 10, :o5, 2802906000, 59352685, 24
tz.transition 2059, 3, :o6, 2816211600, 59356381, 24
tz.transition 2059, 10, :o5, 2834355600, 59361421, 24
tz.transition 2060, 3, :o6, 2847661200, 59365117, 24
tz.transition 2060, 10, :o5, 2866410000, 59370325, 24
tz.transition 2061, 3, :o6, 2879110800, 59373853, 24
tz.transition 2061, 10, :o5, 2897859600, 59379061, 24
tz.transition 2062, 3, :o6, 2910560400, 59382589, 24
tz.transition 2062, 10, :o5, 2929309200, 59387797, 24
tz.transition 2063, 3, :o6, 2942010000, 59391325, 24
tz.transition 2063, 10, :o5, 2960758800, 59396533, 24
tz.transition 2064, 3, :o6, 2974064400, 59400229, 24
tz.transition 2064, 10, :o5, 2992208400, 59405269, 24
tz.transition 2065, 3, :o6, 3005514000, 59408965, 24
tz.transition 2065, 10, :o5, 3023658000, 59414005, 24
tz.transition 2066, 3, :o6, 3036963600, 59417701, 24
tz.transition 2066, 10, :o5, 3055712400, 59422909, 24
tz.transition 2067, 3, :o6, 3068413200, 59426437, 24
tz.transition 2067, 10, :o5, 3087162000, 59431645, 24
tz.transition 2068, 3, :o6, 3099862800, 59435173, 24
tz.transition 2068, 10, :o5, 3118611600, 59440381, 24
tz.transition 2069, 3, :o6, 3131917200, 59444077, 24
tz.transition 2069, 10, :o5, 3150061200, 59449117, 24
end
end
end
end
end
end
| 55.94586 | 69 | 0.57978 |
386ac283b83ff37d31dee3fc6c4eea38bb45d7c2 | 5,325 | require 'yabeda/prometheus'
require 'rack'
module ThreeScale
  module Backend
    # Exposes listener request metrics (response codes and latencies) to
    # Prometheus via Yabeda, split between the public auth/report endpoints
    # and the internal API.
    class ListenerMetrics
      # Maps public endpoint paths to the request-type label reported.
      AUTH_AND_REPORT_REQUEST_TYPES = {
        '/transactions/authorize.xml' => 'authorize',
        '/transactions/oauth_authorize.xml' => 'authorize_oauth',
        '/transactions/authrep.xml' => 'authrep',
        '/transactions/oauth_authrep.xml' => 'authrep_oauth',
        '/transactions.xml' => 'report'
      }
      private_constant :AUTH_AND_REPORT_REQUEST_TYPES

      # Only the first match is taken into account, that's why for example,
      # "/\/services\/.*\/stats/" needs to appear before "/\/services/"
      INTERNAL_API_PATHS = [
        [/\/services\/.*\/alert_limits/, 'alerts'.freeze],
        [/\/services\/.*\/applications\/.*\/keys/, 'application_keys'.freeze],
        [/\/services\/.*\/applications\/.*\/referrer_filters/, 'application_referrer_filters'.freeze],
        [/\/services\/.*\/applications\/.*\/utilization/, 'utilization'.freeze],
        [/\/services\/.*\/applications/, 'applications'.freeze],
        [/\/services\/.*\/errors/, 'errors'.freeze],
        [/\/events/, 'events'.freeze],
        [/\/services\/.*\/metrics/, 'metrics'.freeze],
        [/\/service_tokens/, 'service_tokens'.freeze],
        [/\/services\/.*\/stats/, 'stats'.freeze],
        [/\/services\/.*\/plans\/.*\/usagelimits/, 'usage_limits'.freeze],
        [/\/services/, 'services'.freeze],
      ].freeze
      private_constant :INTERNAL_API_PATHS

      # Most requests will be under 100ms, so use a higher granularity from there
      TIME_BUCKETS = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.25, 0.5, 0.75, 1]
      private_constant :TIME_BUCKETS

      class << self
        # 4xx codes reported individually; all others collapse into "4xx".
        ERRORS_4XX_TO_TRACK = Set[403, 404, 409].freeze
        private_constant :ERRORS_4XX_TO_TRACK

        # Boots the Prometheus exporter. Must be called once per process
        # before the report_* methods are used.
        def start_metrics_server(port = nil)
          configure_data_store
          define_metrics

          # Yabeda does not accept the port as a param
          ENV['PROMETHEUS_EXPORTER_PORT'] = port.to_s if port
          Yabeda::Prometheus::Exporter.start_metrics_server!
        end

        # Increments the response-code counter for the given request path.
        def report_resp_code(path, resp_code)
          req_type = req_type(path)
          prometheus_group = prometheus_group(req_type)

          Yabeda.send(prometheus_group).response_codes.increment(
            {
              request_type: req_type,
              resp_code: code_group(resp_code)
            },
            by: 1
          )
        end

        # Records the request duration (seconds) in the latency histogram.
        def report_response_time(path, request_time)
          req_type = req_type(path)
          prometheus_group = prometheus_group(req_type)

          Yabeda.send(prometheus_group).response_times.measure(
            { request_type: req_type },
            request_time
          )
        end

        private

        def configure_data_store
          # Needed to aggregate metrics across processes.
          # Ref: https://github.com/yabeda-rb/yabeda-prometheus#multi-process-server-support
          Dir['/tmp/prometheus/*.bin'].each do |file_path|
            File.unlink(file_path)
          end

          Prometheus::Client.config.data_store = Prometheus::Client::DataStores::DirectFileStore.new(
            dir: '/tmp/prometheus'
          )
        end

        # Declares the two metric groups (public listener / internal API),
        # each with a response-code counter and a latency histogram.
        def define_metrics
          Yabeda.configure do
            group :apisonator_listener do
              counter :response_codes do
                comment 'Response codes'
                tags %i[request_type resp_code]
              end

              histogram :response_times do
                comment 'Response times'
                unit :seconds
                tags %i[request_type]
                buckets TIME_BUCKETS
              end
            end

            group :apisonator_listener_internal_api do
              counter :response_codes do
                comment 'Response codes'
                tags %i[request_type resp_code]
              end

              histogram :response_times do
                comment 'Response times'
                unit :seconds
                tags %i[request_type]
                buckets TIME_BUCKETS
              end
            end
          end

          # Note that this method raises if called more than once. Both
          # listeners and workers define their metrics, but that's fine because
          # a process cannot act as both.
          Yabeda.configure!
        end

        # Collapses a numeric status code into its reported label
        # ("2xx", "4xx", "5xx", a tracked 4xx code, or "unknown").
        def code_group(resp_code)
          case resp_code
          when (200...300)
            '2xx'.freeze
          when (400...500)
            ERRORS_4XX_TO_TRACK.include?(resp_code) ? resp_code : '4xx'.freeze
          when (500...600)
            '5xx'.freeze
          else
            'unknown'.freeze
          end
        end

        def req_type(path)
          AUTH_AND_REPORT_REQUEST_TYPES[path] || internal_api_req_type(path)
        end

        # Returns nil when no internal API pattern matches the path.
        def internal_api_req_type(path)
          (_regex, type) = INTERNAL_API_PATHS.find { |(regex, _)| regex.match path }
          type
        end

        # Returns the group as defined in .define_metrics
        def prometheus_group(request_type)
          if AUTH_AND_REPORT_REQUEST_TYPES.values.include? request_type
            :apisonator_listener
          else
            :apisonator_listener_internal_api
          end
        end
      end
    end
  end
end
| 33.490566 | 102 | 0.578592 |
1c28ecf31eb50d7137f48ec7b2fdc32a178a96e5 | 349 | module Softcover
module Commands
module Auth
extend self
def login(email, password)
require "softcover/client"
client = Softcover::Client.new email, password
client.login!
end
# Signs the user out by clearing the stored API key; the config library
# is required lazily, matching the style of #login.
def logout
  require "softcover/config"
  Softcover::Config['api_key'] = nil
end
end
end
end
| 18.368421 | 54 | 0.604585 |
87bb428c67496b32c69a2327337b5243f139aaf3 | 28,373 | #
# Author:: Adam Jacob (<[email protected]>)
# Author:: Christopher Brown (<[email protected]>)
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# Copyright:: Copyright (c) 2010 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
require 'uri'
require 'net/https'
require 'stringio'
SIGNING_KEY_DOT_PEM="-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEA49TA0y81ps0zxkOpmf5V4/c4IeR5yVyQFpX3JpxO4TquwnRh
8VSUhrw8kkTLmB3cS39Db+3HadvhoqCEbqPE6915kXSuk/cWIcNozujLK7tkuPEy
YVsyTioQAddSdfe+8EhQVf3oHxaKmUd6waXrWqYCnhxgOjxocenREYNhZ/OETIei
PbOku47vB4nJK/0GhKBytL2XnsRgfKgDxf42BqAi1jglIdeq8lAWZNF9TbNBU21A
O1iuT7Pm6LyQujhggPznR5FJhXKRUARXBJZawxpGV4dGtdcahwXNE4601aXPra+x
PcRd2puCNoEDBzgVuTSsLYeKBDMSfs173W1QYwIDAQABAoIBAGF05q7vqOGbMaSD
2Q7YbuE/JTHKTBZIlBI1QC2x+0P5GDxyEFttNMOVzcs7xmNhkpRw8eX1LrInrpMk
WsIBKAFFEfWYlf0RWtRChJjNl+szE9jQxB5FJnWtJH/FHa78tR6PsF24aQyzVcJP
g0FGujBihwgfV0JSCNOBkz8MliQihjQA2i8PGGmo4R4RVzGfxYKTIq9vvRq/+QEa
Q4lpVLoBqnENpnY/9PTl6JMMjW2b0spbLjOPVwDaIzXJ0dChjNXo15K5SHI5mALJ
I5gN7ODGb8PKUf4619ez194FXq+eob5YJdilTFKensIUvt3YhP1ilGMM+Chi5Vi/
/RCTw3ECgYEA9jTw4wv9pCswZ9wbzTaBj9yZS3YXspGg26y6Ohq3ZmvHz4jlT6uR
xK+DDcUiK4072gci8S4Np0fIVS7q6ivqcOdzXPrTF5/j+MufS32UrBbUTPiM1yoO
ECcy+1szl/KoLEV09bghPbvC58PFSXV71evkaTETYnA/F6RK12lEepcCgYEA7OSy
bsMrGDVU/MKJtwqyGP9ubA53BorM4Pp9VVVSCrGGVhb9G/XNsjO5wJC8J30QAo4A
s59ZzCpyNRy046AB8jwRQuSwEQbejSdeNgQGXhZ7aIVUtuDeFFdaIz/zjVgxsfj4
DPOuzieMmJ2MLR4F71ocboxNoDI7xruPSE8dDhUCgYA3vx732cQxgtHwAkeNPJUz
dLiE/JU7CnxIoSB9fYUfPLI+THnXgzp7NV5QJN2qzMzLfigsQcg3oyo6F2h7Yzwv
GkjlualIRRzCPaCw4Btkp7qkPvbs1QngIHALt8fD1N69P3DPHkTwjG4COjKWgnJq
qoHKS6Fe/ZlbigikI6KsuwKBgQCTlSLoyGRHr6oj0hqz01EDK9ciMJzMkZp0Kvn8
OKxlBxYW+jlzut4MQBdgNYtS2qInxUoAnaz2+hauqhSzntK3k955GznpUatCqx0R
b857vWviwPX2/P6+E3GPdl8IVsKXCvGWOBZWTuNTjQtwbDzsUepWoMgXnlQJSn5I
YSlLxQKBgQD16Gw9kajpKlzsPa6XoQeGmZALT6aKWJQlrKtUQIrsIWM0Z6eFtX12
2jjHZ0awuCQ4ldqwl8IfRogWMBkHOXjTPVK0YKWWlxMpD/5+bGPARa5fir8O1Zpo
Y6S6MeZ69Rp89ma4ttMZ+kwi1+XyHqC/dlcVRW42Zl5Dc7BALRlJjQ==
-----END RSA PRIVATE KEY-----"
describe Chef::REST do
before(:each) do
@log_stringio = StringIO.new
Chef::Log.init(@log_stringio)
Chef::REST::CookieJar.stub!(:instance).and_return({})
@base_url = "http://chef.example.com:4000"
@monkey_uri = URI.parse("http://chef.example.com:4000/monkey")
@rest = Chef::REST.new(@base_url, nil, nil)
Chef::REST::CookieJar.instance.clear
end
describe "calling an HTTP verb on a path or absolute URL" do
it "adds a relative URL to the base url it was initialized with" do
@rest.create_url("foo/bar/baz").should == URI.parse(@base_url + "/foo/bar/baz")
end
it "replaces the base URL when given an absolute URL" do
@rest.create_url("http://chef-rulez.example.com:9000").should == URI.parse("http://chef-rulez.example.com:9000")
end
it "makes a :GET request with the composed url object" do
@rest.should_receive(:api_request).with(:GET, @monkey_uri, {})
@rest.get_rest("monkey")
end
it "makes a :GET reqest for a streaming download with the composed url" do
@rest.should_receive(:streaming_request).with(@monkey_uri, {})
@rest.get_rest("monkey", true)
end
it "makes a :DELETE request with the composed url object" do
@rest.should_receive(:api_request).with(:DELETE, @monkey_uri, {})
@rest.delete_rest("monkey")
end
it "makes a :POST request with the composed url object and data" do
@rest.should_receive(:api_request).with(:POST, @monkey_uri, {}, "data")
@rest.post_rest("monkey", "data")
end
it "makes a :PUT request with the composed url object and data" do
@rest.should_receive(:api_request).with(:PUT, @monkey_uri, {}, "data")
@rest.put_rest("monkey", "data")
end
end
describe "when configured to authenticate to the Chef server" do
  before do
    @url = URI.parse("http://chef.example.com:4000")
    Chef::Config[:node_name] = "webmonkey.example.com"
    Chef::Config[:client_key] = CHEF_SPEC_DATA + "/ssl/private_key.pem"
    @rest = Chef::REST.new(@url)
  end

  # When no explicit credentials are passed, they come from Chef::Config.
  it "configures itself to use the node_name and client_key in the config by default" do
    @rest.client_name.should == "webmonkey.example.com"
    @rest.signing_key_filename.should == CHEF_SPEC_DATA + "/ssl/private_key.pem"
  end

  it "provides access to the raw key data" do
    @rest.signing_key.should == SIGNING_KEY_DOT_PEM
  end

  # Anonymous clients (nil name/key) are valid; requests simply go unsigned.
  it "does not error out when initialized without credentials" do
    @rest = Chef::REST.new(@url, nil, nil) #should_not raise_error hides the bt from you, so screw it.
    @rest.client_name.should be_nil
    @rest.signing_key.should be_nil
  end

  it "indicates that requests should not be signed when it has no credentials" do
    @rest = Chef::REST.new(@url, nil, nil)
    @rest.sign_requests?.should be_false
  end
end
context "when making REST requests" do
before(:each) do
Chef::Config[:ssl_client_cert] = nil
Chef::Config[:ssl_client_key] = nil
@url = URI.parse("https://one:80/?foo=bar")
@http_response = Net::HTTPSuccess.new("1.1", "200", "successful rest req")
@http_response.stub!(:read_body)
@http_response.stub!(:body).and_return("ninja")
@http_response.add_field("Content-Length", "5")
@http_client = Net::HTTP.new(@url.host, @url.port)
Net::HTTP.stub!(:new).and_return(@http_client)
@http_client.stub!(:request).and_yield(@http_response).and_return(@http_response)
@base_headers = { 'Accept' => 'application/json',
'X-Chef-Version' => Chef::VERSION,
'Accept-Encoding' => Chef::REST::RESTRequest::ENCODING_GZIP_DEFLATE}
@req_with_body_headers = @base_headers.merge("Content-Type" => "application/json", "Content-Length" => '13')
end
describe "using the run_request API" do
it "should build a new HTTP GET request" do
request = Net::HTTP::Get.new(@url.path)
Net::HTTP::Get.should_receive(:new).with("/?foo=bar", @base_headers).and_return(request)
@rest.run_request(:GET, @url, {})
end
it "should build a new HTTP POST request" do
request = Net::HTTP::Post.new(@url.path)
Net::HTTP::Post.should_receive(:new).with("/?foo=bar", @req_with_body_headers).and_return(request)
@rest.run_request(:POST, @url, {}, {:one=>:two})
request.body.should == '{"one":"two"}'
end
it "should build a new HTTP PUT request" do
request = Net::HTTP::Put.new(@url.path)
expected_headers = @base_headers.merge("Content-Length" => '13')
Net::HTTP::Put.should_receive(:new).with("/?foo=bar", @req_with_body_headers).and_return(request)
@rest.run_request(:PUT, @url, {}, {:one=>:two})
request.body.should == '{"one":"two"}'
end
it "should build a new HTTP DELETE request" do
request = Net::HTTP::Delete.new(@url.path)
Net::HTTP::Delete.should_receive(:new).with("/?foo=bar", @base_headers).and_return(request)
@rest.run_request(:DELETE, @url)
end
it "should raise an error if the method is not GET/PUT/POST/DELETE" do
lambda { @rest.api_request(:MONKEY, @url) }.should raise_error(ArgumentError)
end
it "returns the response body when the response is successful but content-type is not JSON" do
@rest.run_request(:GET, @url).should == "ninja"
end
it "should call read_body without a block if the request is not raw" do
@http_response.should_receive(:body)
@rest.run_request(:GET, @url, {}, nil, false)
end
it "should inflate the body as to an object if JSON is returned" do
@http_response.add_field("content-type", "application/json")
Chef::JSONCompat.should_receive(:from_json).with("ninja").and_return("ohai2u_success")
@rest.run_request(:GET, @url, {}).should == "ohai2u_success"
end
it "should return false on a Not Modified response" do
http_response = Net::HTTPNotModified.new("1.1", "304", "It's old Bob")
@http_client.stub!(:request).and_yield(http_response).and_return(http_response)
http_response.stub!(:read_body)
@rest.run_request(:GET, @url).should be_false
end
%w[ HTTPFound HTTPMovedPermanently HTTPSeeOther HTTPUseProxy HTTPTemporaryRedirect HTTPMultipleChoice ].each do |resp_name|
it "should call run_request again on a #{resp_name} response" do
resp_cls = Net.const_get(resp_name)
resp_code = Net::HTTPResponse::CODE_TO_OBJ.keys.detect { |k| Net::HTTPResponse::CODE_TO_OBJ[k] == resp_cls }
http_response = resp_cls.new("1.1", resp_code, "bob somewhere else")
http_response.add_field("location", @url.path)
http_response.stub!(:read_body)
@http_client.stub!(:request).and_yield(http_response).and_return(http_response)
lambda { @rest.run_request(:GET, @url) }.should raise_error(Chef::Exceptions::RedirectLimitExceeded)
end
end
# CHEF-3140
context "when configured to disable compression" do
before do
@rest = Chef::REST.new(@base_url, nil, nil, :disable_gzip => true)
end
it "does not accept encoding gzip" do
@rest.send(:build_headers, :GET, @url, {}).should_not have_key("Accept-Encoding")
end
it "does not decompress a response encoded as gzip" do
@http_response.add_field("content-encoding", "gzip")
request = Net::HTTP::Get.new(@url.path)
Net::HTTP::Get.should_receive(:new).and_return(request)
# will raise a Zlib error if incorrect
@rest.api_request(:GET, @url, {}).should == "ninja"
end
end
it "should show the JSON error message on an unsuccessful request" do
http_response = Net::HTTPServerError.new("1.1", "500", "drooling from inside of mouth")
http_response.add_field("content-type", "application/json")
http_response.stub!(:body).and_return('{ "error":[ "Ears get sore!", "Not even four" ] }')
http_response.stub!(:read_body)
@http_client.stub!(:request).and_yield(http_response).and_return(http_response)
@rest.stub!(:sleep)
lambda {@rest.run_request(:GET, @url)}.should raise_error(Net::HTTPFatalError)
@log_stringio.string.should match(Regexp.escape('WARN: HTTP Request Returned 500 drooling from inside of mouth: Ears get sore!, Not even four'))
end
# A 500 with no JSON body must still surface as Net::HTTPFatalError.
# (Dropped a dead `@http_response = ...` reassignment: only the local
# `http_response` below is stubbed and yielded; the ivar was never read.)
it "should raise an exception on an unsuccessful request" do
  http_response = Net::HTTPServerError.new("1.1", "500", "drooling from inside of mouth")
  http_response.stub!(:read_body)
  @rest.stub!(:sleep)
  @http_client.stub!(:request).and_yield(http_response).and_return(http_response)
  lambda {@rest.run_request(:GET, @url)}.should raise_error(Net::HTTPFatalError)
end
# The raised HTTP exception must carry the originating request (url and
# method). Fixes two defects: the assertions previously read the rescue
# variable `e` (working only because rescue-bound variables leak out of
# begin/rescue) instead of the captured `exception`, and a dead
# `@http_response = ...` reassignment was never read.
it "adds the rest_request object to any http exception raised" do
  http_response = Net::HTTPServerError.new("1.1", "500", "drooling from inside of mouth")
  http_response.stub!(:read_body)
  @rest.stub!(:sleep)
  @http_client.stub!(:request).and_yield(http_response).and_return(http_response)
  exception = begin
    @rest.api_request(:GET, @url, {})
  rescue => e
    e
  end

  exception.chef_rest_request.url.should == @url
  exception.chef_rest_request.method.should == :GET
end
describe "streaming downloads to a tempfile" do
before do
@tempfile = Tempfile.open("chef-rspec-rest_spec-line-#{__LINE__}--")
Tempfile.stub!(:new).with("chef-rest").and_return(@tempfile)
Tempfile.stub!(:open).and_return(@tempfile)
@request_mock = {}
Net::HTTP::Get.stub!(:new).and_return(@request_mock)
@http_response_mock = mock("Net::HTTP Response mock")
end
after do
@tempfile.rspec_reset
@tempfile.close!
end
it "should build a new HTTP GET request without the application/json accept header" do
expected_headers = {'X-Chef-Version' => Chef::VERSION, 'Accept-Encoding' => Chef::REST::RESTRequest::ENCODING_GZIP_DEFLATE}
Net::HTTP::Get.should_receive(:new).with("/?foo=bar", expected_headers).and_return(@request_mock)
@rest.run_request(:GET, @url, {}, false, nil, true)
end
it "should create a tempfile for the output of a raw request" do
@rest.run_request(:GET, @url, {}, false, nil, true).should equal(@tempfile)
end
it "should read the body of the response in chunks on a raw request" do
@http_response.should_receive(:read_body).and_return(true)
@rest.run_request(:GET, @url, {}, false, nil, true)
end
it "should populate the tempfile with the value of the raw request" do
@http_response_mock.stub!(:read_body).and_yield("ninja")
@tempfile.should_receive(:write).with("ninja").once.and_return(true)
@rest.run_request(:GET, @url, {}, false, nil, true)
end
it "should close the tempfile if we're doing a raw request" do
@tempfile.should_receive(:close).once.and_return(true)
@rest.run_request(:GET, @url, {}, false, nil, true)
end
it "should not raise a divide by zero exception if the size is 0" do
@http_response_mock.stub!(:header).and_return({ 'Content-Length' => "5" })
@http_response_mock.stub!(:read_body).and_yield('')
lambda { @rest.run_request(:GET, @url, {}, false, nil, true) }.should_not raise_error(ZeroDivisionError)
end
it "should not raise a divide by zero exception if the Content-Length is 0" do
@http_response_mock.stub!(:header).and_return({ 'Content-Length' => "0" })
@http_response_mock.stub!(:read_body).and_yield("ninja")
lambda { @rest.run_request(:GET, @url, {}, false, nil, true) }.should_not raise_error(ZeroDivisionError)
end
end
end
describe "as JSON API requests" do
before do
@request_mock = {}
Net::HTTP::Get.stub!(:new).and_return(@request_mock)
@base_headers = {"Accept" => "application/json",
"X-Chef-Version" => Chef::VERSION,
"Accept-Encoding" => Chef::REST::RESTRequest::ENCODING_GZIP_DEFLATE
}
end
it "should always include the X-Chef-Version header" do
Net::HTTP::Get.should_receive(:new).with("/?foo=bar", @base_headers).and_return(@request_mock)
@rest.api_request(:GET, @url, {})
end
it "sets the user agent to chef-client" do
# must reset to default b/c knife changes the UA
Chef::REST::RESTRequest.user_agent = Chef::REST::RESTRequest::DEFAULT_UA
@rest.api_request(:GET, @url, {})
@request_mock['User-Agent'].should match /^Chef Client\/#{Chef::VERSION}/
end
context "when configured with custom http headers" do
before(:each) do
@custom_headers = {
'X-Custom-ChefSecret' => 'sharpknives',
'X-Custom-RequestPriority' => 'extremely low'
}
Chef::Config[:custom_http_headers] = @custom_headers
end
after(:each) do
Chef::Config[:custom_http_headers] = nil
end
it "should set them on the http request" do
url_string = an_instance_of(String)
header_hash = hash_including(@custom_headers)
Net::HTTP::Get.should_receive(:new).with(url_string, header_hash)
@rest.api_request(:GET, @url, {})
end
end
it "should set the cookie for this request if one exists for the given host:port" do
Chef::REST::CookieJar.instance["#{@url.host}:#{@url.port}"] = "cookie monster"
Net::HTTP::Get.should_receive(:new).with("/?foo=bar", @base_headers.merge('Cookie' => "cookie monster")).and_return(@request_mock)
@rest.api_request(:GET, @url, {})
end
it "should build a new HTTP GET request" do
Net::HTTP::Get.should_receive(:new).with("/?foo=bar", @base_headers).and_return(@request_mock)
@rest.api_request(:GET, @url, {})
end
it "should build a new HTTP POST request" do
request = Net::HTTP::Post.new(@url.path)
expected_headers = @base_headers.merge("Content-Type" => 'application/json', 'Content-Length' => '13')
Net::HTTP::Post.should_receive(:new).with("/?foo=bar", expected_headers).and_return(request)
@rest.api_request(:POST, @url, {}, {:one=>:two})
request.body.should == '{"one":"two"}'
end
it "should build a new HTTP PUT request" do
request = Net::HTTP::Put.new(@url.path)
expected_headers = @base_headers.merge("Content-Type" => 'application/json', 'Content-Length' => '13')
Net::HTTP::Put.should_receive(:new).with("/?foo=bar",expected_headers).and_return(request)
@rest.api_request(:PUT, @url, {}, {:one=>:two})
request.body.should == '{"one":"two"}'
end
it "should build a new HTTP DELETE request" do
Net::HTTP::Delete.should_receive(:new).with("/?foo=bar", @base_headers).and_return(@request_mock)
@rest.api_request(:DELETE, @url)
end
it "should raise an error if the method is not GET/PUT/POST/DELETE" do
lambda { @rest.api_request(:MONKEY, @url) }.should raise_error(ArgumentError)
end
it "returns nil when the response is successful but content-type is not JSON" do
@rest.api_request(:GET, @url).should == "ninja"
end
it "should inflate the body as to an object if JSON is returned" do
@http_response.add_field('content-type', "application/json")
@http_response.stub!(:body).and_return('{"ohai2u":"json_api"}')
@rest.api_request(:GET, @url, {}).should == {"ohai2u"=>"json_api"}
end
%w[ HTTPFound HTTPMovedPermanently HTTPSeeOther HTTPUseProxy HTTPTemporaryRedirect HTTPMultipleChoice ].each do |resp_name|
it "should call api_request again on a #{resp_name} response" do
resp_cls = Net.const_get(resp_name)
resp_code = Net::HTTPResponse::CODE_TO_OBJ.keys.detect { |k| Net::HTTPResponse::CODE_TO_OBJ[k] == resp_cls }
http_response = Net::HTTPFound.new("1.1", resp_code, "bob is somewhere else again")
http_response.add_field("location", @url.path)
http_response.stub!(:read_body)
@http_client.stub!(:request).and_yield(http_response).and_return(http_response)
lambda { @rest.api_request(:GET, @url) }.should raise_error(Chef::Exceptions::RedirectLimitExceeded)
end
end
it "should show the JSON error message on an unsuccessful request" do
http_response = Net::HTTPServerError.new("1.1", "500", "drooling from inside of mouth")
http_response.add_field("content-type", "application/json")
http_response.stub!(:body).and_return('{ "error":[ "Ears get sore!", "Not even four" ] }')
http_response.stub!(:read_body)
@rest.stub!(:sleep)
@http_client.stub!(:request).and_yield(http_response).and_return(http_response)
lambda {@rest.run_request(:GET, @url)}.should raise_error(Net::HTTPFatalError)
@log_stringio.string.should match(Regexp.escape('WARN: HTTP Request Returned 500 drooling from inside of mouth: Ears get sore!, Not even four'))
end
it "decompresses the JSON error message on an unsuccessful request" do
http_response = Net::HTTPServerError.new("1.1", "500", "drooling from inside of mouth")
http_response.add_field("content-type", "application/json")
http_response.add_field("content-encoding", "deflate")
unzipped_body = '{ "error":[ "Ears get sore!", "Not even four" ] }'
gzipped_body = Zlib::Deflate.deflate(unzipped_body, 1)
http_response.stub!(:body).and_return gzipped_body
http_response.stub!(:read_body)
@rest.stub!(:sleep)
@http_client.stub!(:request).and_yield(http_response).and_return(http_response)
lambda {@rest.run_request(:GET, @url)}.should raise_error(Net::HTTPFatalError)
@log_stringio.string.should match(Regexp.escape('WARN: HTTP Request Returned 500 drooling from inside of mouth: Ears get sore!, Not even four'))
end
it "should raise an exception on an unsuccessful request" do
http_response = Net::HTTPServerError.new("1.1", "500", "drooling from inside of mouth")
http_response.stub!(:body)
http_response.stub!(:read_body)
@rest.stub!(:sleep)
@http_client.stub!(:request).and_yield(http_response).and_return(http_response)
lambda {@rest.api_request(:GET, @url)}.should raise_error(Net::HTTPFatalError)
end
end
context "when streaming downloads to a tempfile" do
before do
@tempfile = Tempfile.open("chef-rspec-rest_spec-line-#{__LINE__}--")
Tempfile.stub!(:new).with("chef-rest").and_return(@tempfile)
@request_mock = {}
Net::HTTP::Get.stub!(:new).and_return(@request_mock)
@http_response = Net::HTTPSuccess.new("1.1",200, "it-works")
@http_response.stub!(:read_body)
@http_client.stub!(:request).and_yield(@http_response).and_return(@http_response)
end
after do
@tempfile.rspec_reset
@tempfile.close!
end
it " build a new HTTP GET request without the application/json accept header" do
expected_headers = {'X-Chef-Version' => Chef::VERSION, 'Accept-Encoding' => Chef::REST::RESTRequest::ENCODING_GZIP_DEFLATE}
Net::HTTP::Get.should_receive(:new).with("/?foo=bar", expected_headers).and_return(@request_mock)
@rest.streaming_request(@url, {})
end
it "returns a tempfile containing the streamed response body" do
@rest.streaming_request(@url, {}).should equal(@tempfile)
end
it "writes the response body to a tempfile" do
@http_response.stub!(:read_body).and_yield("real").and_yield("ultimate").and_yield("power")
@rest.streaming_request(@url, {})
IO.read(@tempfile.path).chomp.should == "realultimatepower"
end
it "closes the tempfile" do
@rest.streaming_request(@url, {})
@tempfile.should be_closed
end
it "yields the tempfile containing the streamed response body and then unlinks it when given a block" do
@http_response.stub!(:read_body).and_yield("real").and_yield("ultimate").and_yield("power")
tempfile_path = nil
@rest.streaming_request(@url, {}) do |tempfile|
tempfile_path = tempfile.path
File.exist?(tempfile.path).should be_true
IO.read(@tempfile.path).chomp.should == "realultimatepower"
end
File.exist?(tempfile_path).should be_false
end
it "does not raise a divide by zero exception if the content's actual size is 0" do
@http_response.add_field('Content-Length', "5")
@http_response.stub!(:read_body).and_yield('')
lambda { @rest.streaming_request(@url, {}) }.should_not raise_error(ZeroDivisionError)
end
it "does not raise a divide by zero exception when the Content-Length is 0" do
@http_response.add_field('Content-Length', "0")
@http_response.stub!(:read_body).and_yield("ninja")
lambda { @rest.streaming_request(@url, {}) }.should_not raise_error(ZeroDivisionError)
end
it "fetches a file and yields the tempfile it is streamed to" do
@http_response.stub!(:read_body).and_yield("real").and_yield("ultimate").and_yield("power")
tempfile_path = nil
@rest.fetch("cookbooks/a_cookbook") do |tempfile|
tempfile_path = tempfile.path
IO.read(@tempfile.path).chomp.should == "realultimatepower"
end
File.exist?(tempfile_path).should be_false
end
it "closes and unlinks the tempfile if there is an error while streaming the content to the tempfile" do
path = @tempfile.path
path.should_not be_nil
@tempfile.stub!(:write).and_raise(IOError)
@rest.fetch("cookbooks/a_cookbook") {|tmpfile| "shouldn't get here"}
File.exists?(path).should be_false
end
it "closes and unlinks the tempfile when the response is a redirect" do
Tempfile.rspec_reset
tempfile = mock("die", :path => "/tmp/ragefist", :close => true, :binmode => nil)
tempfile.should_receive(:close!).at_least(2).times
Tempfile.stub!(:new).with("chef-rest").and_return(tempfile)
http_response = Net::HTTPFound.new("1.1", "302", "bob is taking care of that one for me today")
http_response.add_field("location", @url.path)
http_response.stub!(:read_body)
@http_client.stub!(:request).and_yield(http_response).and_yield(@http_response).and_return(http_response, @http_response)
@rest.fetch("cookbooks/a_cookbook") {|tmpfile| "shouldn't get here"}
end
it "passes the original block to the redirected request" do
Tempfile.rspec_reset
http_response = Net::HTTPFound.new("1.1", "302", "bob is taking care of that one for me today")
http_response.add_field("location","/that-thing-is-here-now")
http_response.stub!(:read_body)
block_called = false
@http_client.stub!(:request).and_yield(@http_response).and_return(http_response, @http_response)
@rest.fetch("cookbooks/a_cookbook") do |tmpfile|
block_called = true
end
block_called.should be_true
end
end
end
context "when following redirects" do
before do
Chef::Config[:node_name] = "webmonkey.example.com"
Chef::Config[:client_key] = CHEF_SPEC_DATA + "/ssl/private_key.pem"
@rest = Chef::REST.new(@url)
end
it "raises a RedirectLimitExceeded when redirected more than 10 times" do
redirected = lambda {@rest.follow_redirect { redirected.call }}
lambda {redirected.call}.should raise_error(Chef::Exceptions::RedirectLimitExceeded)
end
it "does not count redirects from previous calls against the redirect limit" do
total_redirects = 0
redirected = lambda do
@rest.follow_redirect do
total_redirects += 1
redirected.call unless total_redirects >= 9
end
end
lambda {redirected.call}.should_not raise_error(Chef::Exceptions::RedirectLimitExceeded)
total_redirects = 0
lambda {redirected.call}.should_not raise_error(Chef::Exceptions::RedirectLimitExceeded)
end
it "does not sign the redirected request when sign_on_redirect is false" do
@rest.sign_on_redirect = false
@rest.follow_redirect { @rest.sign_requests?.should be_false }
end
it "resets sign_requests to the original value after following an unsigned redirect" do
@rest.sign_on_redirect = false
@rest.sign_requests?.should be_true
@rest.follow_redirect { @rest.sign_requests?.should be_false }
@rest.sign_requests?.should be_true
end
it "configures the redirect limit" do
total_redirects = 0
redirected = lambda do
@rest.follow_redirect do
total_redirects += 1
redirected.call unless total_redirects >= 9
end
end
lambda {redirected.call}.should_not raise_error(Chef::Exceptions::RedirectLimitExceeded)
total_redirects = 0
@rest.redirect_limit = 3
lambda {redirected.call}.should raise_error(Chef::Exceptions::RedirectLimitExceeded)
end
end
end
| 43.989147 | 152 | 0.671942 |
61e9419e8eb2881c6181e5764ff099c89da20520 | 2,752 | #
# = Capistrano database.yml task
#
# Provides a couple of tasks for creating the database.yml
# configuration file dynamically when deploy:setup is run.
#
# Category:: Capistrano
# Package:: Database
# Author:: Simone Carletti
# Copyright:: 2007-2009 The Authors
# License:: MIT License
# Link:: http://www.simonecarletti.com/
# Source:: http://gist.github.com/2769
#
#
# Fail fast when loaded under an incompatible Capistrano: the
# Configuration.instance API used below is specific to Capistrano 2.
unless Capistrano::Configuration.respond_to?(:instance)
  abort "This extension requires Capistrano 2"
end
Capistrano::Configuration.instance.load do
namespace :db do
desc <<-DESC
Creates the database.yml configuration file in shared path.
By default, this task uses a template unless a template \
called database.yml.erb is found either is :template_dir \
or /config/deploy folders. The default template matches \
the template for config/database.yml file shipped with Rails.
When this recipe is loaded, db:setup is automatically configured \
to be invoked after deploy:setup. You can skip this task setting \
the variable :skip_db_setup to true. This is especially useful \
if you are using this recipe in combination with \
capistrano-ext/multistaging to avoid multiple db:setup calls \
when running deploy:setup for all stages one by one.
DESC
task :setup, :except => { :no_release => true } do
default_template = <<EOF
base: &base
adapter: sqlite3
timeout: 5000
development:
database: #{shared_path}/db/development.sqlite3
<<: *base
test:
database: #{shared_path}/db/test.sqlite3
<<: *base
production:
database: #{shared_path}/db/production.sqlite3
<<: *base
EOF
location = fetch(:template_dir, "config/deploy") + '/database.yml.erb'
template = File.file?(location) ? File.read(location) : default_template
config = ERB.new(template)
run "mkdir -p #{shared_path}/db"
run "mkdir -p #{shared_path}/config"
put config.result(binding), "#{shared_path}/config/database.yml.example"
run <<-CMD
test -e #{shared_path}/config/database.yml || {
cp -f #{shared_path}/config/database.yml.example #{shared_path}/config/database.yml &&
rm #{shared_path}/config/database.yml.example &&
chmod 600 #{shared_path}/config/database.yml; }
CMD
end
desc <<-DESC
[internal] Updates the symlink for database.yml file to the just deployed release.
DESC
task :symlink, :except => { :no_release => true } do
  # Point the new release at the shared, persistent database.yml:
  # -s symbolic, -f overwrite any existing link, -n don't descend into a
  # symlinked directory target.
  run "ln -nfs #{shared_path}/config/database.yml #{release_path}/config/database.yml"
end
end
after "deploy:setup", "db:setup" unless fetch(:skip_db_setup, false)
after "deploy:finalize_update", "db:symlink"
end
| 32 | 96 | 0.680596 |
1c45b30b32337725cd7ae03b1d1edfcc1f182962 | 1,490 | require 'rexml/document'
require 'rexml/formatters/transitive'
require File.dirname(__FILE__) + '/../../../spec_helper'
# Maybe this can be cleaned
describe "REXML::Document#write" do
before :each do
@d = REXML::Document.new
city = REXML::Element.new "Springfield"
street = REXML::Element.new "EvergreenTerrace"
address = REXML::Element.new "House742"
@d << city << street << address
@str = ""
end
it "returns document source as string" do
@d.write(@str)
@str.should == "<Springfield><EvergreenTerrace><House742/></EvergreenTerrace></Springfield>"
end
it "returns document indented" do
@d.write(@str, 2)
@str.should =~ /\s*<Springfield>\s*<EvergreenTerrace>\s*<House742\/>\s*<\/EvergreenTerrace>\s*<\/Springfield>/
end
# REXML in p114 is screwed up:
# Document#write uses wrong arity for Formatters::Transitive#initialize
#
# In branch_1_8 in rev 15833 REXML is organized completely differently.
# So we are waiting for further changes to REXML in 1.8.x branch.
ruby_bug "REXMLTracker#162", "1.8.6" do
  it "returns document with transitive support" do
    @d.write(@str, 2, true)
    # The pattern must be a Regexp literal (as in the sibling example
    # above): `String#=~` against another String raises a TypeError, so
    # the original quoted pattern could never match anything.
    @str.should =~ /\s*<Springfield\s*><EvergreenTerrace\s*><House742\s*\/><\/EvergreenTerrace\s*><\/Springfield\s*>/
  end
end
it "returns document with support for IE" do
@d.write(@str, -1, false, true)
@str.should == "<Springfield><EvergreenTerrace><House742 /></EvergreenTerrace></Springfield>"
end
end
| 34.651163 | 120 | 0.675168 |
910a98e6f9b7e2b5bb651e538012f4f188ef7931 | 246 | require 'test_helper'
# Integration test: the engine's feedback page must link "Homepage" back
# to the host application's root ("/"), not an engine-scoped path.
class MainAppRootUrlTest < ActionDispatch::IntegrationTest
  test "root_url link points to main_app.root_url" do
    visit "/pointless_feedback"
    # Expected href is built from the host serving the request plus "/".
    assert page.has_link?("Homepage", :href => current_host + "/")
  end
end
| 24.6 | 66 | 0.743902 |
79a4b30c24a8aefe6a2be3da07788ba756a151ae | 1,117 | require "rails_helper"
# Routing specs for the admin comments endpoints mounted under /a.
describe Admin::CommentsController, type: :routing do
  describe 'routing' do
    it 'routes to #index' do
      expect(get('/a/comments')).to route_to('admin/comments#index')
    end

    it 'routes to #show' do
      expect(get('/a/comments/1')).to route_to('admin/comments#show', id: '1')
    end

    it 'routes to #edit' do
      expect(get('/a/comments/1/edit')).to route_to('admin/comments#edit', id: '1')
    end

    it 'routes to #update' do
      expect(put('/a/comments/1')).to route_to('admin/comments#update', id: '1')
    end

    it 'routes to #destroy' do
      expect(delete('/a/comments/1')).to route_to('admin/comments#destroy', id: '1')
    end

    it 'routes to #destroy_batch' do
      expect(delete('/a/comments/destroy-batch')).to route_to('admin/comments#destroy_batch')
    end

    it 'routes to #approve' do
      expect(put('/a/comments/1/approve')).to route_to('admin/comments#approve', id: '1')
    end

    it 'routes to #reject' do
      expect(put('/a/comments/1/reject')).to route_to('admin/comments#reject', id: '1')
    end
  end
end
| 27.925 | 93 | 0.623993 |
f7486aab890fe607f752204972ace90055e41574 | 910 | # encoding: UTF-8
#
# Cookbook Name:: dovecot
# Attributes:: conf_20_lmtp
# Author:: Xabier de Zuazo (<[email protected]>)
# Copyright:: Copyright (c) 2014 Onddo Labs, SL. (www.onddo.com)
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# conf.d/20-lmtp.conf
# NOTE(review): nil presumably means "leave the directive out of the
# rendered 20-lmtp.conf so Dovecot uses its built-in default" — confirm
# against the template that consumes these attributes.
default['dovecot']['conf']['lmtp_proxy'] = nil
default['dovecot']['conf']['lmtp_save_to_detail_mailbox'] = nil
| 35 | 74 | 0.736264 |
1138087e41fd1b7d1d5a99bd7ca0dfce7770850d | 1,189 | # frozen_string_literal: true
module Jobs
class DiscourseAutomationTracker < ::Jobs::Scheduled
  every 1.minute

  # Upper bound on rows processed per run so a large backlog cannot
  # monopolize a single scheduler tick.
  BATCH_LIMIT = 300

  # Scheduled entry point: fires any due pending automations and sends
  # any due pending PMs. No-op while the plugin is disabled.
  def execute(_args = nil)
    return unless SiteSetting.discourse_automation_enabled

    DiscourseAutomation::PendingAutomation
      .includes(:automation)
      .limit(BATCH_LIMIT)
      .where('execute_at < ?', Time.now)
      .find_each { |pending_automation| run_pending_automation(pending_automation) }

    DiscourseAutomation::PendingPm
      .includes(:automation)
      .limit(BATCH_LIMIT)
      .where('execute_at < ?', Time.now)
      .find_each { |pending_pm| send_pending_pm(pending_pm) }
  end

  # Delivers one scheduled PM, then destroys the pending row so it is
  # not re-sent on the next tick.
  def send_pending_pm(pending_pm)
    DiscourseAutomation::Scriptable::Utils.send_pm(
      pending_pm.attributes.slice('target_usernames', 'title', 'raw'),
      sender: pending_pm.sender
    )

    pending_pm.destroy!
  end

  # Triggers the parent automation for one due row (passing the trigger
  # kind and scheduled time), then destroys the row.
  def run_pending_automation(pending_automation)
    pending_automation.automation.trigger!(
      'kind' => pending_automation.automation.trigger,
      'execute_at' => pending_automation.execute_at
    )

    pending_automation.destroy!
  end
end
| 27.022727 | 86 | 0.683768 |
bff7917337326f5aad51247be02a43d562c62e48 | 2,303 | # -*- encoding: utf-8 -*-
# stub: rake 12.3.3 ruby lib
# NOTE: auto-generated gemspec stub written by RubyGems when the gem was
# installed; manual edits here are overwritten on reinstall.
Gem::Specification.new do |s|
  s.name = "rake".freeze
  s.version = "12.3.3"
  s.required_rubygems_version = Gem::Requirement.new(">= 1.3.2".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Hiroshi SHIBATA".freeze, "Eric Hodel".freeze, "Jim Weirich".freeze]
  s.bindir = "exe".freeze
  s.date = "2019-07-22"
  s.description = "Rake is a Make-like program implemented in Ruby. Tasks and dependencies are\nspecified in standard Ruby syntax.\nRake has the following features:\n * Rakefiles (rake's version of Makefiles) are completely defined in standard Ruby syntax.\n No XML files to edit. No quirky Makefile syntax to worry about (is that a tab or a space?)\n * Users can specify tasks with prerequisites.\n * Rake supports rule patterns to synthesize implicit tasks.\n * Flexible FileLists that act like arrays but know about manipulating file names and paths.\n * Supports parallel execution of tasks.\n".freeze
  s.email = ["[email protected]".freeze, "[email protected]".freeze, "".freeze]
  s.executables = ["rake".freeze]
  s.files = ["exe/rake".freeze]
  s.homepage = "https://github.com/ruby/rake".freeze
  s.licenses = ["MIT".freeze]
  s.rdoc_options = ["--main".freeze, "README.rdoc".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
  s.rubygems_version = "3.2.15".freeze
  s.summary = "Rake is a Make-like program implemented in Ruby".freeze
  s.installed_by_version = "3.2.15" if s.respond_to? :installed_by_version
  if s.respond_to? :specification_version then
    s.specification_version = 4
  end
  # Older RubyGems without add_runtime_dependency fall back to plain
  # add_dependency declarations below.
  if s.respond_to? :add_runtime_dependency then
    s.add_development_dependency(%q<bundler>.freeze, [">= 0"])
    s.add_development_dependency(%q<minitest>.freeze, [">= 0"])
    s.add_development_dependency(%q<rdoc>.freeze, [">= 0"])
    s.add_development_dependency(%q<coveralls>.freeze, [">= 0"])
    s.add_development_dependency(%q<rubocop>.freeze, [">= 0"])
  else
    s.add_dependency(%q<bundler>.freeze, [">= 0"])
    s.add_dependency(%q<minitest>.freeze, [">= 0"])
    s.add_dependency(%q<rdoc>.freeze, [">= 0"])
    s.add_dependency(%q<coveralls>.freeze, [">= 0"])
    s.add_dependency(%q<rubocop>.freeze, [">= 0"])
  end
end
| 52.340909 | 613 | 0.699088 |
1a87e65e961e80111d2b1673538f63fa6acd27ff | 1,533 | Leapforloaves::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb
  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true
  # Configure static asset server for tests with Cache-Control for performance
  config.serve_static_assets = true
  config.static_cache_control = "public, max-age=3600"
  # Log error messages when you accidentally call methods on nil
  # NOTE(review): whiny_nils and mass_assignment_sanitizer are Rails 3.x-era
  # options removed in later Rails versions — revisit on upgrade.
  config.whiny_nils = true
  # Show full error reports and disable caching
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false
  # Raise exceptions instead of rendering exception templates
  config.action_dispatch.show_exceptions = false
  # Disable request forgery protection in test environment
  config.action_controller.allow_forgery_protection = false
  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test
  # Raise exception on mass assignment protection for Active Record models
  config.active_record.mass_assignment_sanitizer = :strict
  # Print deprecation notices to the stderr
  config.active_support.deprecation = :stderr
end
| 40.342105 | 84 | 0.78604 |
114064103fbe1dcf8cf8bda15c6ba7df9040045a | 509 | # frozen_string_literal: true
module Eve
  # Join model linking an EVE alliance to one of its member corporations.
  class AllianceCorporation < ApplicationRecord
    # TODO: has_paper_trail
    # counter_cache keeps Alliance#corporations_count in sync automatically.
    belongs_to :alliance,
               primary_key: "alliance_id",
               counter_cache: :corporations_count,
               optional: true
    belongs_to :corporation, primary_key: "corporation_id", optional: true
    # Membership changes invalidate the alliance's cached character total,
    # since characters are counted through corporations.
    after_commit :eve_alliance_reset_characters_count, on: [:create, :update, :destroy]
    def eve_alliance_reset_characters_count
      alliance&.reset_characters_count
    end
  end
end
| 24.238095 | 87 | 0.746562 |
03711f105e27bb3867574f37aa1aa34c34d6e8f6 | 406 | require 'spec_helper'
describe "Event" do
  # Exercises the block-event API: a method that receives a block can fire
  # named events on it via #call_event, and the caller subscribes through the
  # yielded listener object — #call_event is presumably added by the library
  # loaded in spec_helper; confirm there.
  it "forwards any defined event" do
    # Sample producer: fires :success with no payload, then :failure with a
    # message argument.
    def foo(msg, &block)
      block.call_event :success
      block.call_event :failure, "error"
    end
    expect{foo("my message") do |on|
      on.success do
        print "success"
      end
      on.failure do |msg|
        print "\nfail:#{msg}"
      end
    end}.to output("success\nfail:error").to_stdout
  end
end
| 22.555556 | 51 | 0.608374 |
0367256df356bf591456d3641fc9831ea13e1eb9 | 380 | require 'spec_helper'
module JSONAPIonify::Structure::Helpers
  # Placeholder spec for the Errors helper: every example is pending.
  describe Errors do
    describe '.new' do
      pending
    end
    # FIXME(review): duplicate `describe '.new'` — this second block was
    # probably intended to cover a different method; confirm and rename.
    describe '.new' do
      pending
    end
    describe '#add' do
      pending
    end
    describe '#[]' do
      pending
    end
    describe '#replace' do
      pending
    end
    describe '#all_messages' do
      pending
    end
  end
end
| 13.103448 | 39 | 0.584211 |
116f7cc6f3615440874b0a1683132bf987de9180 | 304 | module TableauRestApi
class Server < Base
attr_reader :version, :build , :api_version
def initialize(server)
product_version = server.productVersion
@version = product_version.value
@build = product_version.build
@api_version = server.restApiVersion
end
end
end
| 23.384615 | 47 | 0.707237 |
e2794e112c8fa328d8e773d42ae3be6d24c87f59 | 1,578 | =begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for XeroRuby::HistoryRecord
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# NOTE(review): the attribute examples below are generated stubs with no
# assertions yet — they pass vacuously until real expectations are added.
describe 'HistoryRecord' do
  before do
    # run before each test
    @instance = XeroRuby::HistoryRecord.new
  end
  after do
    # run after each test
  end
  describe 'test an instance of HistoryRecord' do
    it 'should create an instance of HistoryRecord' do
      expect(@instance).to be_instance_of(XeroRuby::HistoryRecord)
    end
  end
  describe 'test attribute "details"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
  describe 'test attribute "changes"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
  describe 'test attribute "user"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
  describe 'test attribute "date_utc"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 26.3 | 107 | 0.726869 |
bfcf290f46feefc438f24829d7e43143dbecbeaa | 269 | Rails.application.routes.draw do
  resources :draws, except: [:edit, :update]
  # Bulk delete: DELETE /draws (no id) removes all draws via draws#destroyall.
  delete 'draws', to: 'draws#destroyall'
  # Cards are read-only (listing and detail only).
  resources :cards, only: [:index, :show]
  # For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
end
| 33.625 | 102 | 0.728625 |
2178e13838a543b1158e50f7c3f3aa375d800102 | 538 | module Shoppe
module AssociatedCountries
def self.included(base)
base.serialize :country_ids, Array
base.before_validation { self.country_ids = country_ids.map(&:to_i).select { |i| i > 0 } if country_ids.is_a?(Array) }
end
def country?(id)
id = id.id if id.is_a?(Shoppe::Country)
country_ids.is_a?(Array) && country_ids.include?(id.to_i)
end
def countries
return [] unless country_ids.is_a?(Array) && !country_ids.empty?
Shoppe::Country.where(id: country_ids)
end
end
end
| 28.315789 | 124 | 0.667286 |
f8426c9262dfd10cfbf1a78076f3d1ad8978f57c | 2,054 | require 'rails_helper'
require 'devise/jwt/test_helpers'
# Authorization specs for the moderator-only user management endpoints:
# a supervisor may list/create/destroy users; a regular user gets 401.
RSpec.describe 'Mod::UsersController', type: :controller do
  describe 'success scenarios when current user is supervisor/moderator' do
    before(:each) do
      @controller = Mod::UsersController.new
      supervisor = create(:user, :supervisor, email: '[email protected]')
      sign_in supervisor
      user = create(:user)
      @email = user.email
      @id = user.id
      headers = { 'Accept' => 'application/json', 'Content-Type' => 'application/json' }
      auth_headers = Devise::JWT::TestHelpers.auth_headers(headers, supervisor)
      # NOTE(review): auth_headers is the full headers hash; assigning it to
      # the single Authorization header looks suspect — consider
      # auth_headers['Authorization'] instead. Confirm what the controller
      # actually reads.
      request.headers['Authorization'] = auth_headers
    end
    it 'GET :index' do
      get :index,
          params: { email: @email }
      assert_response :success
    end
    it 'POST :create' do
      post :create,
           params: { user: { email: '[email protected]', password: 'password123&' } }
      assert_response :success
    end
    it 'DESTROY :destroy' do
      delete :destroy,
             params: { id: @id }
      assert_response :success
    end
  end
  describe 'failure scenarios when current user is not supervisor/moderator' do
    before(:each) do
      @controller = Mod::UsersController.new
      curr_user = create(:user, email: '[email protected]')
      sign_in curr_user
      user = create(:user)
      @email = user.email
      @id = user.id
      headers = { 'Accept' => 'application/json', 'Content-Type' => 'application/json' }
      auth_headers = Devise::JWT::TestHelpers.auth_headers(headers, curr_user)
      request.headers['Authorization'] = auth_headers
    end
    it 'GET :index' do
      get :index,
          params: { email: @email }
      assert_response :unauthorized
    end
    it 'POST :create' do
      post :create,
           params: { user: { email: '[email protected]', password: 'password123&' } }
      assert_response :unauthorized
    end
    it 'DESTROY :destroy' do
      delete :destroy,
             params: { id: @id }
      assert_response :unauthorized
    end
  end
end
| 30.205882 | 88 | 0.627556 |
019f6c298ff14541021e7072523739b3fcc610dd | 835 | module Fog
module Storage
class HP
class Real
# Generate a temporary url for an object
#
# ==== Parameters
# * container<~String> - Name of container
# * object<~String> - Name of object
# * expires<~Integer> - Time the temporary url expire in secs.
# * method<~String> - Allowed HTTP method GET, PUT, HEAD only
def get_object_temp_url(container, object, expires, method)
generate_object_temp_url(container, object, expires, method)
end
end
class Mock # :nodoc:all
def get_object_temp_url(container, object, expires, method)
@hp_storage_uri = "https://swift-cluster.example.com:443/v1/account"
generate_object_temp_url(container, object, expires, method)
end
end
end
end
end | 26.935484 | 78 | 0.619162 |
28b38a2526ed76407f8435d5196ce294337eb52d | 480 | module RockRMS
module Response
class TransactionDetail < Base
MAP = {
id: 'Id',
fee_amount: 'FeeAmount',
fund: 'Account',
fund_id: 'AccountId',
amount: 'Amount',
entity_type_id: 'EntityTypeId',
entity_id: 'EntityId'
}.freeze
def format_single(response)
response = to_h(MAP, response)
response[:fund] = Fund.format(response[:fund])
response
end
end
end
end
| 21.818182 | 54 | 0.558333 |
79d75397c3579ff71e8ff92ca98cd3875bab817f | 8,570 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2017 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
# Shared examples for models that mix in acts_as_watchable. The including
# spec must define three lets (enforced by the before hook below):
# model_instance, watch_permission and project.
shared_examples_for 'acts_as_watchable included' do
  before do
    unless defined?(model_instance) &&
           defined?(watch_permission) &&
           defined?(project)
      raise <<MESSAGE
This share example needs the following objects:
* model_instance: An instance of the watchable under test
* watch_permission: The symbol for the permission required for watching an instance
* project: the project the model_instance is in
MESSAGE
    end
  end
  let(:watcher_role) do
    permissions = is_public_permission ? [] : [watch_permission]
    FactoryGirl.create(:role, permissions: permissions)
  end
  let(:non_watcher_role) { FactoryGirl.create(:role, permissions: []) }
  let(:non_member_user) { FactoryGirl.create(:user) }
  let(:user_with_permission) do
    FactoryGirl.create(:user,
                       member_in_project: project,
                       member_through_role: watcher_role)
  end
  let(:locked_user_with_permission) do
    FactoryGirl.create(:user,
                       status: Principal::STATUSES[:locked],
                       member_in_project: project,
                       member_through_role: watcher_role)
  end
  let(:user_wo_permission) do
    if is_public_permission
      FactoryGirl.create(:user)
    else
      FactoryGirl.create(:user,
                         member_in_project: project,
                         member_through_role: non_watcher_role)
    end
  end
  let(:admin) { FactoryGirl.build(:admin) }
  let(:anonymous_user) { FactoryGirl.build(:anonymous) }
  let(:watching_user) do
    FactoryGirl.create(:user,
                       member_in_project: project,
                       member_through_role: watcher_role).tap do |user|
      Watcher.create(watchable: model_instance, user: user)
    end
  end
  # True when watch_permission is one of Redmine's public permissions; the
  # examples below branch on this because public permissions need no role.
  let(:is_public_permission) do
    Redmine::AccessControl.public_permissions.map(&:name).include?(watch_permission)
  end
  shared_context 'non member role has the permission to watch' do
    let(:non_member_role) do
      unless is_public_permission
        Role.non_member.add_permission! watch_permission
      end
      Role.non_member
    end
    before do
      unless is_public_permission
        non_member_role.add_permission! watch_permission
      end
    end
  end
  shared_context 'anonymous role has the permission to watch' do
    let(:anonymous_role) do
      permissions = is_public_permission ? [] : [watch_permission]
      FactoryGirl.build :anonymous_role, permissions: permissions
    end
    before do
      anonymous_role.save!
    end
  end
  describe '#possible_watcher_users' do
    subject { model_instance.possible_watcher_users }
    before do
      # in case the model_instance creates users, we do not want them
      # to mess with our expected users
      model_instance
      User.destroy_all
      Role.non_member
      Role.anonymous
      admin.save!
      anonymous_user.save!
      user_with_permission.save!
      user_wo_permission.save!
      locked_user_with_permission.save!
    end
    include_context 'non member role has the permission to watch'
    include_context 'anonymous role has the permission to watch'
    context 'when it is a public project' do
      before do
        project.update_attributes is_public: true
        model_instance.reload
      end
      it 'contains all allowed to view' do
        expected_users = [user_with_permission,
                          non_member_user,
                          admin]
        expected_users << user_wo_permission if is_public_permission
        expect(model_instance.possible_watcher_users)
          .to match_array(expected_users)
      end
    end
    context 'when it is a private project' do
      before do
        project.update_attributes is_public: false
        model_instance.reload
      end
      it 'contains members allowed to view' do
        expect(model_instance.possible_watcher_users)
          .to match_array([user_with_permission])
      end
    end
  end
  describe '#watcher_recipients' do
    before do
      watching_user
      model_instance.reload
    end
    subject { model_instance.watcher_recipients }
    it 'has the watching user' do
      is_expected.to match_array([watching_user])
    end
    context 'when the permission to watch has been removed' do
      before do
        if is_public_permission
          watching_user.memberships.destroy_all
        else
          watcher_role.remove_permission! watch_permission
        end
        model_instance.reload
      end
      it 'is empty' do
        is_expected.to match_array([])
      end
    end
  end
  describe '#watched_by?' do
    before do
      watching_user
      model_instance.reload
    end
    subject { model_instance.watched_by?(watching_user) }
    it 'is truthy' do
      is_expected.to be_truthy
    end
    context 'when the permission to view work packages has been removed' do
      # an existing watcher shouldn't be removed
      before do
        if is_public_permission
          skip "Not applicable for #{model_instance.class} as #{watch_permission} " +
               'is a public permission'
        end
        watcher_role.remove_permission! watch_permission
        model_instance.reload
      end
      it { is_expected.to be_truthy }
    end
  end
  describe '#addable_watcher_users' do
    subject { model_instance.addable_watcher_users }
    before do
      # in case the model_instance creates users, we do not want them
      # to mess with our expected users
      model_instance
      User.destroy_all
      Role.non_member
      Role.anonymous
      admin.save!
      anonymous_user.save!
      user_with_permission.save!
      user_wo_permission.save!
    end
    include_context 'non member role has the permission to watch'
    include_context 'anonymous role has the permission to watch'
    context 'when it is a public project' do
      before do
        project.update_attributes is_public: true
        model_instance.reload
      end
      it 'contains all allowed to view' do
        expected_users = [user_with_permission,
                          non_member_user,
                          admin]
        expected_users << user_wo_permission if is_public_permission
        is_expected
          .to match_array(expected_users)
      end
      context 'when the user is already watching' do
        before do
          Watcher.create(watchable: model_instance, user: user_with_permission)
          Watcher.create(watchable: model_instance, user: non_member_user)
        end
        it 'is no longer contained' do
          expected_users = [admin]
          expected_users << user_wo_permission if is_public_permission
          is_expected
            .to match_array(expected_users)
        end
      end
    end
    context 'when it is a private project' do
      before do
        project.update_attributes is_public: false
        model_instance.reload
      end
      it 'contains members allowed to view' do
        is_expected
          .to match_array([user_with_permission])
      end
      context 'when the user is already watching' do
        before do
          Watcher.create(watchable: model_instance, user: user_with_permission)
        end
        it 'is no longer contained' do
          is_expected
            .to be_empty
        end
      end
    end
  end
end
| 28.566667 | 91 | 0.670828 |
1d87ffb129ab53ddeaf795bdbc39d93517f3477f | 649 | # frozen_string_literal: true
class RemoveClutterFromUser < ActiveRecord::Migration[6.1]
  # Drops unused authentication columns — presumably leftovers from a
  # token-login flow plus Devise's rememberable/confirmable modules; TODO
  # confirm no code still reads them before deploying.
  def up
    change_table :users, bulk: true do |t|
      t.remove :login_token
      t.remove :login_token_valid_until
      t.remove :remember_created_at
      t.remove :confirmation_token
      t.remove :confirmed_at
      t.remove :confirmation_sent_at
    end
  end
  # Restores the columns (schema only — the removed data is not recoverable).
  def down
    change_table :users, bulk: true do |t|
      t.string :login_token
      t.datetime :login_token_valid_until
      t.datetime :remember_created_at
      t.string :confirmation_token
      t.datetime :confirmed_at
      t.datetime :confirmation_sent_at
    end
  end
end
| 23.178571 | 58 | 0.701079 |
1ae24aa9fd6fac3aed0827adb4f95e82745fd9d2 | 2,734 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2018_01_01_preview
module Models
#
# Paged Property list representation.
#
class PropertyCollection
include MsRestAzure
include MsRest::JSONable
# @return [Array<PropertyContract>] Page values.
attr_accessor :value
# @return [String] Next page link if any.
attr_accessor :next_link
# return [Proc] with next page method call.
attr_accessor :next_method
#
# Gets the rest of the items for the request, enabling auto-pagination.
#
# @return [Array<PropertyContract>] operation results.
#
def get_all_items
items = @value
page = self
while page.next_link != nil && !page.next_link.strip.empty? do
page = page.get_next_page
items.concat(page.value)
end
items
end
#
# Gets the next page of results.
#
# @return [PropertyCollection] with next page content.
#
def get_next_page
response = @next_method.call(@next_link).value! unless @next_method.nil?
unless response.nil?
@next_link = response.body.next_link
@value = response.body.value
self
end
end
#
# Mapper for PropertyCollection class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'PropertyCollection',
type: {
name: 'Composite',
class_name: 'PropertyCollection',
model_properties: {
value: {
client_side_validation: true,
required: false,
serialized_name: 'value',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'PropertyContractElementType',
type: {
name: 'Composite',
class_name: 'PropertyContract'
}
}
}
},
next_link: {
client_side_validation: true,
required: false,
serialized_name: 'nextLink',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 27.616162 | 80 | 0.523043 |
384edd7f5fdb0f2c39450221e16ac95983092fc4 | 635 | class AuthController < ApplicationController
  # Login must be reachable without an existing token.
  skip_before_action :authorized, only: [:create]
  # POST login (create): authenticates by username/password and, on success,
  # returns the serialized user plus a JWT carrying the user id.
  def create
    @user = User.find_by(username: user_login_params[:username])
    if @user && @user.authenticate(user_login_params[:password])
      token = encode_token({ user_id: @user.id })
      render json: { user: UserSerializer.new(@user), jwt: token }, status: :accepted
    else
      # Deliberately vague message: does not reveal whether the username exists.
      render json: { message: 'Invalid username or password' }, status: :unauthorized
    end
  end
  private
  # Strong parameters for the login payload.
  def user_login_params
    params.require(:user).permit(:username, :password)
  end
end
e88afde81aaa8a16b06f835be8a29e7a6e5ac2b4 | 1,660 | class Volunteer
attr_reader(:id, :name, :project_id)
def initialize(attributes)
@name = attributes.fetch(:name)
@id = attributes.fetch(:id)
@project_id = attributes.fetch(:project_id)
end
def save
result = DB.exec("INSERT INTO volunteers (name, project_id) VALUES ('#{@name}', #{@project_id}) RETURNING id;")
@id = result.first().fetch("id").to_i
end
def ==(another_volunteer)
self.name().==(another_volunteer.name).&(self.id().==(another_volunteer.id())).&(self.project_id().==(another_volunteer.project_id()))
end
def self.all
returned_volunteers = DB.exec("SELECT * FROM volunteers;")
volunteers = []
returned_volunteers.each do |volunteer|
name = volunteer.fetch("name")
id = volunteer.fetch("id").to_i
project_id = volunteer.fetch("project_id").to_i
volunteers.push(Volunteer.new({:name => name, :project_id => project_id, :id => id}))
end
volunteers
end
#
def self.find(id)
returned_volunteers_by_id = DB.exec("SELECT * FROM volunteers WHERE id = #{id};")
volunteers = []
returned_volunteers_by_id.each do |volunteer|
name = volunteer.fetch("name")
id = volunteer.fetch("id").to_i
project_id = volunteer.fetch("project_id").to_i
volunteers.push(Volunteer.new({:name => name, :id => id, :project_id => project_id}))
end
volunteers[0]
end
def update(params)
@name = params[:name]
@project_id = params[:project_id]
DB.exec("UPDATE volunteers SET name = '#{@name}', project_id = #{@project_id} WHERE id = #{@id};")
end
def delete
DB.exec("DELETE FROM volunteers WHERE id = #{@id};")
end
end
| 30.740741 | 138 | 0.649398 |
4a7e687bd366b9125fb7cbb1780fa552a8aa68b6 | 10,893 | # -*- coding: utf-8 -*-
require "test_helper"
require "support/document_xml_helper"
require "support/xml_snippets"
# End-to-end tests for Sablon::Processor: each test feeds a WordprocessingML
# snippet plus a context hash through the processor and checks both the plain
# text and the exact merged XML. The heredoc XML fixtures are compared with
# assert_xml_equal and must not be reformatted.
class ProcessorTest < Sablon::TestCase
  include DocumentXMLHelper
  include XMLSnippets
  def setup
    super
    @processor = Sablon::Processor
  end
  def test_simple_field_replacement
    result = process(snippet("simple_field"), {"first_name" => "Jack"})
    assert_equal "Hello! My Name is Jack , nice to meet you.", text(result)
    assert_xml_equal <<-document, result
      <w:p>
        <w:r><w:t xml:space="preserve">Hello! My Name is </w:t></w:r>
        <w:r w:rsidR="004B49F0">
          <w:rPr><w:noProof/></w:rPr>
          <w:t>Jack</w:t>
        </w:r>
        <w:r w:rsidR="00BE47B1"><w:t xml:space="preserve">, nice to meet you.</w:t></w:r>
      </w:p>
    document
  end
  def test_context_can_contain_string_and_symbol_keys
    result = process(snippet("simple_fields"), {"first_name" => "Jack", last_name: "Davis"})
    assert_equal "Jack Davis", text(result)
  end
  def test_complex_field_replacement
    result = process(snippet("complex_field"), {"last_name" => "Zane"})
    assert_equal "Hello! My Name is Zane , nice to meet you.", text(result)
    assert_xml_equal <<-document, result
      <w:p>
        <w:r><w:t xml:space="preserve">Hello! My Name is </w:t></w:r>
        <w:r w:rsidR="004B49F0">
          <w:rPr><w:b/><w:noProof/></w:rPr>
          <w:t>Zane</w:t>
        </w:r>
        <w:r w:rsidR="00BE47B1"><w:t xml:space="preserve">, nice to meet you.</w:t></w:r>
      </w:p>
    document
  end
  def test_complex_field_replacement_with_split_field
    result = process(snippet("edited_complex_field"), {"first_name" => "Daniel"})
    assert_equal "Hello! My Name is Daniel , nice to meet you.", text(result)
    assert_xml_equal <<-document, result
      <w:p>
        <w:r><w:t xml:space="preserve">Hello! My Name is </w:t></w:r>
        <w:r w:rsidR="00441382">
          <w:rPr><w:noProof/></w:rPr>
          <w:t>Daniel</w:t>
        </w:r>
        <w:r w:rsidR="00BE47B1"><w:t xml:space="preserve">, nice to meet you.</w:t></w:r>
      </w:p>
    document
  end
  def test_paragraph_block_replacement
    result = process(snippet("paragraph_loop"), {"technologies" => ["Ruby", "Rails"]})
    assert_equal "Ruby Rails", text(result)
    assert_xml_equal <<-document, result
      <w:p w14:paraId="1081E316" w14:textId="3EAB5FDC" w:rsidR="00380EE8" w:rsidRDefault="00380EE8" w:rsidP="007F5CDE">
        <w:pPr>
          <w:pStyle w:val="ListParagraph"/>
          <w:numPr>
            <w:ilvl w:val="0"/>
            <w:numId w:val="1"/>
          </w:numPr>
        </w:pPr>
        <w:r w:rsidR="009F01DA">
          <w:rPr><w:noProof/></w:rPr>
          <w:t>Ruby</w:t>
        </w:r>
      </w:p><w:p w14:paraId="1081E316" w14:textId="3EAB5FDC" w:rsidR="00380EE8" w:rsidRDefault="00380EE8" w:rsidP="007F5CDE">
        <w:pPr>
          <w:pStyle w:val="ListParagraph"/>
          <w:numPr>
            <w:ilvl w:val="0"/>
            <w:numId w:val="1"/>
          </w:numPr>
        </w:pPr>
        <w:r w:rsidR="009F01DA">
          <w:rPr><w:noProof/></w:rPr>
          <w:t>Rails</w:t>
        </w:r>
      </w:p>
    document
  end
  def test_paragraph_block_within_table_cell
    result = process(snippet("paragraph_loop_within_table_cell"), {"technologies" => ["Puppet", "Chef"]})
    assert_equal "Puppet Chef", text(result)
    assert_xml_equal <<-document, result
      <w:tbl>
        <w:tblGrid>
          <w:gridCol w:w="2202"/>
        </w:tblGrid>
        <w:tr w:rsidR="00757DAD">
          <w:tc>
            <w:p>
              <w:r w:rsidR="004B49F0">
                <w:rPr><w:noProof/></w:rPr>
                <w:t>Puppet</w:t>
              </w:r>
            </w:p>
            <w:p>
              <w:r w:rsidR="004B49F0">
                <w:rPr><w:noProof/></w:rPr>
                <w:t>Chef</w:t>
              </w:r>
            </w:p>
          </w:tc>
        </w:tr>
      </w:tbl>
    document
  end
  def test_paragraph_block_within_empty_table_cell_and_blank_replacement
    result = process(snippet("paragraph_loop_within_table_cell"), {"technologies" => []})
    assert_equal "", text(result)
    assert_xml_equal <<-document, result
      <w:tbl>
        <w:tblGrid>
          <w:gridCol w:w="2202"/>
        </w:tblGrid>
        <w:tr w:rsidR="00757DAD">
          <w:tc>
            <w:p></w:p>
          </w:tc>
        </w:tr>
      </w:tbl>
    document
  end
  def test_adds_blank_paragraph_to_empty_table_cells
    result = process(snippet("corrupt_table"), {})
    assert_xml_equal <<-document, result
    <w:tbl>
      <w:tblGrid>
        <w:gridCol w:w="2202"/>
      </w:tblGrid>
      <w:tr w:rsidR="00757DAD">
        <w:tc>
          <w:p>
            Hans
          </w:p>
        </w:tc>
        <w:tc>
          <w:tcPr>
            <w:tcW w:w="5635" w:type="dxa"/>
          </w:tcPr>
          <w:p></w:p>
        </w:tc>
      </w:tr>
      <w:tr w:rsidR="00757DAD">
        <w:tc>
          <w:tcPr>
            <w:tcW w:w="2202" w:type="dxa"/>
          </w:tcPr>
          <w:p>
            <w:r>
              <w:rPr><w:noProof/></w:rPr>
              <w:t>1.</w:t>
            </w:r>
          </w:p>
        </w:tc>
        <w:tc>
          <w:p>
          </w:p><w:p>
            <w:r w:rsidR="004B49F0">
              <w:rPr><w:noProof/></w:rPr>
              <w:t>Chef</w:t>
            </w:r>
          </w:p>
        </w:tc>
      </w:tr>
    </w:tbl>
    document
  end
  def test_single_row_table_loop
    item = Struct.new(:index, :label, :rating)
    result = process(snippet("table_row_loop"), {"items" => [item.new("1.", "Milk", "***"), item.new("2.", "Sugar", "**")]})
    assert_xml_equal <<-document, result
    <w:tbl>
      <w:tblPr>
        <w:tblStyle w:val="TableGrid"/>
        <w:tblW w:w="0" w:type="auto"/>
        <w:tblLook w:val="04A0" w:firstRow="1" w:lastRow="0" w:firstColumn="1" w:lastColumn="0" w:noHBand="0" w:noVBand="1"/>
      </w:tblPr>
      <w:tblGrid>
        <w:gridCol w:w="2202"/>
        <w:gridCol w:w="4285"/>
        <w:gridCol w:w="2029"/>
      </w:tblGrid>
      <w:tr w:rsidR="00757DAD" w14:paraId="1BD2E50A" w14:textId="77777777" w:rsidTr="006333C3">
        <w:tc>
          <w:tcPr>
            <w:tcW w:w="2202" w:type="dxa"/>
          </w:tcPr>
          <w:p w14:paraId="41ACB3D9" w14:textId="77777777" w:rsidR="00757DAD" w:rsidRDefault="00757DAD" w:rsidP="006333C3">
            <w:r>
              <w:rPr><w:noProof/></w:rPr>
              <w:t>1.</w:t>
            </w:r>
          </w:p>
        </w:tc>
        <w:tc>
          <w:tcPr>
            <w:tcW w:w="4285" w:type="dxa"/>
          </w:tcPr>
          <w:p w14:paraId="197C6F31" w14:textId="77777777" w:rsidR="00757DAD" w:rsidRDefault="00757DAD" w:rsidP="006333C3">
            <w:r>
              <w:rPr><w:noProof/></w:rPr>
              <w:t>Milk</w:t>
            </w:r>
          </w:p>
        </w:tc>
        <w:tc>
          <w:tcPr>
            <w:tcW w:w="2029" w:type="dxa"/>
          </w:tcPr>
          <w:p w14:paraId="55C258BB" w14:textId="77777777" w:rsidR="00757DAD" w:rsidRDefault="00757DAD" w:rsidP="006333C3">
            <w:r>
              <w:rPr><w:noProof/></w:rPr>
              <w:t>***</w:t>
            </w:r>
          </w:p>
        </w:tc>
      </w:tr><w:tr w:rsidR="00757DAD" w14:paraId="1BD2E50A" w14:textId="77777777" w:rsidTr="006333C3">
        <w:tc>
          <w:tcPr>
            <w:tcW w:w="2202" w:type="dxa"/>
          </w:tcPr>
          <w:p w14:paraId="41ACB3D9" w14:textId="77777777" w:rsidR="00757DAD" w:rsidRDefault="00757DAD" w:rsidP="006333C3">
            <w:r>
              <w:rPr><w:noProof/></w:rPr>
              <w:t>2.</w:t>
            </w:r>
          </w:p>
        </w:tc>
        <w:tc>
          <w:tcPr>
            <w:tcW w:w="4285" w:type="dxa"/>
          </w:tcPr>
          <w:p w14:paraId="197C6F31" w14:textId="77777777" w:rsidR="00757DAD" w:rsidRDefault="00757DAD" w:rsidP="006333C3">
            <w:r>
              <w:rPr><w:noProof/></w:rPr>
              <w:t>Sugar</w:t>
            </w:r>
          </w:p>
        </w:tc>
        <w:tc>
          <w:tcPr>
            <w:tcW w:w="2029" w:type="dxa"/>
          </w:tcPr>
          <w:p w14:paraId="55C258BB" w14:textId="77777777" w:rsidR="00757DAD" w:rsidRDefault="00757DAD" w:rsidP="006333C3">
            <w:r>
              <w:rPr><w:noProof/></w:rPr>
              <w:t>**</w:t>
            </w:r>
          </w:p>
        </w:tc>
      </w:tr>
    </w:tbl>
    document
  end
  def test_loop_over_collection_convertable_to_an_enumerable
    style_collection = Class.new do
      def to_ary
        ["CSS", "SCSS", "LESS"]
      end
    end
    result = process(snippet("paragraph_loop"), {"technologies" => style_collection.new})
    assert_equal "CSS SCSS LESS", text(result)
  end
  def test_loop_over_collection_not_convertable_to_an_enumerable_raises_error
    not_a_collection = Class.new {}
    assert_raises Sablon::ContextError do
      process(snippet("paragraph_loop"), {"technologies" => not_a_collection.new})
    end
  end
  def test_loop_with_missing_variable_raises_error
    e = assert_raises Sablon::ContextError do
      process(snippet("paragraph_loop"), {})
    end
    assert_equal "The expression «technologies» should evaluate to an enumerable but was: nil", e.message
  end
  def test_loop_with_missing_end_raises_error
    e = assert_raises Sablon::TemplateError do
      process(snippet("loop_without_ending"), {})
    end
    assert_equal "Could not find end field for «technologies:each(technology)». Was looking for «technologies:endEach»", e.message
  end
  def test_conditional_with_missing_end_raises_error
    e = assert_raises Sablon::TemplateError do
      process(snippet("conditional_without_ending"), {})
    end
    assert_equal "Could not find end field for «middle_name:if». Was looking for «middle_name:endIf»", e.message
  end
  def test_multi_row_table_loop
    item = Struct.new(:index, :label, :body)
    context = {"foods" => [item.new("1.", "Milk", "Milk is a white liquid."),
                           item.new("2.", "Sugar", "Sugar is the generalized name for carbohydrates.")]}
    result = process(snippet("table_multi_row_loop"), context)
    assert_equal "1. Milk Milk is a white liquid. 2. Sugar Sugar is the generalized name for carbohydrates.", text(result)
  end
  def test_conditional
    result = process(snippet("conditional"), {"middle_name" => "Michael"})
    assert_equal "Anthony Michael Hall", text(result)
    result = process(snippet("conditional"), {"middle_name" => nil})
    assert_equal "Anthony Hall", text(result)
  end
  def test_conditional_with_predicate
    result = process(snippet("conditional_with_predicate"), {"body" => ""})
    assert_equal "some content", text(result)
    result = process(snippet("conditional_with_predicate"), {"body" => "not empty"})
    assert_equal "", text(result)
  end
  private
  # Wraps the snippet in a full document shell, runs the processor with the
  # given context and returns the resulting XML string.
  def process(document, context)
    @processor.process(wrap(document), context).to_xml
  end
end
| 30.512605 | 130 | 0.550996 |
e834123f9c8694cabe81d2ddbe15b968ef17b557 | 192 | class MergeController < ApplicationController
# Declarative parameter whitelisting (appears to be the param_accessible
# DSL — confirm against the gem). Multiple declarations are merged:
# :a is permitted only for #create; :b for every action.
param_accessible :a, :only => :create
param_accessible :b
# Nested hash keys can be whitelisted too, with per-action exclusions.
param_accessible({ :h => :c}, :except => :update)
param_accessible :h => :b
end
| 27.428571 | 51 | 0.708333 |
f74682dd0ae3e445ff9dc55ce345cd08f9c4fe22 | 61 | module Houston::Tickets
# View-helper namespace for the Houston::Tickets engine; intentionally
# empty until shared view helpers are needed.
module ApplicationHelper
end
end
| 12.2 | 26 | 0.803279 |
21fb6db3e14cea32c0a79dca82b86b57b1991776 | 281 | require 'hutch'
class LinkSetTitleConsumer
  include Hutch::Consumer
  consume 'link.set.title'

  # Handles a 'link.set.title' message: looks up the link, crawls its
  # destination page for a title, and persists that title on the link.
  def process(message)
    link = LinkRepository.find(message[:link_id])
    page_title = Crawler.get_title(link.destination_url)
    LinkRepository.update(link, title: page_title)
  end
end
| 18.733333 | 51 | 0.743772 |
1de87c51c0fb0d339535aa970bf71a22ed5d88ea | 1,556 | class Pnpm < Formula
require "language/node"
desc "📦🚀 Fast, disk space efficient package manager"
homepage "https://pnpm.io/"
url "https://registry.npmjs.org/pnpm/-/pnpm-6.22.1.tgz"
sha256 "7aa88f16e4556006826f38812d01fe2adcc279d305ae37808e3e7a32e9eac1ee"
license "MIT"
livecheck do
url "https://registry.npmjs.org/pnpm/latest"
regex(/["']version["']:\s*?["']([^"']+)["']/i)
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "c1db2819a1d641cd852713baae1477049a7fc67f5776f896475b47453f7aeaef"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "c1db2819a1d641cd852713baae1477049a7fc67f5776f896475b47453f7aeaef"
sha256 cellar: :any_skip_relocation, monterey: "05dc4da02d79c220d14296b73fa7c84be1e869f29c222254e7c6c0f8ce6a6386"
sha256 cellar: :any_skip_relocation, big_sur: "2814a80adfd9f0e44bca506db40e4519256e69e9e85421f8323369af0cd57c8c"
sha256 cellar: :any_skip_relocation, catalina: "2814a80adfd9f0e44bca506db40e4519256e69e9e85421f8323369af0cd57c8c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "c1db2819a1d641cd852713baae1477049a7fc67f5776f896475b47453f7aeaef"
end
depends_on "node"
conflicts_with "corepack", because: "both installs `pnpm` and `pnpx` binaries"
def install
# Install the npm package into libexec, then expose its executables in bin.
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
system "#{bin}/pnpm", "init", "-y"
assert_predicate testpath/"package.json", :exist?, "package.json must exist"
end
end
| 40.947368 | 123 | 0.758997 |
2670bee95b3339c44f281340d87e1cfc2191b28c | 1,373 | class OryHydra < Formula
desc "OpenID Certified OAuth 2.0 Server and OpenID Connect Provider"
homepage "https://www.ory.sh/hydra/"
url "https://github.com/ory/hydra/archive/v1.9.1.tar.gz"
sha256 "b97037ec2fd35eb91b81b4885a4f169540adb9d410101c3db56869a63f96e25e"
license "Apache-2.0"
bottle do
cellar :any_skip_relocation
sha256 "e81542a9bad2cd7de922cc634f38a3836ca5ba1abb98739b6c8ff735a66c702d" => :big_sur
sha256 "138f474ca11974be1e9f08a7d5efa94a6cac013e12a5ac1ce179af35ecdae9a9" => :arm64_big_sur
sha256 "2d26a741b1be1d4b66110299774f6d43c6521ea876eee0e55131f6e2294f1ffc" => :catalina
sha256 "39b5af96367b9cfd5e761809150d260de43cb997ee4053c5cce01f1bb3429b9b" => :mojave
end
depends_on "go" => :build
conflicts_with "hydra", because: "both install `hydra` binaries"
def install
# GOBIN points the Go toolchain (via `make install`) at Homebrew's bin dir.
ENV["GOBIN"] = bin
system "make", "install"
end
test do
admin_port = free_port
(testpath/"config.yaml").write <<~EOS
dsn: memory
serve:
public:
port: #{free_port}
admin:
port: #{admin_port}
EOS
fork { exec bin/"hydra", "serve", "all", "--config", "config.yaml" }
sleep 5
endpoint = "https://127.0.0.1:#{admin_port}/"
output = shell_output("#{bin}/hydra clients list --endpoint #{endpoint} --skip-tls-verify")
assert_match "| CLIENT ID |", output
end
end
| 31.204545 | 95 | 0.70284 |
e26bb7096a9933abb6102492f3716ac15fbbf115 | 3,296 | class Pgcli < Formula
desc "CLI for Postgres with auto-completion and syntax highlighting"
homepage "http://pgcli.com/"
url "https://pypi.python.org/packages/source/p/pgcli/pgcli-0.20.1.tar.gz"
sha256 "e645d21abf98303259bf588e9afa1bedf507f54ae27f78f0587cce98315421ab"
bottle do
cellar :any
sha256 "c61d08c167b473467e79954decaf7c3f8d78295ee7377958c1f536e6db99ae2c" => :el_capitan
sha256 "b8e88ed6f1a4c2ce8ca672a96c0e842236f69dc7ad5ff000eadc1fc21e494fc1" => :yosemite
sha256 "f83f82e184a724ddfb5ba21faab65cf7f5974fe32b534f1cff1a8b85f6f48041" => :mavericks
end
depends_on :python if MacOS.version <= :snow_leopard
depends_on "openssl"
depends_on :postgresql
resource "six" do
url "https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz"
sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
end
resource "configobj" do
url "https://pypi.python.org/packages/source/c/configobj/configobj-5.0.6.tar.gz"
sha256 "a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902"
end
resource "sqlparse" do
url "https://pypi.python.org/packages/source/s/sqlparse/sqlparse-0.1.16.tar.gz"
sha256 "678c6c36ca4b01405177da8b84eecf92ec92c9f6c762396c965bb5d305f20f81"
end
resource "setproctitle" do
url "https://pypi.python.org/packages/source/s/setproctitle/setproctitle-1.1.9.tar.gz"
sha256 "1c3414d18f9cacdab78b0ffd8e886d56ad45f22e55001a72aaa0b2aeb56a0ad7"
end
resource "psycopg2" do
url "https://pypi.python.org/packages/source/p/psycopg2/psycopg2-2.6.1.tar.gz"
sha256 "6acf9abbbe757ef75dc2ecd9d91ba749547941abaffbe69ff2086a9e37d4904c"
end
resource "wcwidth" do
url "https://pypi.python.org/packages/source/w/wcwidth/wcwidth-0.1.5.tar.gz"
sha256 "66c7ce3199c87833aaaa1fe1241b63261ce53c1062597c189a16a54713e0919d"
end
resource "Pygments" do
url "https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.2.tar.gz"
sha256 "7320919084e6dac8f4540638a46447a3bd730fca172afc17d2c03eed22cf4f51"
end
resource "click" do
url "https://pypi.python.org/packages/source/c/click/click-5.1.tar.gz"
sha256 "678c98275431fad324275dec63791e4a17558b40e5a110e20a82866139a85a5a"
end
resource "prompt_toolkit" do
url "https://pypi.python.org/packages/source/p/prompt_toolkit/prompt_toolkit-0.46.tar.gz"
sha256 "1aa25cb9772e1e27d12f7920b5a514421ab763231067119bbd2f8b1574b409fb"
end
resource "pgspecial" do
url "https://pypi.python.org/packages/source/p/pgspecial/pgspecial-1.2.0.tar.gz"
sha256 "36ae9126f50fd146c96609b71a34ffa9122cfb72e658f46114c4cb8642530b17"
end
def install
# Vendor every Python dependency into libexec/"vendor" so the CLI is
# self-contained, then install pgcli itself into libexec.
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
%w[click prompt_toolkit psycopg2 pgspecial sqlparse Pygments wcwidth six setproctitle configobj].each do |r|
resource(r).stage do
system "python", *Language::Python.setup_install_args(libexec/"vendor")
end
end
ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python2.7/site-packages"
system "python", *Language::Python.setup_install_args(libexec)
bin.install Dir["#{libexec}/bin/*"]
# Rewrite the installed scripts so they run with the vendored PYTHONPATH.
bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
test do
system bin/"pgcli", "--help"
end
end
| 37.885057 | 112 | 0.773058 |
111d6e02d6d3c03a0351a5cf35a5d733ab58800a | 2,199 | module Suit # :nodoc:
module Models # :nodoc:
module Matchers
# Ensures that the model can scope by 'source'
# requires that the class have a factory and that a user factory exist
# Tests:
# scope :source, lambda { |item_object| {:conditions => ["items.source_id = ? AND items.source_type = ?", item_object.id, item_object.class.to_s] } }
# Examples:
# it { should scope_source }
def scope_source
CreatedByMatcher.new(:source, :source)
end
# Ensures that the model can scope by created_by
# requires that the class have a factory and that a user factory exist
# Tests:
# scope :created_by, lambda { |user| where(['user_id = ?', user.id]) } }
# Examples:
# it { should scope_created_by }
# (Delegates to CreatedByMatcher with the :user factory attribute.)
def scope_created_by
CreatedByMatcher.new(:created_by, :user)
end
# Ensures that the model can scope by its creator
# requires that the class have a factory and that a user factory exist
# Tests:
# scope :by_creator, lambda { |creator| where(['creator_id = ?', creator.id]) } }
# Examples:
# it { should scope_by_creator }
# (Delegates to CreatedByMatcher with the :creator factory attribute.)
def scope_by_creator
CreatedByMatcher.new(:by_creator, :creator)
end
class CreatedByMatcher < SuitMatcherBase # :nodoc:
  # scope - the named scope under test (e.g. :created_by, :source, :by_creator)
  # field - the factory attribute that receives the owning user (e.g. :user)
  def initialize(scope, field)
    @scope = scope
    @field = field
  end

  # Creates two records owned by two different users and verifies the scope
  # returns only the record owned by the queried user.
  def matches?(subject)
    @subject = subject
    @subject.class.delete_all
    @user = FactoryGirl.create(:user)
    @user1 = FactoryGirl.create(:user)
    @item = FactoryGirl.create(factory_name, @field => @user)
    @item1 = FactoryGirl.create(factory_name, @field => @user1)
    items = @subject.class.send(@scope, @user)
    items.include?(@item) && !items.include?(@item1)
  end

  # Report the actual scope under test instead of hard-coding "created_by",
  # so failures from scope_source / scope_by_creator are not misleading.
  def failure_message
    "Expected #{factory_name} to have scope #{@scope} and to be able to successfully find #{@subject}'s creator"
  end

  def description
    "scope #{@scope}"
  end
end
end
end
end
| 33.318182 | 163 | 0.589359 |
1ca08bed07915b19a726761c9e742bc1ab8c2b27 | 1,592 | require "securerandom"
# A token granting access on behalf of an issuing Character. A token whose
# grantee is the issuer itself is "personal" (private); a token with no
# grantee is "public".
class AccessToken < ApplicationRecord
belongs_to :issuer, class_name: "Character"
belongs_to :grantee, polymorphic: true, optional: true
validate :event_owner_categories_must_be_valid
before_create :generate_token_if_needed
# Tokens a character issued to itself.
scope :personal, ->(c) { where(issuer: c, grantee: c) }
# Tokens that are neither expired (nil expiry = never expires) nor revoked.
scope :current, -> {
where("expires_at > ? OR expires_at IS NULL", Time.current)
.where(revoked_at: nil)
}
class << self
# Looks up a token by its slug ("private-<uuid>" / "public-<uuid>").
# Raises ActiveRecord::RecordNotFound when nothing matches.
def by_slug!(slug)
where(token: parse_slug(slug)).last!
end
# Revokes ALL unrevoked tokens sharing the given token's issuer/grantee
# pair — not just the one passed in. Rows are locked during the update.
def revoke!(access_token)
raise "Access token must be persisted" unless access_token.persisted?
where(issuer: access_token.issuer,
grantee: access_token.grantee,
revoked_at: nil)
.lock
.update_all(revoked_at: Time.current)
end
private
# Strips the "private-"/"public-" prefix to recover the raw token value.
def parse_slug(slug)
slug.sub(/(private|public)-/i, "")
end
end
# URL-safe identifier; the prefix encodes visibility.
def slug
if personal?
"private-#{token}"
else
"public-#{token}"
end
end
# Rails uses this for URL generation, so routes carry the slug.
def to_param
slug
end
def personal?
issuer == grantee
end
def public?
grantee.nil?
end
def revoked?
revoked_at.present?
end
def expired?
expires_at.present? && expires_at < Time.current
end
private
# Valid only when every category is one of Event::OWNER_CATEGORIES
# (blank list is allowed).
def event_owner_categories_must_be_valid
return true if event_owner_categories.blank?
if event_owner_categories.any? { |e| !Event::OWNER_CATEGORIES.include?(e) }
errors.add(:event_owner_categories, :invalid)
end
end
# Assigns a random UUID token on create unless one was provided.
def generate_token_if_needed
self.token = SecureRandom.uuid if token.blank?
end
end
| 19.654321 | 79 | 0.668342 |
1d39551fa8ff798539e49557e9d6a62c2c6bceac | 3,961 | require "se/api/version"
require "se/api/types/answer"
require "se/api/types/question"
require "se/api/types/comment"
require "se/api/types/user"
require "se/api/types/post"
require "se/api/types/tag"
require "net/http"
require "json"
require "uri"
require "time"
require "logger"
module SE
module API
class Client
API_VERSION = 2.2
attr_reader :quota, :quota_used
attr_accessor :params
# key    - Stack Exchange API key (higher quota); empty string = anonymous.
# filter - SE API filter id; appended to the default params when non-empty.
# Remaining keyword args become default query parameters for every request.
def initialize(key = "", log_api_raw: false, filter: '!*1_).BnZb8pdvWlZpJYNyauMekouxK9-RzUNUrwiB', log_api_json: false, log_meta: true, **params)
@key = key
@params = filter.to_s.size > 0 ? params.merge({filter: filter}) : params
@quota = nil
@quota_used = 0
# Earliest time the next request is allowed; pushed forward by API backoff.
@backoff = Time.now
@logger_raw = Logger.new 'api_raw.log'
@logger_json = Logger.new 'api_json.log'
@logger = Logger.new 'se-api.log'
# Each log channel is silenced unless explicitly enabled by its flag.
@logger_raw.level = Logger::Severity::UNKNOWN unless log_api_raw
@logger_json.level = Logger::Severity::UNKNOWN unless log_api_json
@logger.level = Logger::Severity::UNKNOWN unless log_meta
end
# --- Typed accessors -------------------------------------------------------
# Plural methods fetch one or more objects by id and return an Array of
# wrapper instances; the matching singular method returns the first result
# (nil when nothing matched).
def posts(*ids, **params)
objectify Post, ids, **params
end
def post(id, **params)
posts(id, **params).first
end
def questions(*ids, **params)
objectify Question, ids, **params
end
def question(id, **params)
questions(id, **params).first
end
def answers(*ids, **params)
objectify Answer, ids, **params
end
def answer(id, **params)
answers(id, **params).first
end
def comments(*ids, **params)
objectify Comment, ids, **params
end
def comment(id, **params)
comments(id, **params).first
end
def users(*ids, **params)
objectify User, ids, **params
end
def user(id, **params)
users(id, **params).first
end
# Tags are addressed by name; query_type selects the API sub-endpoint
# (e.g. 'info'), passed through as the URI suffix.
def tags(*names, query_type: 'info', **params)
objectify Tag, names, **params.merge({uri_suffix: query_type})
end
def tag(name, **params)
tags(name, **params).first
end
# Raw /search results as parsed JSON items (no wrapper objects).
def search(**params)
json('search', **params)
end
private
# Builds "<uri_prefix>/<ids joined by delimiter>/<uri_suffix>" (nil parts
# omitted), fetches it via #json and wraps each returned item in +type+.
# Returns nil when ids is the default empty string.
# NOTE(review): the +uri:+ keyword is accepted but never used in the body —
# confirm whether any caller relies on it before removing.
def objectify(type, ids = "", uri_prefix: nil, uri_suffix: nil, uri: nil, delimiter: ';', **params)
return if ids == ""
uri_prefix = "#{type.to_s.split('::').last.downcase}s" if uri_prefix.nil?
json([uri_prefix, Array(ids).join(delimiter), uri_suffix].reject(&:nil?).join('/'), **params).map do |i|
type.new(i)
end
end
# Performs one GET against the Stack Exchange API and returns the parsed
# "items" array. Honours server-mandated backoff, retries on timeouts,
# and updates quota bookkeeping.
# Raises ArgumentError when no :site parameter is available.
def json(uri, **params)
  # Combine client-wide defaults with per-call parameters once up front
  # (the original merged twice, redundantly).
  merged = @params.merge(params)
  # Bug fix: this used `throw`, which is Ruby's control-flow primitive and
  # raises an UncaughtThrowError at runtime; an error condition should raise.
  raise ArgumentError, "No site specified" if merged[:site].nil?

  # Wait out any backoff window requested by a previous API response.
  backoff_for = @backoff - Time.now
  if backoff_for > 0
    @logger.warn "Backing off for #{backoff_for}"
    sleep backoff_for + 2
    @logger.warn "Finished backing off!"
  end

  query = merged.merge(key: @key).reject { |_k, v| v.nil? }.map { |k, v| "#{k}=#{v}" }.join('&')
  @logger.info "Posting to https://api.stackexchange.com/#{API_VERSION}/#{uri}?#{query}"
  begin
    resp_raw = Net::HTTP.get_response(URI("https://api.stackexchange.com/#{API_VERSION}/#{uri}?#{query}")).body
  rescue Net::OpenTimeout, SocketError => e
    @logger.warn "Got timeout on API request (#{e}). Retrying..."
    puts "Got timeout on API request (#{e}). Retrying..."
    sleep 0.3
    retry
  end
  @logger_raw.info "https://api.stackexchange.com/#{API_VERSION}/#{uri}?#{query} => #{resp_raw}"
  resp = JSON.parse(resp_raw)
  # Record the server's requested backoff for subsequent requests.
  @backoff = Time.now + resp["backoff"].to_i
  @logger_json.info "https://api.stackexchange.com/#{API_VERSION}/#{uri}?#{query} => #{resp}"
  @quota = resp["quota_remaining"]
  @quota_used += 1
  Array(resp["items"])
end
end
end
end
| 30.469231 | 151 | 0.586721 |
7adbc41b8419e7c1868f26e21d9884ad6dc37463 | 371 | require_relative '../../app/commands/report_robot_command.rb'
describe ReportRobotCommand do
# Collaborators are bare doubles: the spec only cares about delegation.
let(:robot) { double('robot') }
let(:table) { double('table') }
let(:command) { "REPORT" }
subject { ReportRobotCommand.new(robot, table) }
it 'executes a REPORT command on the robot' do
# Executing the command should simply forward to the robot's #report.
expect(robot).to receive(:report)
subject.execute(command)
end
end
| 23.1875 | 61 | 0.695418 |
f826584d15ebb7f13005a9b3528f1b999ffdec0a | 6,695 | class Ocrmypdf < Formula
include Language::Python::Virtualenv
desc "Adds an OCR text layer to scanned PDF files"
homepage "https://github.com/jbarlow83/OCRmyPDF"
url "https://files.pythonhosted.org/packages/3b/6f/b35629fe70cd059f5c503e544d9857b94b91ed58b50e291a34a7f0d3813b/ocrmypdf-11.6.2.tar.gz"
sha256 "0f624456a50be0b0bc8c0b59704d159f637616c093a1cabe8bb383706561bcf7"
license "MPL-2.0"
bottle do
sha256 cellar: :any, big_sur: "5f1ebe7f6f57f4287d08396519d61876b9f66ed91e71708572ecf0db5e6a2c2e"
sha256 cellar: :any, catalina: "4346d71f5f395c289454aa5a9c0fd4295bf3ebef6fb779976c24521c1f41ef1a"
sha256 cellar: :any, mojave: "1f71d0c633234663731280dd4055f9bfe26f606897d653481a100908e04e1fac"
end
depends_on "pkg-config" => :build
depends_on "rust" => :build
depends_on "freetype"
depends_on "ghostscript"
depends_on "jbig2enc"
depends_on "jpeg"
depends_on "leptonica"
depends_on "libffi"
depends_on "libpng"
depends_on "pngquant"
depends_on "pybind11"
depends_on "[email protected]"
depends_on "qpdf"
depends_on "tesseract"
depends_on "unpaper"
uses_from_macos "libxml2"
uses_from_macos "libxslt"
uses_from_macos "zlib"
resource "cffi" do
url "https://files.pythonhosted.org/packages/a8/20/025f59f929bbcaa579704f443a438135918484fffaacfaddba776b374563/cffi-1.14.5.tar.gz"
sha256 "fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/ee/2d/9cdc2b527e127b4c9db64b86647d567985940ac3698eeabc7ffaccb4ea61/chardet-4.0.0.tar.gz"
sha256 "0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"
end
resource "coloredlogs" do
url "https://files.pythonhosted.org/packages/ce/ef/bfca8e38c1802896f67045a0c9ea0e44fc308b182dbec214b9c2dd54429a/coloredlogs-15.0.tar.gz"
sha256 "5e78691e2673a8e294499e1832bb13efcfb44a86b92e18109fa18951093218ab"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/60/6d/b32368327f600a12e59fb51a904fc6200dd7e65e953fd6fc6ae6468e3423/cryptography-3.4.5.tar.gz"
sha256 "4f6761a82b51fe02cda8f45af1c2f698a10f50003dc9c2572d8a49eda2e6d35b"
end
resource "humanfriendly" do
url "https://files.pythonhosted.org/packages/31/0e/a2e882aaaa0a378aa6643f4bbb571399aede7dbb5402d3a1ee27a201f5f3/humanfriendly-9.1.tar.gz"
sha256 "066562956639ab21ff2676d1fda0b5987e985c534fc76700a19bd54bcb81121d"
end
resource "img2pdf" do
url "https://files.pythonhosted.org/packages/80/ed/5167992abaf268f5a5867e974d9d36a8fa4802800898ec711f4e1942b4f5/img2pdf-0.4.0.tar.gz"
sha256 "eaee690ab8403dd1a9cb4db10afee41dd3e6c7ed63bdace02a0121f9feadb0c9"
end
resource "lxml" do
url "https://files.pythonhosted.org/packages/db/f7/43fecb94d66959c1e23aa53d6161231dca0e93ec500224cf31b3c4073e37/lxml-4.6.2.tar.gz"
sha256 "cd11c7e8d21af997ee8079037fff88f16fda188a9776eb4b81c7e4c9c0a7d7fc"
end
resource "pdfminer.six" do
url "https://files.pythonhosted.org/packages/d8/bb/45cb24e715d3058f92f703265e6ed396767b19fec6d19d1ea54e04b730b7/pdfminer.six-20201018.tar.gz"
sha256 "b9aac0ebeafb21c08bf65f2039f4b2c5f78a3449d0a41df711d72445649e952a"
end
resource "pikepdf" do
url "https://files.pythonhosted.org/packages/9c/a4/29b68c54886a1ce0cac3b7f8c65ef11ba2b50acf21e2dbefe29328edbefa/pikepdf-2.5.2.tar.gz"
sha256 "8fc3e97b24dafbd4b1ac057c8f144c18467ce55d342a1a8c42688890ead58ef3"
end
resource "Pillow" do
url "https://files.pythonhosted.org/packages/73/59/3192bb3bc554ccbd678bdb32993928cb566dccf32f65dac65ac7e89eb311/Pillow-8.1.0.tar.gz"
sha256 "887668e792b7edbfb1d3c9d8b5d8c859269a0f0eba4dda562adb95500f60dbba"
end
resource "pluggy" do
url "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz"
sha256 "15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz"
sha256 "2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"
end
resource "reportlab" do
url "https://files.pythonhosted.org/packages/87/42/770d5815606aebb808344c9d90f96f95474b7d87047fba68fc282639db2c/reportlab-3.5.59.tar.gz"
sha256 "a755cca2dcf023130b03bb671670301a992157d5c3151d838c0b68ef89894536"
end
resource "sortedcontainers" do
url "https://files.pythonhosted.org/packages/14/10/6a9481890bae97da9edd6e737c9c3dec6aea3fc2fa53b0934037b35c89ea/sortedcontainers-2.3.0.tar.gz"
sha256 "59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1"
end
resource "tqdm" do
url "https://files.pythonhosted.org/packages/d4/2a/99ac62cb012d81ffd96a2ddd9eba54d942c56f9244feb58b6a9a49d547d8/tqdm-4.56.2.tar.gz"
sha256 "11d544652edbdfc9cc41aa4c8a5c166513e279f3f2d9f1a9e1c89935b51de6ff"
end
def install
# Build everything inside a libexec virtualenv using Homebrew's Python 3.9.
venv = virtualenv_create(libexec, Formula["[email protected]"].bin/"python3")
# Pillow needs its setup.py patched so it links against Homebrew/SDK
# libraries instead of probing system locations.
resource("Pillow").stage do
inreplace "setup.py" do |s|
sdkprefix = MacOS.sdk_path_if_needed ? MacOS.sdk_path : ""
# Disable optional openjpeg/xcb support by hiding their headers.
s.gsub! "openjpeg.h", "probably_not_a_header_called_this_eh.h"
s.gsub! "xcb.h", "probably_not_a_header_called_this_eh.h"
s.gsub! "ZLIB_ROOT = None",
"ZLIB_ROOT = ('#{sdkprefix}/usr/lib', '#{sdkprefix}/usr/include')"
s.gsub! "JPEG_ROOT = None",
"JPEG_ROOT = ('#{Formula["jpeg"].opt_prefix}/lib', '#{Formula["jpeg"].opt_prefix}/include')"
s.gsub! "FREETYPE_ROOT = None",
"FREETYPE_ROOT = ('#{Formula["freetype"].opt_prefix}/lib', " \
"'#{Formula["freetype"].opt_prefix}/include')"
end
# avoid triggering "helpful" distutils code that doesn't recognize Xcode 7 .tbd stubs
unless MacOS::CLT.installed?
ENV.append "CFLAGS", "-I#{MacOS.sdk_path}/System/Library/Frameworks/Tk.framework/Versions/8.5/Headers"
end
venv.pip_install Pathname.pwd
end
# Fix "ld: file not found: /usr/lib/system/libsystem_darwin.dylib" for lxml
ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version == :sierra
# Install every remaining Python resource (Pillow handled above).
res = resources.map(&:name).to_set - ["Pillow"]
res.each do |r|
venv.pip_install resource(r)
end
venv.pip_install_and_link buildpath
bash_completion.install "misc/completion/ocrmypdf.bash" => "ocrmypdf"
fish_completion.install "misc/completion/ocrmypdf.fish"
end
test do
system "#{bin}/ocrmypdf", "-f", "-q", "--deskew",
test_fixtures("test.pdf"), "ocr.pdf"
assert_predicate testpath/"ocr.pdf", :exist?
end
end
| 43.474026 | 146 | 0.768633 |
6a6e3f4fc70ad7e634ff261078451eaf2b586834 | 11,116 | Rails.application.routes.draw do
# Documentation-site routes: almost entirely permanent (301) redirects from
# historical doc URLs to their current homes, ordered most- to least-specific.
# Pages and guides that have been renamed (and we don't want to break old URLs)
get "/docs/api", to: redirect("/docs/rest-api")
get "/docs/api/accounts", to: redirect("/docs/rest-api/organizations")
get "/docs/api/projects", to: redirect("/docs/rest-api/pipelines")
get "/docs/api/*page", to: redirect("/docs/rest-api/%{page}")
get "/docs/basics/pipelines", to: redirect("/docs/pipelines")
get "/docs/builds", to: redirect("/docs/tutorials")
get "/docs/builds/parallelizing-builds", to: redirect("/docs/tutorials/parallel-builds")
get "/docs/builds/scheduled-builds", to: redirect("/docs/pipelines/scheduled-builds")
get "/docs/builds/build-status-badges", to: redirect("/docs/integrations/build-status-badges")
get "/docs/builds/cc-menu", to: redirect("/docs/integrations/cc-menu")
get "/docs/builds/docker-containerized-builds", to: redirect("/docs/tutorials/docker-containerized-builds")
get "/docs/builds/*page", to: redirect("/docs/pipelines/%{page}")
get "/docs/graphql-api", to: redirect("/docs/apis/graphql-api")
get "/docs/guides/artifacts", to: redirect("/docs/pipelines/artifacts")
get "/docs/guides/branch-configuration", to: redirect("/docs/pipelines/branch-configuration")
get "/docs/guides/build-meta-data", to: redirect("/docs/pipelines/build-meta-data")
get "/docs/guides/build-status-badges", to: redirect("/docs/integrations/build-status-badges")
get "/docs/guides/cc-menu", to: redirect("/docs/integrations/cc-menu")
get "/docs/guides/collapsing-build-output", to: redirect("/docs/pipelines/managing-log-output#collapsing-output")
get "/docs/guides/controlling-concurrency", to: redirect("/docs/pipelines/controlling-concurrency")
get "/docs/guides/deploying-to-heroku", to: redirect("/docs/tutorials/deploying-to-heroku")
get "/docs/guides/docker-containerized-builds", to: redirect("/docs/tutorials/docker-containerized-builds")
get "/docs/guides/elastic-ci-stack-aws", to: redirect("/docs/tutorials/elastic-ci-stack-aws")
get "/docs/guides/environment-variables", to: redirect("/docs/pipelines/environment-variables")
get "/docs/guides/getting-started", to: redirect("/docs/tutorials")
get "/docs/guides/github-enterprise", to: redirect("/docs/integrations/github-enterprise")
get "/docs/guides/github-repo-access", to: redirect("/docs/agent/github-ssh-keys")
get "/docs/guides/gitlab", to: redirect("/docs/integrations/gitlab")
get "/docs/guides/images-in-build-output", to: redirect("/docs/pipelines/links-and-images-in-log-output")
get "/docs/pipelines/images-in-log-output", to: redirect("/docs/pipelines/links-and-images-in-log-output")
get "/docs/guides/managing-log-output", to: redirect("/docs/pipelines/managing-log-output")
get "/docs/guides/migrating-from-bamboo", to: redirect("/docs/tutorials/migrating-from-bamboo")
get "/docs/guides/parallelizing-builds", to: redirect("/docs/tutorials/parallel-builds")
get "/docs/guides/skipping-a-build", to: redirect("/docs/pipelines/ignoring-a-commit")
get "/docs/guides/uploading-pipelines", to: redirect("/docs/pipelines/defining-steps")
get "/docs/guides/writing-build-scripts", to: redirect("/docs/pipelines/writing-build-scripts")
get "/docs/how-tos", to: redirect("/docs/tutorials")
get "/docs/how-tos/bitbucket", to: redirect("/docs/integrations/bitbucket")
get "/docs/how-tos/github-enterprise", to: redirect("/docs/integrations/bitbucket")
get "/docs/how-tos/gitlab", to: redirect("/docs/integrations/gitlab")
get "/docs/how-tos/deploying-to-heroku", to: redirect("/docs/tutorials/deploying-to-heroku")
get "/docs/how-tos/migrating-from-bamboo", to: redirect("/docs/tutorials/migrating-from-bamboo")
get "/docs/projects", to: redirect("/docs/pipelines")
get "/docs/pipelines/pipelines", to: redirect("/docs/pipelines")
get "/docs/pipelines/parallel-builds", to: redirect("/docs/tutorials/parallel-builds")
get "/docs/pipelines/uploading-pipelines", to: redirect("/docs/pipelines/defining-steps")
get "/docs/webhooks/setup", to: redirect("/docs/apis/webhooks")
get "/docs/webhooks", to: redirect("/docs/apis/webhooks")
get "/docs/webhooks/*page", to: redirect("/docs/apis/webhooks/%{page}")
get "/docs/rest-api", to: redirect("/docs/apis/rest-api")
get "/docs/rest-api/*page", to: redirect("/docs/apis/rest-api/%{page}")
get "/docs/quickstart/*page", to: redirect("/docs/tutorials/%{page}")
get "/docs/agent/v3/plugins", to: redirect("/docs/pipelines/plugins")
get "/docs/tutorials/gitlab", to: redirect("/docs/integrations/gitlab")
get "/docs/tutorials/github-enterprise", to: redirect("/docs/integrations/github-enterprise")
get "/docs/tutorials/bitbucket", to: redirect("/docs/integrations/bitbucket")
get "/docs/tutorials/custom-saml", to: redirect("/docs/integrations/sso/custom-saml")
get "/docs/tutorials/sso-setup-with-graphql", to: redirect("/docs/integrations/sso/sso-setup-with-graphql")
get "/docs/integrations/sso/google-oauth", to: redirect("/docs/integrations/sso/g-suite")
get "/docs/integrations/sso/cloud-identity", to: redirect("/docs/integrations/sso/g-cloud-identity")
# Doc sections that don't have overview/index pages, so need redirecting
# (302/temporary so search engines keep the section URL as canonical).
get "/docs/tutorials", to: redirect("/docs/tutorials/getting-started", status: 302)
get "/docs/integrations", to: redirect("/docs/integrations/github-enterprise", status: 302)
get "/docs/apis", to: redirect("/docs/apis/webhooks", status: 302)
# The old un-versioned URLs have a lot of Google juice, so we redirect them to
# the current version. But these are also linked from within the v2 agent
# command help, so we may add a notice saying 'Hey, maybe you're looking for
# v2?' after redirecting.
get "/docs/agent", to: redirect("/docs/agent/v3", status: 301)
get "/docs/agent/installation", to: redirect("/docs/agent/v3/installation", status: 301)
get "/docs/agent/ubuntu", to: redirect("/docs/agent/v3/ubuntu", status: 301)
get "/docs/agent/debian", to: redirect("/docs/agent/v3/debian", status: 301)
get "/docs/agent/redhat", to: redirect("/docs/agent/v3/redhat", status: 301)
get "/docs/agent/freebsd", to: redirect("/docs/agent/v3/freebsd", status: 301)
get "/docs/agent/osx", to: redirect("/docs/agent/v3/osx", status: 301)
get "/docs/agent/windows", to: redirect("/docs/agent/v3/windows", status: 301)
get "/docs/agent/linux", to: redirect("/docs/agent/v3/linux", status: 301)
get "/docs/agent/docker", to: redirect("/docs/agent/v3/docker", status: 301)
get "/docs/agent/aws", to: redirect("/docs/agent/v3/aws", status: 301)
get "/docs/agent/gcloud", to: redirect("/docs/agent/v3/gcloud", status: 301)
get "/docs/agent/configuration", to: redirect("/docs/agent/v3/configuration", status: 301)
get "/docs/agent/ssh-keys", to: redirect("/docs/agent/v3/ssh-keys", status: 301)
get "/docs/agent/github-ssh-keys", to: redirect("/docs/agent/v3/github-ssh-keys", status: 301)
get "/docs/agent/hooks", to: redirect("/docs/agent/v3/hooks", status: 301)
get "/docs/agent/queues", to: redirect("/docs/agent/v3/queues", status: 301)
get "/docs/agent/prioritization", to: redirect("/docs/agent/v3/prioritization", status: 301)
get "/docs/agent/plugins", to: redirect("/docs/agent/v3/plugins", status: 301)
get "/docs/agent/securing", to: redirect("/docs/agent/v3/securing", status: 301)
get "/docs/agent/cli-start", to: redirect("/docs/agent/v3/cli-start", status: 301)
get "/docs/agent/cli-meta-data", to: redirect("/docs/agent/v3/cli-meta-data", status: 301)
get "/docs/agent/cli-artifact", to: redirect("/docs/agent/v3/cli-artifact", status: 301)
get "/docs/agent/cli-pipeline", to: redirect("/docs/agent/v3/cli-pipeline", status: 301)
get "/docs/agent/agent-meta-data", to: redirect("/docs/agent/v3/cli-start#setting-metadata", status: 301)
get "/docs/agent/artifacts", to: redirect("/docs/agent/v3/cli-artifact", status: 301)
get "/docs/agent/build-artifacts", to: redirect("/docs/agent/v3/cli-artifact", status: 301)
get "/docs/agent/build-meta-data", to: redirect("/docs/agent/v3/cli-meta-data", status: 301)
get "/docs/agent/build-pipelines", to: redirect("/docs/agent/v3/cli-pipeline", status: 301)
get "/docs/agent/uploading-pipelines", to: redirect("/docs/agent/v3/cli-pipeline", status: 301)
get "/docs/agent/upgrading", to: redirect("/docs/agent/v3/upgrading", status: 301)
get "/docs/agent/upgrading-to-v3", to: redirect("/docs/agent/v3/upgrading", status: 301)
# Old docs routes that we changed around during the development of the v3 agent docs
get "/docs/agent/upgrading-to-v2", to: redirect("/docs/agent/v2/upgrading-to-v2", status: 301)
get "/docs/agent/v3/upgrading-to-v3", to: redirect("/docs/agent/v3/upgrading", status: 301)
get "/docs/agent/v2/plugins", to: redirect("/docs/agent/v3/plugins", status: 301)
get "/docs/agent/v2/agent-meta-data", to: redirect("/docs/agent/v2/cli-start#setting-metadata", status: 301)
get "/docs/agent/v3/agent-meta-data", to: redirect("/docs/agent/v3/cli-start#setting-tags", status: 301)
# All other standard docs pages
get "/docs/*path" => "pages#show", as: :docs_page
# Top level redirect. Needs to be at the end so it doesn't match /docs/sub-page
get "/docs", to: redirect("/docs/tutorials/getting-started", status: 302), as: :docs
# Take us straight to the docs when running standalone
root to: redirect("/docs")
end
| 91.114754 | 119 | 0.606243 |
0155d05529f9ee83600005bda20a6fda7c71b7de | 3,126 | # frozen_string_literal: true
module FatherlyAdvice
module SidekiqHelpers
class WorkSet
# Convenience constructor: build and populate a WorkSet in one call.
def self.build
new.build
end
# Class-level default TTLs per worker class (:default is the fallback).
# NOTE: 3.hours relies on ActiveSupport being loaded.
def self.ttls
@ttls ||= {
default: 3.hours
}
end
# hostname => Host, populated by #build.
def hosts
@hosts ||= {}
end
def each_host
hosts.values.each { |host| yield host }
end
# First host matching +value+, or nil.
def find_host_by(value)
each_host { |host| return host if host.match?(value) }
nil
end
def each_process
each_host do |host|
host.each_process { |process| yield process }
end
end
# First process (across all hosts) matching +value+, or nil.
def find_process_by(value)
each_process { |process| return process if process.match?(value) }
nil
end
def each_worker
each_process do |process|
process.each_worker { |worker| yield worker }
end
end
# First worker (across all processes) matching +value+, or nil.
def find_worker_by(value)
each_worker { |worker| return worker if worker.match?(value) }
nil
end
# Instance-level copy of the class TTL table (mutable per instance).
def ttls
@ttls ||= self.class.ttls.dup
end
# Populates hosts/processes/workers from the live Sidekiq API and applies
# TTL limits. Returns self for chaining.
def build
ensure_api_present
build_hosts
build_workers
apply_worker_limits
self
end
# Fresh view over Sidekiq's process registry.
def process_set
ensure_api_present
Sidekiq::ProcessSet.new
end
delegate :cleanup, :size, :leader, to: :process_set, prefix: :process_set
# Fresh view over Sidekiq's currently-running work.
def worker_set
ensure_api_present
Sidekiq::Workers.new
end
delegate :size, to: :worker_set, prefix: :worker_set
# Discards cached hosts and rebuilds from the live API.
def rebuild
@hosts = {}
build
end
# Asks every host to stop workers it considers stuck.
def stop_stuck!
each_host(&:stop_stuck!)
end
# Prints a report for every host; +size+ is the output width.
def report(size = 90)
each_host { |host| host.report size }
end
private
# Verifies the Sidekiq API constants this class depends on are loaded.
# Raises a RuntimeError naming the first missing constant; returns true when
# Sidekiq, Sidekiq::ProcessSet and Sidekiq::Workers are all present.
def ensure_api_present
  sk_present = Object.constants.include? :Sidekiq
  raise 'Sidekiq not present' unless sk_present
  sk = Object.const_get :Sidekiq
  ps_present = sk.constants.include? :ProcessSet
  raise 'Sidekiq::ProcessSet not present' unless ps_present
  ws_present = sk.constants.include? :Workers
  # Fixed copy-paste bug: this branch previously reported ProcessSet missing.
  raise 'Sidekiq::Workers not present' unless ws_present
  true
end
# Groups every Sidekiq process under a Host keyed by hostname.
def build_hosts
process_set.each do |sidekiq_process|
process = Process.new sidekiq_process
hosts[process.hostname] ||= Host.new process.hostname
hosts[process.hostname].processes[process.identity] = process
end
end
# Attaches each running worker to its owning process (matched by the
# process identity reported by the Sidekiq Workers API).
def build_workers
worker_set.each do |process_id, thread_id, work|
worker = Worker.new process_id, thread_id, work
each_host do |host|
host.each_process do |process|
next unless process_id == process.identity
process.workers[worker.thread_id] = worker
end
end
end
end
# Assigns each worker its TTL: per-class entry if configured, else :default.
def apply_worker_limits
each_host do |host|
host.each_process do |process|
process.each_worker do |worker|
worker.ttl = ttls[worker.klass] || ttls[:default]
end
end
end
end
end
end
end
| 22.489209 | 79 | 0.578375 |
bbe97c576bffd5fc7bbc8ca10ce2f1c6c30d67e3 | 361 | #! /usr/bin/env ruby
=begin
Problem 38 - Project Euler
http://projecteuler.net/index.php?section=problems&id=38
=end
class Integer
  # True when the receiver's decimal digits are exactly the nine digits 1..9,
  # each used once — no zeros, no repeats (intended for 9-digit numbers).
  def pandigital_9digits?
    digits = to_s.each_char.map(&:to_i)
    (digits << 0).uniq.size == 10
  end
end
# For each candidate n, concatenate n with 2n and keep the largest
# 1-to-9 pandigital result.
candidates = (9183..9999).map { |n| "#{n}#{n * 2}".to_i }
puts candidates.select(&:pandigital_9digits?).max
| 22.5625 | 59 | 0.617729 |
b9e658ac4aa6253eafc3f4df461dae4ccee90025 | 719 | require 'csv'
module Hathi
  # Load all allow or deny rows from the given tab-separated file into
  # HathiAccess objects with the given origin.
  # Expected header row: oclc, local_id, item_type, access, rights.
  class LoadAccessFile
    attr_reader :input_location, :origin

    def initialize(input_location:, origin:)
      @input_location = input_location
      @origin = origin
    end

    # Stream the TSV, creating one HathiAccess per row that has a
    # non-empty "access" column; rows without it are skipped.
    def load
      CSV.foreach(input_location, headers: true, col_sep: "\t") do |row|
        access = row["access"]
        next unless access

        HathiAccess.find_or_create_by!(
          oclc_number: row["oclc"],
          bibid: row["local_id"],
          status: access.upcase,
          origin: origin
        )
      end
    end
  end
end
| 26.62963 | 76 | 0.641168 |
d58b54ac24a71450274cde33fc57bbdadee1a10a | 2,026 | require 'generators/generators_test_helper'
require 'rails/generators/channel/channel_generator'
class ChannelGeneratorTest < Rails::Generators::TestCase
  include GeneratorsTestHelper
  tests Rails::Generators::ChannelGenerator

  # Generating any channel must also create the shared ApplicationCable
  # base classes.
  def test_application_cable_skeleton_is_created
    run_generator %w[books]

    assert_file "app/channels/application_cable/channel.rb" do |cable|
      assert_match(/module ApplicationCable\n class Channel < ActionCable::Channel::Base\n/, cable)
    end
    assert_file "app/channels/application_cable/connection.rb" do |cable|
      assert_match(/module ApplicationCable\n class Connection < ActionCable::Connection::Base\n/, cable)
    end
  end

  # Both the Ruby channel class and its CoffeeScript counterpart are
  # generated by default.
  def test_channel_is_created
    run_generator %w[chat]

    assert_file "app/channels/chat_channel.rb" do |channel|
      assert_match(/class ChatChannel < ApplicationCable::Channel/, channel)
    end
    assert_file "app/assets/javascripts/channels/chat.coffee" do |channel|
      assert_match(/App.cable.subscriptions.create "ChatChannel"/, channel)
    end
  end

  def test_channel_asset_is_not_created_when_skip_assets_is_passed
    run_generator %w[chat --skip-assets]

    assert_file "app/channels/chat_channel.rb" do |channel|
      assert_match(/class ChatChannel < ApplicationCable::Channel/, channel)
    end
    assert_no_file "app/assets/javascripts/channels/chat.coffee"
  end

  # cable.js is regenerated if a previous run's copy was removed.
  def test_cable_js_is_created_if_not_present_already
    run_generator %w[chat]
    FileUtils.rm("#{destination_root}/app/assets/javascripts/cable.js")

    run_generator %w[camp]
    assert_file "app/assets/javascripts/cable.js"
  end

  # Revoking one channel removes only that channel; the shared
  # ApplicationCable infrastructure must survive.
  def test_channel_on_revoke
    run_generator %w[chat]
    run_generator %w[chat], behavior: :revoke

    assert_no_file "app/channels/chat_channel.rb"
    assert_no_file "app/assets/javascripts/channels/chat.coffee"

    assert_file "app/channels/application_cable/channel.rb"
    assert_file "app/channels/application_cable/connection.rb"
    assert_file "app/assets/javascripts/cable.js"
  end
end
| 32.677419 | 106 | 0.762093 |
edf04b2da23e9df2e618237c95bf929831b4eb56 | 469 | # frozen_string_literal: true
# A class for managing the arithmetic and geometric means
# of positive numbers
module FastStats
  # Tracks a named collection of Mean accumulators and summarizes them.
  class Means
    # Hash of name => Mean accumulator.
    attr_reader :means

    def initialize
      @means = {}
    end

    # Record +val+ under the accumulator named +name+, creating the
    # accumulator on first use.
    def add(name, val)
      mean_for(name) << val
    end

    # Summarize every accumulator.
    #
    # Bug fix: the +round+ keyword is now forwarded to each Mean.
    # Previously it was accepted but ignored, and rounding was
    # hard-coded to 2 decimal places.
    def summary(round: Mean::DEFAULT_ROUND)
      means.transform_values { |m| m.summary(round: round) }
    end

    private

    # Lazily create the Mean accumulator for +name+.
    def mean_for(name)
      @means[name] ||= Mean.new
    end
  end
end
| 15.633333 | 57 | 0.639659 |
eda49511f058506ed97e5cec282d04d7eea58745 | 3,329 | def source_paths
root_path = File.expand_path(File.dirname(__FILE__))
Array(super) + [root_path, File.join(root_path, 'padrino_root')]
end
# Generate the Padrino project skeleton, then copy in this template's files.
project renderer: :slim, orm: :none, stylesheet: :none, test: :rspec
# copy over template files for Ruby
# - Gemfile w favourite gems
remove_file 'Gemfile'
copy_file 'padrino_root/Gemfile', 'Gemfile'
# copy dotfiles and any other development config
remove_file '.gitignore'
copy_file 'padrino_root/.gitignore', '.gitignore'
copy_file 'padrino_root/Guardfile', 'Guardfile'
copy_file 'padrino_root/.editorconfig', '.editorconfig'
copy_file 'padrino_root/.rspec', '.rspec'
copy_file 'padrino_root/.rubocop.yml', '.rubocop.yml'
copy_file 'padrino_root/.ruby-version', '.ruby-version'
copy_file 'padrino_root/Procfile', 'Procfile'
# Padrino app files, like config, controllers, views, etc
empty_directory 'app/views/pages'
empty_directory 'app/views/shared'
copy_file 'padrino_root/app/controllers/pages_controller.rb', 'app/controllers/pages_controller.rb'
copy_file 'padrino_root/app/controllers/enquiries_controller.rb', 'app/controllers/enquiries_controller.rb'
copy_file 'padrino_root/app/views/pages/index.html.slim', 'app/views/pages/index.html.slim'
copy_file 'padrino_root/config/unicorn.rb', 'config/unicorn.rb'
# front-end toolchain
# Bower for dependencies
empty_directory 'vendor/bower_components'
# Symlink so node tooling sees bower components under node_modules/.
create_link './node_modules/bower_components', '../vendor/bower_components'
copy_file 'padrino_root/.bowerrc', '.bowerrc'
copy_file 'padrino_root/bower.json', 'bower.json'
# Gulp for task running, including ES6-compatible Browserify setup
copy_file 'padrino_root/package.json', 'package.json'
copy_file 'padrino_root/gulpfile.js', 'gulpfile.js'
# JS template
copy_file 'padrino_root/app/javascripts/main.js', 'app/javascripts/main.js'
copy_file 'padrino_root/app/javascripts/hero.js', 'app/javascripts/hero.js'
# SMCSS-inspired CSS architecture
copy_file 'padrino_root/app/stylesheets/main.scss', 'app/stylesheets/main.scss'
# CSS folder structure
%w(core components functions pages).each { |path| empty_directory File.join('app/stylesheets/', path) }
copy_file 'padrino_root/app/stylesheets/core/_reset.scss', 'app/stylesheets/core/_reset.scss'
# Seed empty SASS partials, each with a one-line instruction comment.
[
['app/stylesheets/core/_grid.scss', '// Import and add your grids.'],
['app/stylesheets/core/_elements.scss', '// Add base styling to your HTML elements, if needed.'],
['app/stylesheets/core/_colors.scss', '// Add SASS variables for your colors.'],
['app/stylesheets/core/_dimensions.scss', '// Add SASS variables for your element sizes, converting from pixels if necessary (i.e. rem(24);)'],
['app/stylesheets/core/_typography.scss', '// Add SASS variables for your font families, sizes, and weights']
].each { |file_args| create_file(*file_args) }
empty_directory 'app/images/sprites'
# Specs folder structure
['spec/app', 'spec/requests', 'spec/support'].each { |path| empty_directory(path) }
# Run post-generator tasks, like installing dependencies
inside do
ruby_version = File.read(File.join(File.dirname(__FILE__), 'padrino_root/.ruby-version')).strip
# NOTE(review): assumes rbenv is installed at ~/.rbenv with this exact
# Ruby version available — confirm before running on other setups.
run "~/.rbenv/versions/#{ruby_version}/bin/bundle install"
run 'npm install'
run 'node_modules/.bin/bower install'
# do an initial build of template JS
run './node_modules/.bin/gulp browserify'
run './node_modules/.bin/gulp sass'
end
| 42.679487 | 145 | 0.768099 |
e2cda74dade03c7d6b2182871b37fc630695651b | 12,923 | #
# Copyright 2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# need to make sure rpmbuild is installed
module Omnibus
class Packager::RPM < Packager::Base
# @return [Hash]
# Maps supported scriptlet file names (both Omnibus and native RPM
# naming) to the RPM scriptlet section they populate in the spec.
# NOTE(review): the :pretans key looks like a typo for :pretrans —
# as written, only a file literally named "pretans" is picked up.
# Changing it would alter which filenames are recognized; confirm
# against upstream before fixing.
SCRIPT_MAP = {
  # Default Omnibus naming
  preinst: 'pre',
  postinst: 'post',
  prerm: 'preun',
  postrm: 'postun',
  # Default RPM naming
  pre: 'pre',
  post: 'post',
  preun: 'preun',
  postun: 'postun',
  verifyscript: 'verifyscript',
  pretans: 'pretans',
  posttrans: 'posttrans',
}.freeze

id :rpm
# Prepare the rpmbuild directory tree and stage the project's files
# before the build step runs.
setup do
  # Create our magic directories
  create_directory("#{staging_dir}/BUILD")
  create_directory("#{staging_dir}/RPMS")
  create_directory("#{staging_dir}/SRPMS")
  create_directory("#{staging_dir}/SOURCES")
  create_directory("#{staging_dir}/SPECS")
  # Copy the full-stack installer into the SOURCE directory, accounting for
  # any excluded files.
  #
  # /opt/hamlet => /tmp/daj29013/BUILD/opt/hamlet
  destination = File.join(build_dir, project.install_dir)
  FileSyncer.sync(project.install_dir, destination, exclude: exclusions)
  # Copy over any user-specified extra package files.
  #
  # Files retain their relative paths inside the scratch directory, so
  # we need to grab the dirname of the file, create that directory, and
  # then copy the file into that directory.
  #
  # extra_package_file '/path/to/foo.txt' #=> /tmp/BUILD/path/to/foo.txt
  project.extra_package_files.each do |file|
    parent = File.dirname(file)
    destination = File.join("#{staging_dir}/BUILD", parent)
    create_directory(destination)
    copy_file(file, destination)
  end
end
# Build steps: render the spec file first, then invoke rpmbuild over it.
build do
  # Generate the spec
  write_rpm_spec
  # Generate the rpm
  create_rpm_file
end
#
# @!group DSL methods
# --------------------------------------------------
#
# Set or return the signing passphrase. If this value is provided,
# Omnibus will attempt to sign the RPM.
#
# @example
# signing_passphrase "foo"
#
# @param [String] val
# the passphrase to use when signing the RPM
#
# @return [String]
# the RPM-signing passphrase
#
# Set (or, with no argument, read) the passphrase used to GPG-sign the
# built RPM. Signing is only attempted when a passphrase has been set.
def signing_passphrase(val = NULL)
  return @signing_passphrase if null?(val)
  @signing_passphrase = val
end
expose :signing_passphrase
#
# Set or return the vendor who made this package.
#
# @example
# vendor "Seth Vargo <[email protected]>"
#
# @param [String] val
# the vendor who make this package
#
# @return [String]
# the vendor who make this package
#
# Set or return the vendor recorded in the RPM metadata.
# Defaults to the Omnibus maintainers; raises InvalidValue unless
# given a String.
def vendor(val = NULL)
  return @vendor || 'Omnibus <[email protected]>' if null?(val)
  raise InvalidValue.new(:vendor, 'be a String') unless val.is_a?(String)
  @vendor = val
end
expose :vendor
#
# Set or return the license for this package.
#
# @example
# license "Apache 2.0"
#
# @param [String] val
# the license for this package
#
# @return [String]
# the license for this package
#
# Set or return the license recorded in the RPM metadata.
# Defaults to 'unknown'; raises InvalidValue unless given a String.
def license(val = NULL)
  return @license || 'unknown' if null?(val)
  raise InvalidValue.new(:license, 'be a String') unless val.is_a?(String)
  @license = val
end
expose :license
#
# Set or return the priority for this package.
#
# @example
# priority "extra"
#
# @param [String] val
# the priority for this package
#
# @return [String]
# the priority for this package
#
# Set or return the package priority.
# Defaults to 'extra'; raises InvalidValue unless given a String.
def priority(val = NULL)
  return @priority || 'extra' if null?(val)
  raise InvalidValue.new(:priority, 'be a String') unless val.is_a?(String)
  @priority = val
end
expose :priority
#
# Set or return the category for this package.
#
# @example
# category "databases"
#
# @param [String] val
# the category for this package
#
# @return [String]
# the category for this package
#
# Set or return the package category.
# Defaults to 'default'; raises InvalidValue unless given a String.
def category(val = NULL)
  return @category || 'default' if null?(val)
  raise InvalidValue.new(:category, 'be a String') unless val.is_a?(String)
  @category = val
end
expose :category
#
# @!endgroup
# --------------------------------------------------
#
# @return [String]
#
# The filename of the .rpm artifact this packager produces:
# <name>-<version>-<iteration>.<arch>.rpm
def package_name
  format('%s-%s-%s.%s.rpm',
    safe_base_package_name, safe_version, safe_build_iteration, safe_architecture)
end
#
# The path to the +BUILD+ directory inside the staging directory.
#
# @return [String]
#
def build_dir
  # Memoized: computed once per packager instance.
  @build_dir ||= File.join(staging_dir, 'BUILD')
end
#
# Render an rpm spec file in +SPECS/#{name}.spec+ using the supplied ERB
# template.
#
# @return [void]
#
# Collect scriptlets, config files and the staged file list, then
# render the .spec template to #spec_file.
def write_rpm_spec
  # Create a map of scripts that exist and their contents
  # NOTE(review): both Omnibus and RPM names map to the same destination
  # key, so if a project supplies both (e.g. 'preinst' and 'pre') the
  # later SCRIPT_MAP entry wins — confirm this is intended.
  scripts = SCRIPT_MAP.inject({}) do |hash, (source, destination)|
    path = File.join(project.package_scripts_path, source.to_s)
    if File.file?(path)
      hash[destination] = File.read(path)
    end
    hash
  end
  # Exclude directories from the spec that are owned by the filesystem package:
  # http://fedoraproject.org/wiki/Packaging:Guidelines#File_and_Directory_Ownership
  filesystem_directories = IO.readlines(resource_path('filesystem_list'))
  filesystem_directories.map! { |dirname| dirname.chomp }
  # Get a list of user-declared config files
  config_files = project.config_files.map { |file| rpm_safe(file) }
  # Get a list of all files: staged paths made absolute, RPM-escaped,
  # minus config files (listed separately) and filesystem-owned dirs.
  files = FileSyncer.glob("#{build_dir}/**/*")
    .map { |path| path.gsub("#{build_dir}/", '') }
    .map { |path| "/#{path}" }
    .map { |path| rpm_safe(path) }
    .reject { |path| config_files.include?(path) }
    .reject { |path| filesystem_directories.include?(path) }
  render_template(resource_path('spec.erb'),
    destination: spec_file,
    variables: {
      name: safe_base_package_name,
      version: safe_version,
      iteration: safe_build_iteration,
      vendor: vendor,
      license: license,
      architecture: safe_architecture,
      maintainer: project.maintainer,
      homepage: project.homepage,
      description: project.description,
      priority: priority,
      category: category,
      conflicts: project.conflicts,
      replaces: project.replaces,
      dependencies: project.runtime_dependencies,
      user: project.package_user,
      group: project.package_group,
      scripts: scripts,
      config_files: config_files,
      files: files,
    }
  )
end
#
# Generate the RPM file using +rpmbuild+. The use of the +fakeroot+ command
# is required so that the package is owned by +root:root+, but the build
# user does not need to have sudo permissions.
#
# @return [void]
#
# Build the .rpm with rpmbuild (under fakeroot so files are owned by
# root:root without sudo), optionally GPG-signing it, then copy the
# result(s) into the package directory.
def create_rpm_file
  command = %|fakeroot rpmbuild|
  command << %| -bb|
  command << %| --buildroot #{staging_dir}/BUILD|
  command << %| --define '_topdir #{staging_dir}'|
  if signing_passphrase
    log.info(log_key) { "Signing enabled for .rpm file" }
    # Reuse the user's ~/.rpmmacros when present; otherwise render a
    # temporary one pointing at the default GPG setup.
    if File.exist?("#{ENV['HOME']}/.rpmmacros")
      log.info(log_key) { "Detected .rpmmacros file at `#{ENV['HOME']}'" }
      home = ENV['HOME']
    else
      log.info(log_key) { "Using default .rpmmacros file from Omnibus" }
      # Generate a temporary home directory
      home = Dir.mktmpdir
      render_template(resource_path('rpmmacros.erb'),
        destination: "#{home}/.rpmmacros",
        variables: {
          gpg_name: project.maintainer,
          gpg_path: "#{ENV['HOME']}/.gnupg", # TODO: Make this configurable
        }
      )
    end
    command << " --sign"
    command << " #{spec_file}"
    with_rpm_signing do |signing_script|
      log.info(log_key) { "Creating .rpm file" }
      shellout!("#{signing_script} \"#{command}\"", environment: { 'HOME' => home })
    end
  else
    log.info(log_key) { "Creating .rpm file" }
    command << " #{spec_file}"
    shellout!("#{command}")
  end
  FileSyncer.glob("#{staging_dir}/RPMS/**/*.rpm").each do |rpm|
    copy_file(rpm, Config.package_dir)
  end
end
#
# The full path to this spec file on disk.
#
# @return [String]
#
# Absolute path of the generated spec file inside the staging area.
def spec_file
  format('%s/SPECS/%s.spec', staging_dir, package_name)
end
#
# Render the rpm signing script with secure permissions, call the given
# block with the path to the script, and ensure deletion of the script from
# disk since it contains sensitive information.
#
# @param [Proc] block
# the block to call
#
# @return [String]
#
def with_rpm_signing(&block)
  directory = Dir.mktmpdir
  destination = "#{directory}/sign-rpm"
  # 0700: the rendered script embeds the signing passphrase, so it must
  # not be readable by other users.
  render_template(resource_path('signing.erb'),
    destination: destination,
    mode: 0700,
    variables: {
      passphrase: signing_passphrase,
    }
  )
  # Yield the destination to the block
  block.call(destination)
ensure
  # Always delete the passphrase-bearing script, even if the block raises.
  remove_file(destination)
  remove_directory(directory)
end
#
# Generate an RPM-safe name from the given string, doing the following:
#
# - Replace [ with [\[] to make rpm not use globs
# - Replace * with [*] to make rpm not use globs
# - Replace ? with [?] to make rpm not use globs
# - Replace % with [%] to make rpm not expand macros
#
# @param [String] string
# the string to sanitize
#
def rpm_safe(string)
  # Quote the whole value when it contains whitespace.
  string = "\"#{string}\"" if string[/\s/]
  # Escape glob/macro characters in a single pass. Each original
  # character is rewritten exactly once, so the brackets introduced by a
  # replacement are never themselves re-escaped — equivalent to the
  # sequential gsub chain this replaces.
  escapes = {
    '[' => '[\\[]',
    '*' => '[*]',
    '?' => '[?]',
    '%' => '[%]',
  }
  string.gsub(/[\[*?%]/) { |char| escapes[char] }
end
#
# Return the RPM-ready base package name, converting any invalid characters to
# dashes (+-+).
#
# @return [String]
#
# Return the RPM-ready base package name, converting any invalid
# characters to dashes (+-+) and logging a warning when a conversion
# takes place.
def safe_base_package_name
  if project.package_name =~ /\A[a-z0-9\.\+\-]+\z/
    project.package_name.dup
  else
    converted = project.package_name.downcase.gsub(/[^a-z0-9\.\+\-]+/, '-')
    log.warn(log_key) do
      # Fixed typo in the warning text: "compontent" -> "component".
      "The `name' component of RPM package names can only include " \
      "lowercase alphabetical characters (a-z), numbers (0-9), dots (.), " \
      "plus signs (+), and dashes (-). Converting `#{project.package_name}' to " \
      "`#{converted}'."
    end
    converted
  end
end
#
# This is actually just the regular build_iternation, but it felt lonely
# among all the other +safe_*+ methods.
#
# @return [String]
#
# Plain delegation to the project's build iteration; named safe_* only
# for symmetry with the other safe_ helpers.
def safe_build_iteration
  project.build_iteration
end
#
# RPM package versions cannot contain dashes, so we will convert them to
# underscores.
#
# @return [String]
#
# RPM version strings cannot contain dashes; convert them to
# underscores and warn when a conversion happens.
def safe_version
  version = project.build_version
  return version.dup if version =~ /\A[a-zA-Z0-9\.\+\_]+\z/
  sanitized = version.gsub('-', '_')
  log.warn(log_key) do
    "The `version' component of RPM package names can only include " \
    "alphabetical characters (a-z, A-Z), numbers (0-9), dots (.), " \
    "plus signs (+), and underscores (_). Converting " \
    "`#{version}' to `#{sanitized}'."
  end
  sanitized
end
#
# The architecture for this RPM package.
#
# @return [String]
#
# The architecture string for this RPM. armv6l is special-cased:
# Pidora names it armv6hl.
def safe_architecture
  machine = Ohai['kernel']['machine']
  return machine unless machine == 'armv6l'
  Ohai['platform'] == 'pidora' ? 'armv6hl' : 'armv6l'
end
end
end
| 27.437367 | 98 | 0.569837 |
1cecddd92188ba6b19de9744d8e776235be97f9f | 1,110 | module Optimadmin
module ModuleHelper
def module_tooltip(tooltip_prompt_text, tooltip_content)
content_tag :span, "#{tooltip_prompt_text} <span class='octicon octicon-question'></span>".html_safe, data: { tooltip: '', aria_haspopup: true }, class: 'has-tip', title: tooltip_content
end
def list_item(classes = 'large-1 small-4 text-center', &content)
data = content_tag :div, class: "#{classes} columns" do
capture(&content)
end
data
end
def render_object(object)
if object.length >= 1
render object
else
raw 'There are no results.'
end
end
def pagination_helper(object, module_name)
return unless object.length >= 1
content_tag :div, (page_entries_info(object, entry_name: module_name) + '.'), class: 'pagination-information'
end
# http://stackoverflow.com/a/7830624
def required?(obj, attribute)
target = (obj.class == Class) ? obj : obj.class
target.validators_on(attribute).map(&:class).include?(
ActiveRecord::Validations::PresenceValidator)
end
end
end
| 30.833333 | 192 | 0.663964 |
1cb36011973b88afa08f68f195066a3f3889b0d2 | 323 | require 'spec_helper'
describe "spree/checkout/_summary.html.erb" do
# Regression spec for #4223
it "does not use the @order instance variable" do
order = stub_model(Spree::Order)
lambda do
render :partial => "spree/checkout/summary", :locals => {:order => order}
end.should_not raise_error
end
end | 29.363636 | 79 | 0.702786 |
116c9403028f3729fd6f0218bbcce007afccbe23 | 934 | class PostAppealsController < ApplicationController
before_action :member_only, :except => [:index, :show]
respond_to :html, :xml, :json, :js
def new
@post_appeal = PostAppeal.new(post_appeal_params)
respond_with(@post_appeal)
end
def index
@post_appeals = PostAppeal.includes(:creator).search(search_params).includes(post: [:appeals, :uploader, :approver])
@post_appeals = @post_appeals.paginate(params[:page], limit: params[:limit])
respond_with(@post_appeals) do |format|
format.xml do
render :xml => @post_appeals.to_xml(:root => "post-appeals")
end
end
end
def create
@post_appeal = PostAppeal.create(post_appeal_params)
respond_with(@post_appeal)
end
def show
@post_appeal = PostAppeal.find(params[:id])
respond_with(@post_appeal)
end
private
def post_appeal_params
params.fetch(:post_appeal, {}).permit(%i[post_id reason])
end
end
| 25.944444 | 120 | 0.702355 |
bb6beb4e4aa90e0a4a597b98153f31d87f065ded | 4,068 | # config/initializers/redis_memo.rb
RedisMemo.configure do |config|
  # Passed along to the Rails
  # {RedisCacheStore}[https://api.rubyonrails.org/classes/ActiveSupport/Cache/RedisCacheStore.html],
  # sets the TTL on cache entries in Redis.
  config.expires_in = 3.hours
  # A global cache key version prepended to each cached entry. For example, the
  # commit hash of the current version deployed to your application.
  config.global_cache_key_version = ENV['HEROKU_SLUG_COMMIT']
  # Redis errors are reported, not raised to callers.
  config.redis_error_handler = proc do |error, operation, extra|
    ErrorReporter.notify(error, tags: { operation: operation }, extra: extra)
  end
  # Object used to log RedisMemo operations.
  config.logger { Rails.logger }
  # Sets the tracer object. Allows the tracer to be dynamically determined at
  # runtime if a blk is given.
  config.tracer { Datadog.tracer }
  # <url>,<url>...;<url>,...;...
  # Semicolons separate clusters; commas separate primary,replica,... URLs.
  redis_urls = ENV['REDIS_MEMO_REDIS_URLS']
  if redis_urls.present?
    # Set configuration values to pass to the Redis client. If multiple
    # configurations are passed to this method, we assume that the first config
    # corresponds to the primary node, and subsequent configurations correspond
    # to replica nodes.
    config.redis = redis_urls.split(';').map do |urls|
      urls.split(',').map do |url|
        {
          url: url,
          # All timeout values are specified in seconds
          connect_timeout: ENV['REDIS_MEMO_CONNECT_TIMEOUT']&.to_f || 0.2,
          read_timeout: ENV['REDIS_MEMO_READ_TIMEOUT']&.to_f || 0.5,
          write_timeout: ENV['REDIS_MEMO_WRITE_TIMEOUT']&.to_f || 0.5,
          reconnect_attempts: ENV['REDIS_MEMO_RECONNECT_ATTEMPTS']&.to_i || 0,
        }
      end
    end
  end
  if !Rails.env.test?
    thread_pool = Concurrent::ThreadPoolExecutor.new(
      min_threads: 1,
      max_threads: ENV['REDIS_MEMO_MAX_THREADS']&.to_i || 20,
      max_queue: 100,
      # If we're overwhelmed, discard the current invalidation request and
      # retry later
      fallback_policy: :discard,
      auto_terminate: false,
    )
    # A handler used to asynchronously perform cache writes and invalidations.
    # If no value is provided, RedisMemo will perform these operations
    # synchronously.
    config.async = proc do |&blk|
      # Skip async in rails console since the async logs would interfere with
      # the console outputs
      if defined?(Rails::Console)
        blk.call
      else
        thread_pool.post { blk.call }
      end
    end
  end
  unless ENV['REDIS_MEMO_CONNECTION_POOL_SIZE'].nil?
    # Configuration values for connecting to RedisMemo using a connection pool.
    # It's recommended to use a connection pool in multi-threaded applications,
    # or when an async handler is set.
    # NOTE(review): the timeout uses `&.to_i` (truncates fractional values)
    # while the fallback is the float 0.2 — possibly intended `&.to_f`;
    # confirm before relying on sub-second pool timeouts.
    config.connection_pool = {
      size: ENV['REDIS_MEMO_CONNECTION_POOL_SIZE'].to_i,
      timeout: ENV['REDIS_MEMO_CONNECTION_POOL_TIMEOUT']&.to_i || 0.2,
    }
  end
  # Specify the global sampled percentage of the chance to call the cache
  # validation, a value between 0 to 100, when the value is 100, it will call
  # the handler every time the cached result does not match the uncached result
  # You can also specify inline cache validation sample percentage by
  # memoize_method :method, cache_validation_sample_percentage: #{value}
  config.cache_validation_sample_percentage = ENV['REDIS_MEMO_CACHE_VALIDATION_SAMPLE_PERCENTAGE']&.to_i
  # Handler called when the cached result does not match the uncached result
  # (sampled at the `cache_validation_sample_percentage`). This might indicate
  # that invalidation is happening too slowly or that there are incorrect
  # dependencies specified on a cached method.
  config.cache_out_of_date_handler = proc do |ref, method_id, args, cached_result, fresh_result|
    ErrorReporter.notify(
      "Cache does not match its current value: #{method_id}",
      tags: { method_id: method_id },
      extra: {
        self: ref.to_s,
        args: args,
        cached_result: cached_result,
        fresh_result: fresh_result,
      },
    )
  end
end
| 39.495146 | 104 | 0.705506 |
bf1b0c6059ddecffb2c134f936683a88855b6f35 | 4,004 | #--
# Copyright (c) 2005-2011, John Mettraux, [email protected]
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Made in Japan.
#++
module Ruote::Exp
#
# This expression executes its children expression according to a cron
# schedule or at a given frequency.
#
# cron '15 4 * * sun' do # every sunday at 0415
# subprocess :ref => 'refill_the_acid_baths'
# end
#
# or
#
# every '10m' do
# send_reminder # subprocess or participant
# end
#
# The 'tab' or 'interval' attributes may be used, this is a bit more verbose,
# but, for instance, in XML, it is quite necessary :
#
# <cron tab="15 4 * * sun">
# <subprocess ref="refill_the_acid_baths" />
# <cron>
#
# Triggered children subprocesses are 'forgotten'. This implies they
# will never reply to the cron/every expression and they won't get cancelled
# when the cron/every expression gets cancelled (the cron/every schedule
# gets cancelled though, no new children will get cancelled).
#
# "man 5 crontab" in the command line of your favourite unix system might
# help you with the semantics of the string expected by the cron expression.
#
#
# == an example use case
#
# The cron/every expression appears often in scenarii like :
#
# concurrence :count => 1 do
#
# participant 'operator'
#
# cron '0 9 * * 1-5' do # send a reminder every weekday at 0900
# notify 'operator'
# end
# end
#
# With a subprocess, this could become a bit more reusable :
#
# Ruote.process_defintion :name => 'sample' do
#
# sequence do
# with_reminder :participant => 'operator1'
# with_reminder :participant => 'operator2'
# end
#
# define 'with_reminder' do
# concurrence :count => 1 do
# participant '${v:participant}'
# cron '0 9 * * 1-5' do # send a reminder every weekday at 0900
# notify '${v:participant}'
# end
# end
# end
# end
#
class CronExpression < FlowExpression
names :cron, :every
# Stores the cron tab / interval — taken from the :tab or :interval
# attribute, falling back to the expression's text — and schedules the
# first occurrence.
def apply
  h.schedule = attribute(:tab) || attribute(:interval) || attribute_text
  reschedule
end
# Fired each time the schedule triggers: launch the child tree as a
# forgotten subprocess (it never replies here and is not cancelled with
# this expression), then schedule the next occurrence.
def reply(workitem)
  child_workitem = Ruote.fulldup(h.applied_workitem)
  launch_sub(
    "#{h.fei['expid']}_0",
    tree_children[0],
    :workitem => child_workitem,
    :forget => true)
  reschedule
end
# Note : this method has to be public.
#
# Registers the next 'reply' trigger with the storage scheduler, then
# persists this expression; on persist failure the fresh schedule is
# immediately removed again.
def reschedule
  h.schedule_id = @context.storage.put_schedule(
    'cron',
    h.fei,
    h.schedule,
    'action' => 'reply',
    'fei' => h.fei,
    'workitem' => h.applied_workitem)
  @context.storage.delete_schedule(h.schedule_id) if try_persist
  #
  # if the persist failed, immediately unschedule
  # the just scheduled job
  #
  # this is meant to cope with cases where one worker reschedules
  # while another just cancelled
end
end
end
| 29.659259 | 79 | 0.651598 |
ffecd4f15d46bff6f8898fd98170e8f197f49e2e | 5,054 | class UtilLinux < Formula
# Formula metadata: upstream source, licensing, prebuilt bottles and
# platform-specific dependencies/patches.
desc "Collection of Linux utilities"
homepage "https://github.com/util-linux/util-linux"
url "https://mirrors.edge.kernel.org/pub/linux/utils/util-linux/v2.38/util-linux-2.38.tar.xz"
sha256 "6d111cbe4d55b336db2f1fbeffbc65b89908704c01136371d32aa9bec373eb64"
license all_of: [
  "BSD-3-Clause",
  "BSD-4-Clause-UC",
  "GPL-2.0-only",
  "GPL-2.0-or-later",
  "GPL-3.0-or-later",
  "LGPL-2.1-or-later",
  :public_domain,
]
revision 1
bottle do
  sha256 arm64_monterey: "c9653e2676398fa31ef966b4708dd77598d940567ae98bf8a358bf6e3f9795d3"
  sha256 arm64_big_sur:  "af364bc9ec694952b1d7a24754c06b42dbf29a6d177b6a17d2f6d0142b9c996f"
  sha256 monterey:       "1d3d53424843429c548fbbe26708ff3fc70c61fa77e431f15586ec85a86e43f5"
  sha256 big_sur:        "16e6c5fc7d5f68666b8d418180d76cf6df8ac085935836747e9ba4a81a3684cb"
  sha256 catalina:       "9a4c50aca114732ebdca292bff3d11074824dc7d8991421dc9d57ff35d8c5bc6"
  sha256 x86_64_linux:   "79b4830d6c2b45d991a81915e7f340c21ddc7e6c4b189916791aaab296a30517"
end
keg_only :shadowed_by_macos, "macOS provides the uuid.h header"
depends_on "asciidoctor" => :build
depends_on "gettext"
uses_from_macos "libxcrypt"
uses_from_macos "ncurses"
uses_from_macos "zlib"
# Everything in following macOS block is for temporary patches
# TODO: Remove in the next release.
on_macos do
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "gtk-doc" => :build
  depends_on "libtool" => :build
  depends_on "pkg-config" => :build
  # Fix ./include/statfs_magic.h:4:10: fatal error: 'sys/statfs.h' file not found
  patch do
    url "https://github.com/util-linux/util-linux/commit/478b9d477ecdd8f4e3a7b488524e1d4c6a113525.patch?full_index=1"
    sha256 "576c26c3d15642f1c44548d0120b192b855cceeebf8ad97fb5e300350e88a3f7"
  end
  # Fix lib/procfs.c:9:10: fatal error: 'sys/vfs.h' file not found
  patch do
    url "https://github.com/util-linux/util-linux/commit/3671d4a878fb58aa953810ecf9af41809317294f.patch?full_index=1"
    sha256 "d38c9ae06c387da151492dd5862c58551559dd6d2b1877c74cc1e11754221fe4"
  end
end
on_linux do
  conflicts_with "bash-completion", because: "both install `mount`, `rfkill`, and `rtcwake` completions"
  conflicts_with "rename", because: "both install `rename` binaries"
end
# Configure and build util-linux, disabling the tools that either do
# not build on macOS or would conflict with other packages on Linux.
def install
  # Temporary work around for patches. Remove in the next release.
  system "autoreconf", "--force", "--install", "--verbose" if OS.mac?
  args = %w[--disable-silent-rules]
  if OS.mac?
    args << "--disable-ipcs" # does not build on macOS
    args << "--disable-ipcrm" # does not build on macOS
    args << "--disable-wall" # already comes with macOS
    args << "--disable-libmount" # does not build on macOS
    args << "--enable-libuuid" # conflicts with ossp-uuid
  else
    args << "--disable-use-tty-group" # Fix chgrp: changing group of 'wall': Operation not permitted
    args << "--disable-kill" # Conflicts with coreutils.
    args << "--disable-cal" # Conflicts with bsdmainutils
    args << "--without-systemd" # Do not install systemd files
    args << "--with-bashcompletiondir=#{bash_completion}"
    args << "--disable-chfn-chsh"
    args << "--disable-login"
    args << "--disable-su"
    args << "--disable-runuser"
    args << "--disable-makeinstall-chown"
    args << "--disable-makeinstall-setuid"
    args << "--without-python"
  end
  system "./configure", *std_configure_args, *args
  system "make", "install"
  # install completions only for installed programs
  Pathname.glob("bash-completion/*") do |prog|
    bash_completion.install prog if (bin/prog.basename).exist? || (sbin/prog.basename).exist?
  end
end
# Tell macOS users which Linux-only binaries are intentionally absent.
# NOTE(review): relies on the `on_macos` DSL returning the heredoc on
# macOS (and nil elsewhere) when used inside caveats — confirm against
# current Homebrew behavior.
def caveats
  linux_only_bins = %w[
    addpart agetty
    blkdiscard blkzone blockdev
    chcpu chmem choom chrt ctrlaltdel
    delpart dmesg
    eject
    fallocate fdformat fincore findmnt fsck fsfreeze fstrim
    hwclock
    ionice ipcrm ipcs
    kill
    last ldattach losetup lsblk lscpu lsipc lslocks lslogins lsmem lsns
    mount mountpoint
    nsenter
    partx pivot_root prlimit
    raw readprofile resizepart rfkill rtcwake
    script scriptlive setarch setterm sulogin swapoff swapon switch_root
    taskset
    umount unshare utmpdump uuidd
    wall wdctl
    zramctl
  ]
  on_macos do
    <<~EOS
      The following tools are not supported for macOS, and are therefore not included:
      #{Formatter.columns(linux_only_bins)}
    EOS
  end
end
# Smoke test: rebuild /usr's expected `namei -lx` listing from File.stat
# and compare against the installed binary's output.
test do
  stat = File.stat "/usr"
  owner = Etc.getpwuid(stat.uid).name
  group = Etc.getgrgid(stat.gid).name
  # Reconstruct the symbolic rwx string from the low nine mode bits.
  # flags lines up with bit positions 0..8 (other, group, user), and
  # insert-at-0 builds the string from least to most significant bit.
  flags = ["x", "w", "r"] * 3
  perms = flags.each_with_index.reduce("") do |sum, (flag, index)|
    sum.insert 0, ((stat.mode & (2 ** index)).zero? ? "-" : flag)
  end
  out = shell_output("#{bin}/namei -lx /usr").split("\n").last.split
  assert_equal ["d#{perms}", owner, group, "usr"], out
end
end
| 35.591549 | 119 | 0.688959 |
330585f3645a751c18b4df8357b371c2792b8bcc | 15,788 | # frozen_string_literal: true
require_relative '../table'
require 'set'
module TTFunk
class Table
class OS2 < Table
attr_reader :version
attr_reader :ave_char_width
attr_reader :weight_class
attr_reader :width_class
attr_reader :type
attr_reader :y_subscript_x_size
attr_reader :y_subscript_y_size
attr_reader :y_subscript_x_offset
attr_reader :y_subscript_y_offset
attr_reader :y_superscript_x_size
attr_reader :y_superscript_y_size
attr_reader :y_superscript_x_offset
attr_reader :y_superscript_y_offset
attr_reader :y_strikeout_size
attr_reader :y_strikeout_position
attr_reader :family_class
attr_reader :panose
attr_reader :char_range
attr_reader :vendor_id
attr_reader :selection
attr_reader :first_char_index
attr_reader :last_char_index
attr_reader :ascent
attr_reader :descent
attr_reader :line_gap
attr_reader :win_ascent
attr_reader :win_descent
attr_reader :code_page_range
attr_reader :x_height
attr_reader :cap_height
attr_reader :default_char
attr_reader :break_char
attr_reader :max_context
CODE_PAGE_BITS = {
1252 => 0, 1250 => 1, 1251 => 2, 1253 => 3, 1254 => 4,
1255 => 5, 1256 => 6, 1257 => 7, 1258 => 8, 874 => 16,
932 => 17, 936 => 18, 949 => 19, 950 => 20, 1361 => 21,
10_000 => 29, 869 => 48, 866 => 49, 865 => 50, 864 => 51,
863 => 52, 862 => 53, 861 => 54, 860 => 55, 857 => 56,
855 => 57, 852 => 58, 775 => 59, 737 => 60, 708 => 61,
850 => 62, 437 => 63
}.freeze
UNICODE_BLOCKS = {
(0x0000..0x007F) => 0, (0x0080..0x00FF) => 1,
(0x0100..0x017F) => 2, (0x0180..0x024F) => 3,
(0x0250..0x02AF) => 4, (0x1D00..0x1D7F) => 4,
(0x1D80..0x1DBF) => 4, (0x02B0..0x02FF) => 5,
(0xA700..0xA71F) => 5, (0x0300..0x036F) => 6,
(0x1DC0..0x1DFF) => 6, (0x0370..0x03FF) => 7,
(0x2C80..0x2CFF) => 8, (0x0400..0x04FF) => 9,
(0x0500..0x052F) => 9, (0x2DE0..0x2DFF) => 9,
(0xA640..0xA69F) => 9, (0x0530..0x058F) => 10,
(0x0590..0x05FF) => 11, (0xA500..0xA63F) => 12,
(0x0600..0x06FF) => 13, (0x0750..0x077F) => 13,
(0x07C0..0x07FF) => 14, (0x0900..0x097F) => 15,
(0x0980..0x09FF) => 16, (0x0A00..0x0A7F) => 17,
(0x0A80..0x0AFF) => 18, (0x0B00..0x0B7F) => 19,
(0x0B80..0x0BFF) => 20, (0x0C00..0x0C7F) => 21,
(0x0C80..0x0CFF) => 22, (0x0D00..0x0D7F) => 23,
(0x0E00..0x0E7F) => 24, (0x0E80..0x0EFF) => 25,
(0x10A0..0x10FF) => 26, (0x2D00..0x2D2F) => 26,
(0x1B00..0x1B7F) => 27, (0x1100..0x11FF) => 28,
(0x1E00..0x1EFF) => 29, (0x2C60..0x2C7F) => 29,
(0xA720..0xA7FF) => 29, (0x1F00..0x1FFF) => 30,
(0x2000..0x206F) => 31, (0x2E00..0x2E7F) => 31,
(0x2070..0x209F) => 32, (0x20A0..0x20CF) => 33,
(0x20D0..0x20FF) => 34, (0x2100..0x214F) => 35,
(0x2150..0x218F) => 36, (0x2190..0x21FF) => 37,
(0x27F0..0x27FF) => 37, (0x2900..0x297F) => 37,
(0x2B00..0x2BFF) => 37, (0x2200..0x22FF) => 38,
(0x2A00..0x2AFF) => 38, (0x27C0..0x27EF) => 38,
(0x2980..0x29FF) => 38, (0x2300..0x23FF) => 39,
(0x2400..0x243F) => 40, (0x2440..0x245F) => 41,
(0x2460..0x24FF) => 42, (0x2500..0x257F) => 43,
(0x2580..0x259F) => 44, (0x25A0..0x25FF) => 45,
(0x2600..0x26FF) => 46, (0x2700..0x27BF) => 47,
(0x3000..0x303F) => 48, (0x3040..0x309F) => 49,
(0x30A0..0x30FF) => 50, (0x31F0..0x31FF) => 50,
(0x3100..0x312F) => 51, (0x31A0..0x31BF) => 51,
(0x3130..0x318F) => 52, (0xA840..0xA87F) => 53,
(0x3200..0x32FF) => 54, (0x3300..0x33FF) => 55,
(0xAC00..0xD7AF) => 56, (0xD800..0xDFFF) => 57,
(0x10900..0x1091F) => 58, (0x4E00..0x9FFF) => 59,
(0x2E80..0x2EFF) => 59, (0x2F00..0x2FDF) => 59,
(0x2FF0..0x2FFF) => 59, (0x3400..0x4DBF) => 59,
(0x20000..0x2A6DF) => 59, (0x3190..0x319F) => 59,
(0xE000..0xF8FF) => 60, (0x31C0..0x31EF) => 61,
(0xF900..0xFAFF) => 61, (0x2F800..0x2FA1F) => 61,
(0xFB00..0xFB4F) => 62, (0xFB50..0xFDFF) => 63,
(0xFE20..0xFE2F) => 64, (0xFE10..0xFE1F) => 65,
(0xFE30..0xFE4F) => 65, (0xFE50..0xFE6F) => 66,
(0xFE70..0xFEFF) => 67, (0xFF00..0xFFEF) => 68,
(0xFFF0..0xFFFF) => 69, (0x0F00..0x0FFF) => 70,
(0x0700..0x074F) => 71, (0x0780..0x07BF) => 72,
(0x0D80..0x0DFF) => 73, (0x1000..0x109F) => 74,
(0x1200..0x137F) => 75, (0x1380..0x139F) => 75,
(0x2D80..0x2DDF) => 75, (0x13A0..0x13FF) => 76,
(0x1400..0x167F) => 77, (0x1680..0x169F) => 78,
(0x16A0..0x16FF) => 79, (0x1780..0x17FF) => 80,
(0x19E0..0x19FF) => 80, (0x1800..0x18AF) => 81,
(0x2800..0x28FF) => 82, (0xA000..0xA48F) => 83,
(0xA490..0xA4CF) => 83, (0x1700..0x171F) => 84,
(0x1720..0x173F) => 84, (0x1740..0x175F) => 84,
(0x1760..0x177F) => 84, (0x10300..0x1032F) => 85,
(0x10330..0x1034F) => 86, (0x10400..0x1044F) => 87,
(0x1D000..0x1D0FF) => 88, (0x1D100..0x1D1FF) => 88,
(0x1D200..0x1D24F) => 88, (0x1D400..0x1D7FF) => 89,
(0xF0000..0xFFFFD) => 90, (0x100000..0x10FFFD) => 90,
(0xFE00..0xFE0F) => 91, (0xE0100..0xE01EF) => 91,
(0xE0000..0xE007F) => 92, (0x1900..0x194F) => 93,
(0x1950..0x197F) => 94, (0x1980..0x19DF) => 95,
(0x1A00..0x1A1F) => 96, (0x2C00..0x2C5F) => 97,
(0x2D30..0x2D7F) => 98, (0x4DC0..0x4DFF) => 99,
(0xA800..0xA82F) => 100, (0x10000..0x1007F) => 101,
(0x10080..0x100FF) => 101, (0x10100..0x1013F) => 101,
(0x10140..0x1018F) => 102, (0x10380..0x1039F) => 103,
(0x103A0..0x103DF) => 104, (0x10450..0x1047F) => 105,
(0x10480..0x104AF) => 106, (0x10800..0x1083F) => 107,
(0x10A00..0x10A5F) => 108, (0x1D300..0x1D35F) => 109,
(0x12000..0x123FF) => 110, (0x12400..0x1247F) => 110,
(0x1D360..0x1D37F) => 111, (0x1B80..0x1BBF) => 112,
(0x1C00..0x1C4F) => 113, (0x1C50..0x1C7F) => 114,
(0xA880..0xA8DF) => 115, (0xA900..0xA92F) => 116,
(0xA930..0xA95F) => 117, (0xAA00..0xAA5F) => 118,
(0x10190..0x101CF) => 119, (0x101D0..0x101FF) => 120,
(0x102A0..0x102DF) => 121, (0x10280..0x1029F) => 121,
(0x10920..0x1093F) => 121, (0x1F030..0x1F09F) => 122,
(0x1F000..0x1F02F) => 122
}.freeze
UNICODE_MAX = 0xFFFF
UNICODE_RANGES = UNICODE_BLOCKS.keys.freeze
LOWERCASE_START = 'a'.ord
LOWERCASE_END = 'z'.ord
LOWERCASE_COUNT = (LOWERCASE_END - LOWERCASE_START) + 1
CODEPOINT_SPACE = 32
SPACE_GLYPH_MISSING_ERROR = "Space glyph (0x#{CODEPOINT_SPACE.to_s(16)})"\
' must be included in the font'
# Used to calculate the xAvgCharWidth field.
# From https://docs.microsoft.com/en-us/typography/opentype/spec/os2:
#
# "When first defined, the specification was biased toward Basic Latin
# characters, and it was thought that the xAvgCharWidth value could be
# used to estimate the average length of lines of text. A formula for
# calculating xAvgCharWidth was provided using frequency-of-use
# weighting factors for lowercase letters a - z."
#
# The array below contains 26 weight values which correspond to the
# 26 letters in the Latin alphabet. Each weight is the relative
# frequency of that letter in the English language.
WEIGHT_SPACE = 166
WEIGHT_LOWERCASE = [
64, 14, 27, 35, 100, 20, 14, 42, 63, 3, 6, 35, 20,
56, 56, 17, 4, 49, 56, 71, 31, 10, 18, 3, 18, 2
].freeze
# TrueType/OpenType table tag under which this table appears in the
# font's table directory.
def tag
'OS/2'
end
class << self
# Serialize an OS/2 table for a subsetted font.
#
# os2    - the original TTFunk OS/2 table object
# subset - the subset being encoded (supplies the new cmap and glyph map)
#
# Returns a binary (ASCII-8BIT) string laid out per the OpenType OS/2
# table spec: fixed 16-bit fields, the 10-byte PANOSE classification,
# the recomputed Unicode range bits, vendor id, char indexes, and the
# version-gated trailer fields.
def encode(os2, subset)
''.b.tap do |result|
# Fixed header: version through sFamilyClass, all 16-bit big-endian.
# xAvgCharWidth is recomputed for the subset's glyph population.
result << [
os2.version, avg_char_width_for(os2, subset), os2.weight_class,
os2.width_class, os2.type, os2.y_subscript_x_size,
os2.y_subscript_y_size, os2.y_subscript_x_offset,
os2.y_subscript_y_offset, os2.y_superscript_x_size,
os2.y_superscript_y_size, os2.y_superscript_x_offset,
os2.y_superscript_y_offset, os2.y_strikeout_size,
os2.y_strikeout_position, os2.family_class
].pack('n*')
# 10-byte PANOSE classification, copied verbatim.
result << os2.panose
# ulUnicodeRange1..4: original bits filtered down to the blocks that
# still have at least one code point in the subset.
new_char_range = unicode_blocks_for(os2, os2.char_range, subset)
result << BinUtils
.slice_int(
new_char_range.value,
bit_width: 32,
slice_count: 4
)
.pack('N*')
result << os2.vendor_id
# Code points actually mapped to a glyph in the subset's new cmap.
new_cmap_table = subset.new_cmap_table[:charmap]
code_points = new_cmap_table
.keys
.select { |k| new_cmap_table[k][:new] > 0 }
.sort
# "This value depends on which character sets the font supports.
# This field cannot represent supplementary character values
# (codepoints greater than 0xFFFF). Fonts that support
# supplementary characters should set the value in this field
# to 0xFFFF."
first_char_index = [code_points.first || 0, UNICODE_MAX].min
last_char_index = [code_points.last || 0, UNICODE_MAX].min
result << [
os2.selection, first_char_index, last_char_index
].pack('n*')
# Version 1+ trailer: typographic metrics and code page ranges.
if os2.version > 0
result << [
os2.ascent, os2.descent, os2.line_gap,
os2.win_ascent, os2.win_descent
].pack('n*')
result << BinUtils
.slice_int(
code_pages_for(subset).value,
bit_width: 32,
slice_count: 2
)
.pack('N*')
# Version 2+ trailer: x-height, cap height and char/context fields.
if os2.version > 1
result << [
os2.x_height, os2.cap_height, os2.default_char,
os2.break_char, os2.max_context
].pack('n*')
end
end
end
end
private
# Build the ulCodePageRange bit field for a subset.
# Unicode subsets report no specific code page (empty field); an 8-bit
# subset turns on the single bit mapped from its Windows code page.
def code_pages_for(subset)
  bits = BitField.new(0)
  unless subset.unicode?
    bits.on(CODE_PAGE_BITS[subset.code_page])
  end
  bits
end
# Recompute the ulUnicodeRange bits for a subset font. A bit stays on
# only when it was on in the original field AND at least one code point
# belonging to that Unicode block survives in the subset's cmap.
# Non-unicode subsets get an empty field.
def unicode_blocks_for(os2, original_field, subset)
  result = BitField.new(0)
  return result unless subset.unicode?

  surviving = Set.new(subset.new_cmap_table[:charmap].keys)
  group_original_code_points_by_bit(os2).each do |bit, block_code_points|
    next if original_field.off?(bit)

    result.on(bit) if block_code_points.any? { |cp| surviving.include?(cp) }
  end
  result
end
# Group the original font's mapped code points by the ulUnicodeRange bit
# of the Unicode block they fall into. Code points outside every known
# block are dropped. Returns a Hash of bit index => Array of code points.
def group_original_code_points_by_bit(os2)
  groups = Hash.new { |hash, bit| hash[bit] = [] }
  os2.file.cmap.unicode.first.code_map.each_key do |code_point|
    # Locate the Unicode block containing this code point, if any.
    block = UNICODE_RANGES.find { |range| range.cover?(code_point) }
    bit = UNICODE_BLOCKS[block]
    groups[bit] << code_point unless bit.nil?
  end
  groups
end
# Compute xAvgCharWidth for the subset, dispatching on its encoding:
# Microsoft symbol subsets use a plain average, everything else uses
# the spec's frequency-weighted lowercase average.
def avg_char_width_for(os2, subset)
  return avg_ms_symbol_char_width_for(os2, subset) if subset.microsoft_symbol?

  avg_weighted_char_width_for(os2, subset)
end
# Average advance width over the subset's glyphs, for Microsoft symbol
# fonts. All glyphs with metrics contribute to the width sum, but only
# glyphs with a non-zero advance count toward the divisor.
def avg_ms_symbol_char_width_for(os2, subset)
  width_sum = 0
  counted = 0
  # Walk the new -> old glyph id mapping so compound glyphs are included.
  subset.new_to_old_glyph.each do |_, old_gid|
    metric = os2.file.horizontal_metrics.for(old_gid)
    next unless metric

    width_sum += metric.advance_width
    counted += 1 if metric.advance_width > 0
  end
  return 0 if counted == 0

  width_sum / counted # integer division; expected to be whole
end
# Compute xAvgCharWidth using the OS/2 spec's frequency-weighted formula
# over the space character and the lowercase Latin letters a-z.
# Falls back to a plain average over every subset glyph when not all 26
# lowercase letters are present. Raises if the subset lacks the space
# glyph, which the weighted formula requires.
def avg_weighted_char_width_for(os2, subset)
# make sure the subset includes the space char
unless subset.to_unicode_map[CODEPOINT_SPACE]
raise SPACE_GLYPH_MISSING_ERROR
end
space_gid = os2.file.cmap.unicode.first[CODEPOINT_SPACE]
space_hm = os2.file.horizontal_metrics.for(space_gid)
return 0 unless space_hm
# Seed the weighted sum with the space glyph's contribution.
total_weight = space_hm.advance_width * WEIGHT_SPACE
num_lowercase = 0
# calculate the weighted sum of all the lowercase widths in
# the subset
LOWERCASE_START.upto(LOWERCASE_END) do |lowercase_cp|
# make sure the subset includes the character
next unless subset.to_unicode_map[lowercase_cp]
lowercase_gid = os2.file.cmap.unicode.first[lowercase_cp]
lowercase_hm = os2.file.horizontal_metrics.for(lowercase_gid)
num_lowercase += 1
# Weight by the letter's relative frequency in English text.
total_weight += lowercase_hm.advance_width *
WEIGHT_LOWERCASE[lowercase_cp - 'a'.ord]
end
# return if all lowercase characters are present in the subset
# (weights sum to 1000, hence the divisor)
return total_weight / 1000 if num_lowercase == LOWERCASE_COUNT
# If not all lowercase characters are present in the subset, take
# the average width of all the subsetted characters. This differs
# from avg_ms_char_width_for in that it includes zero-width glyphs
# in the calculation.
total_width = 0
num_glyphs = subset.new_to_old_glyph.size
# use new -> old glyph mapping in order to include compound glyphs
# in the calculation
subset.new_to_old_glyph.each do |_, old_gid|
if (metric = os2.file.horizontal_metrics.for(old_gid))
total_width += metric.advance_width
end
end
return 0 if num_glyphs == 0
total_width / num_glyphs # this should be a whole number
end
end
private
# Parse the OS/2 table from the font's byte stream into instance
# variables, following the field order of the OpenType OS/2 spec.
# Fields past the version-0 block are only read when the table's
# version number says they are present.
def parse!
@version = read(2, 'n').first
@ave_char_width = read_signed(1).first
@weight_class, @width_class = read(4, 'nn')
# Twelve consecutive signed 16-bit fields: fsType through sFamilyClass.
@type, @y_subscript_x_size, @y_subscript_y_size, @y_subscript_x_offset,
@y_subscript_y_offset, @y_superscript_x_size, @y_superscript_y_size,
@y_superscript_x_offset, @y_superscript_y_offset, @y_strikeout_size,
@y_strikeout_position, @family_class = read_signed(12)
# 10-byte PANOSE classification, kept as a raw string.
@panose = io.read(10)
# ulUnicodeRange1..4 stitched into a single 128-bit field.
@char_range = BitField.new(
BinUtils.stitch_int(read(16, 'N*'), bit_width: 32)
)
@vendor_id = io.read(4)
@selection, @first_char_index, @last_char_index = read(6, 'n*')
if @version > 0
@ascent, @descent, @line_gap = read_signed(3)
@win_ascent, @win_descent = read(4, 'nn')
# ulCodePageRange1..2 stitched into a single 64-bit field.
@code_page_range = BitField.new(
BinUtils.stitch_int(read(8, 'N*'), bit_width: 32)
)
if @version > 1
@x_height, @cap_height = read_signed(2)
@default_char, @break_char, @max_context = read(6, 'nnn')
# Set this to zero until GSUB/GPOS support has been implemented.
# This value is calculated via those tables, and should be set to
# zero if the data is not available.
@max_context = 0
end
end
end
end
end
end
| 39.273632 | 80 | 0.561502 |
6a8bd5f69072a9629743488f26d91eb691a2728a | 184 | require 'test_helper'
# Integration test for UsertypesController.
class UsertypesControllerTest < ActionDispatch::IntegrationTest
# GET index should render successfully (HTTP 2xx).
test "should get index" do
get usertypes_index_url
assert_response :success
end
end
| 18.4 | 63 | 0.788043 |
1a09ca7b73290b1b35247e0465b98ef5e098f9cf | 635 | require 'yajl/json_gem'
require 'yajl'
require 'byebug'
require 'pretty_search/version'
require 'pretty_search/cli_options'
require 'pretty_search/query'
require 'pretty_search/collection'
require 'pretty_search/document'
# Entry point for the pretty_search command-line tool.
module PrettySearch
  # Raised when a required CLI option was not supplied.
  class MissingParameter < StandardError; end

  # Load the collection stored at +data+, search it for +query+, and
  # return a printable result string. Any extra keyword options are
  # forwarded to the collection loader.
  #
  # Raises MissingParameter when +data+ is nil.
  def self.run(query, data: nil, **options)
    raise MissingParameter, 'Data file is required, please pass in as --data' if data.nil?

    matches = PrettySearch::Collection.load(data, options).search(query)
    matches.empty? ? 'No records found.' : matches.join("\n")
  end
end
| 23.518519 | 79 | 0.722835 |
ac5d9c724281d0b477cb58f05161a4469c51d4c3 | 8,799 | # encoding: utf-8
require File.expand_path('../../spec_helper.rb', __FILE__)
describe Backup::Storage::CloudFiles do
let(:model) { Backup::Model.new(:test_trigger, 'test label') }
let(:storage) do
Backup::Storage::CloudFiles.new(model) do |cf|
cf.username = 'my_username'
cf.api_key = 'my_api_key'
cf.auth_url = 'lon.auth.api.rackspacecloud.com'
cf.container = 'my_container'
cf.keep = 5
end
end
it 'should be a subclass of Storage::Base' do
Backup::Storage::CloudFiles.
superclass.should == Backup::Storage::Base
end
describe '#initialize' do
after { Backup::Storage::CloudFiles.clear_defaults! }
it 'should load pre-configured defaults through Base' do
Backup::Storage::CloudFiles.any_instance.expects(:load_defaults!)
storage
end
it 'should pass the model reference to Base' do
storage.instance_variable_get(:@model).should == model
end
it 'should pass the storage_id to Base' do
storage = Backup::Storage::CloudFiles.new(model, 'my_storage_id')
storage.storage_id.should == 'my_storage_id'
end
context 'when no pre-configured defaults have been set' do
it 'should use the values given' do
storage.username.should == 'my_username'
storage.api_key.should == 'my_api_key'
storage.auth_url.should == 'lon.auth.api.rackspacecloud.com'
storage.container.should == 'my_container'
storage.servicenet.should == false
storage.path.should == 'backups'
storage.storage_id.should be_nil
storage.keep.should == 5
end
it 'should use default values if none are given' do
storage = Backup::Storage::CloudFiles.new(model)
storage.username.should be_nil
storage.api_key.should be_nil
storage.auth_url.should be_nil
storage.container.should be_nil
storage.servicenet.should == false
storage.path.should == 'backups'
storage.storage_id.should be_nil
storage.keep.should be_nil
end
end # context 'when no pre-configured defaults have been set'
context 'when pre-configured defaults have been set' do
before do
Backup::Storage::CloudFiles.defaults do |s|
s.username = 'some_username'
s.api_key = 'some_api_key'
s.auth_url = 'some_auth_url'
s.container = 'some_container'
s.servicenet = true
s.path = 'some_path'
s.keep = 15
end
end
it 'should use pre-configured defaults' do
storage = Backup::Storage::CloudFiles.new(model)
storage.username.should == 'some_username'
storage.api_key.should == 'some_api_key'
storage.auth_url.should == 'some_auth_url'
storage.container.should == 'some_container'
storage.servicenet.should == true
storage.path.should == 'some_path'
storage.storage_id.should be_nil
storage.keep.should == 15
end
it 'should override pre-configured defaults' do
storage = Backup::Storage::CloudFiles.new(model) do |s|
s.username = 'new_username'
s.api_key = 'new_api_key'
s.auth_url = 'new_auth_url'
s.container = 'new_container'
s.servicenet = false
s.path = 'new_path'
s.keep = 10
end
storage.username.should == 'new_username'
storage.api_key.should == 'new_api_key'
storage.auth_url.should == 'new_auth_url'
storage.container.should == 'new_container'
storage.servicenet.should == false
storage.path.should == 'new_path'
storage.storage_id.should be_nil
storage.keep.should == 10
end
end # context 'when pre-configured defaults have been set'
end # describe '#initialize'
describe '#provider' do
it 'should set the Fog provider' do
storage.send(:provider).should == 'Rackspace'
end
end
describe '#connection' do
let(:connection) { mock }
context 'when @servicenet is set to false' do
it 'should create a new standard connection' do
Fog::Storage.expects(:new).once.with(
:provider => 'Rackspace',
:rackspace_username => 'my_username',
:rackspace_api_key => 'my_api_key',
:rackspace_auth_url => 'lon.auth.api.rackspacecloud.com',
:rackspace_servicenet => false
).returns(connection)
storage.send(:connection).should == connection
end
end
context 'when @servicenet is set to true' do
before do
storage.servicenet = true
end
it 'should create a new servicenet connection' do
Fog::Storage.expects(:new).once.with(
:provider => 'Rackspace',
:rackspace_username => 'my_username',
:rackspace_api_key => 'my_api_key',
:rackspace_auth_url => 'lon.auth.api.rackspacecloud.com',
:rackspace_servicenet => true
).returns(connection)
storage.send(:connection).should == connection
end
end
it 'should return an existing connection' do
Fog::Storage.expects(:new).once.returns(connection)
storage.send(:connection).should == connection
storage.send(:connection).should == connection
end
end # describe '#connection'
describe '#transfer!' do
let(:connection) { mock }
let(:package) { mock }
let(:file) { mock }
let(:s) { sequence '' }
before do
storage.instance_variable_set(:@package, package)
storage.stubs(:storage_name).returns('Storage::CloudFiles')
storage.stubs(:local_path).returns('/local/path')
storage.stubs(:connection).returns(connection)
end
it 'should transfer the package files' do
storage.expects(:remote_path_for).in_sequence(s).with(package).
returns('remote/path')
storage.expects(:files_to_transfer_for).in_sequence(s).with(package).
multiple_yields(
['2011.12.31.11.00.02.backup.tar.enc-aa', 'backup.tar.enc-aa'],
['2011.12.31.11.00.02.backup.tar.enc-ab', 'backup.tar.enc-ab']
)
# first yield
Backup::Logger.expects(:message).in_sequence(s).with(
"Storage::CloudFiles started transferring " +
"'2011.12.31.11.00.02.backup.tar.enc-aa'."
)
File.expects(:open).in_sequence(s).with(
File.join('/local/path', '2011.12.31.11.00.02.backup.tar.enc-aa'), 'r'
).yields(file)
connection.expects(:put_object).in_sequence(s).with(
'my_container', File.join('remote/path', 'backup.tar.enc-aa'), file
)
# second yield
Backup::Logger.expects(:message).in_sequence(s).with(
"Storage::CloudFiles started transferring " +
"'2011.12.31.11.00.02.backup.tar.enc-ab'."
)
File.expects(:open).in_sequence(s).with(
File.join('/local/path', '2011.12.31.11.00.02.backup.tar.enc-ab'), 'r'
).yields(file)
connection.expects(:put_object).in_sequence(s).with(
'my_container', File.join('remote/path', 'backup.tar.enc-ab'), file
)
storage.send(:transfer!)
end
end # describe '#transfer!'
describe '#remove!' do
let(:package) { mock }
let(:connection) { mock }
let(:s) { sequence '' }
before do
storage.stubs(:storage_name).returns('Storage::CloudFiles')
storage.stubs(:connection).returns(connection)
end
it 'should remove the package files' do
storage.expects(:remote_path_for).in_sequence(s).with(package).
returns('remote/path')
storage.expects(:transferred_files_for).in_sequence(s).with(package).
multiple_yields(
['2011.12.31.11.00.02.backup.tar.enc-aa', 'backup.tar.enc-aa'],
['2011.12.31.11.00.02.backup.tar.enc-ab', 'backup.tar.enc-ab']
)
# first yield
Backup::Logger.expects(:message).in_sequence(s).with(
"Storage::CloudFiles started removing " +
"'2011.12.31.11.00.02.backup.tar.enc-aa' " +
"from container 'my_container'."
)
connection.expects(:delete_object).in_sequence(s).with(
'my_container', File.join('remote/path', 'backup.tar.enc-aa')
)
# second yield
Backup::Logger.expects(:message).in_sequence(s).with(
"Storage::CloudFiles started removing " +
"'2011.12.31.11.00.02.backup.tar.enc-ab' " +
"from container 'my_container'."
)
connection.expects(:delete_object).in_sequence(s).with(
'my_container', File.join('remote/path', 'backup.tar.enc-ab')
)
storage.send(:remove!, package)
end
end # describe '#remove!'
end
| 34.505882 | 78 | 0.619616 |
bbc7c5da0c8429c9395c704ce0978e235e351f02 | 1,326 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core/handler/reverse_https'
require 'msf/base/sessions/meterpreter_options'
require 'msf/base/sessions/mettle_config'
require 'msf/base/sessions/meterpreter_aarch64_linux'
# Stageless (single) Linux Meterpreter payload for AArch64 using a
# reverse HTTPS transport, delivered via the Mettle implementation.
module MetasploitModule
# Size in bytes of the cached payload binary.
CachedSize = 1102904
include Msf::Payload::Single
include Msf::Sessions::MeterpreterOptions
include Msf::Sessions::MettleConfig
def initialize(info = {})
super(
update_info(
info,
'Name' => 'Linux Meterpreter, Reverse HTTPS Inline',
'Description' => 'Run the Meterpreter / Mettle server payload (stageless)',
'Author' => [
'Adam Cammack <adam_cammack[at]rapid7.com>',
'Brent Cook <brent_cook[at]rapid7.com>',
'timwr'
],
'Platform' => 'linux',
'Arch' => ARCH_AARCH64,
'License' => MSF_LICENSE,
'Handler' => Msf::Handler::ReverseHttps,
'Session' => Msf::Sessions::Meterpreter_aarch64_Linux
)
)
end
# Build the payload binary: an aarch64-linux-musl Mettle executable
# configured for a stageless HTTPS transport.
def generate
opts = {
scheme: 'https',
stageless: true
}
MetasploitPayloads::Mettle.new('aarch64-linux-musl', generate_config(opts)).to_binary :exec
end
end
| 28.212766 | 95 | 0.63273 |
7a705b81c32c6e9ab58f410653a779860800f4e6 | 59 | require "rman/version"
require "rman/cli"
# Top-level namespace for the rman gem.
module Rman
end
| 9.833333 | 22 | 0.762712 |
266901f9ae9ca1f1805033fc25fa94c21e080424 | 6,328 | require 'rails_helper'
describe TableHelper do
include FormatHelper
include UtilityHelper
include I18nHelper
include CrudTestHelper
before(:all) do
reset_db
setup_db
create_test_data
end
after(:all) { reset_db }
describe '#plain_table' do
subject { plain_table(%w[foo bar], :size) { |t| t.attrs :upcase } }
it 'contains attrs' do
is_expected.to match(/<th>Size<\/th>/)
end
it 'contains block' do
is_expected.to match(/<th>Upcase<\/th>/)
end
end
describe '#plain_table_or_message' do
context 'with empty data' do
subject { plain_table_or_message([]) }
it { is_expected.to be_html_safe }
it 'handles empty data' do
is_expected.to match(/div class=.table.\>.+\<\/div\>/)
end
end
context 'with data' do
subject do
plain_table_or_message(%w[foo bar], :size) { |t| t.attrs :upcase }
end
it { is_expected.to be_html_safe }
it 'renders table' do
is_expected.to match(/^\<table.*\<\/table\>$/)
end
end
end
describe '#list_table' do
let(:entries) { CrudTestModel.all }
context 'default' do
subject do
with_test_routing { list_table }
end
it 'has 7 rows' do
expect_pattern_count(REGEXP_ROWS, 7)
end
it 'has 14 sortable headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 14)
end
end
context 'with custom attributes' do
subject do
with_test_routing { list_table(:name, :children, :companion_id) }
end
it 'has 7 rows' do
expect_pattern_count(REGEXP_ROWS, 7)
end
it 'has 3 sortable headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 3)
end
end
context 'with custom block' do
subject do
with_test_routing do
list_table do |t|
t.attrs :name, :children, :companion_id
t.col('head') { |e| content_tag(:span, e.income.to_s) }
end
end
end
it 'has 7 rows' do
expect_pattern_count(REGEXP_ROWS, 7)
end
it 'has 4 headers' do
expect_pattern_count(REGEXP_HEADERS, 4)
end
it 'has 0 sortable headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 0)
end
it 'has 6 spans' do
expect_pattern_count(/<span>.+?<\/span>/, 6)
end
end
context 'with custom attributes and block' do
subject do
with_test_routing do
list_table(:name, :children, :companion_id) do |t|
t.col('head') { |e| content_tag(:span, e.income.to_s) }
end
end
end
it 'has 7 rows' do
expect_pattern_count(REGEXP_ROWS, 7)
end
it 'has 4 headers' do
expect_pattern_count(REGEXP_HEADERS, 4)
end
it 'has 3 sortable headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 3)
end
it 'has 6 spans' do
expect_pattern_count(/<span>.+?<\/span>/, 6)
end
end
context 'with ascending sort params' do
let(:params) { { sort: 'children', sort_dir: 'asc' } }
subject do
with_test_routing { list_table }
end
it 'has 13 sortable headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 13)
end
it 'has 1 ascending sort headers' do
expect_pattern_count(
/<th><a .*?sort_dir=desc.*?>Children<\/a> ↓<\/th>/, 1
)
end
end
context 'with descending sort params' do
let(:params) { { sort: 'children', sort_dir: 'desc' } }
subject do
with_test_routing { list_table }
end
it 'has 13 sortable headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 13)
end
it 'has 1 descending sort headers' do
expect_pattern_count(
/<th><a .*?sort_dir=asc.*?>Children<\/a> ↑<\/th>/, 1
)
end
end
context 'with custom column sort params' do
let(:params) { { sort: 'chatty', sort_dir: 'asc' } }
subject do
with_test_routing { list_table(:name, :children, :chatty) }
end
it 'has 2 sortable headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 2)
end
it 'has 1 ascending sort headers' do
expect_pattern_count(
/<th><a .*?sort_dir=desc.*?>Chatty<\/a> ↓<\/th>/, 1
)
end
end
end
describe '#crud_table' do
let(:entries) { CrudTestModel.all }
context 'default' do
subject do
with_test_routing { crud_table }
end
it 'has 7 rows' do
expect_pattern_count(REGEXP_ROWS, 7)
end
it 'has 14 sort headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 14)
end
it 'has 12 action cells' do
expect_pattern_count(REGEXP_ACTION_CELL, 12)
end
end
context 'with custom attrs' do
subject do
with_test_routing { crud_table(:name, :children, :companion_id) }
end
it 'has 3 sort headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 3)
end
end
context 'with custom block' do
subject do
with_test_routing do
crud_table do |t|
t.attrs :name, :children, :companion_id
t.col('head') { |e| content_tag(:span, e.income.to_s) }
end
end
end
it 'has 4 headers' do
expect_pattern_count(REGEXP_HEADERS, 6)
end
it 'has 6 custom col spans' do
expect_pattern_count(/<span>.+?<\/span>/m, 6)
end
it 'has 12 action cells' do
expect_pattern_count(REGEXP_ACTION_CELL, 12)
end
end
context 'with custom attributes and block' do
subject do
with_test_routing do
crud_table(:name, :children, :companion_id) do |t|
t.col('head') { |e| content_tag(:span, e.income.to_s) }
end
end
end
it 'has 3 sort headers' do
expect_pattern_count(REGEXP_SORT_HEADERS, 3)
end
it 'has 6 custom col spans' do
expect_pattern_count(/<span>.+?<\/span>/m, 6)
end
it 'has 12 action cells' do
expect_pattern_count(REGEXP_ACTION_CELL, 12)
end
end
end
# Assert that +pattern+ matches the rendered +subject+ exactly
# +count+ times.
def expect_pattern_count(pattern, count)
  occurrences = subject.scan(pattern)
  expect(occurrences.length).to eq(count)
end
end
| 23.264706 | 74 | 0.591814 |
1140f4875690aef595ea024ab7f3a44dd04c02a2 | 685 | # -*- encoding: utf-8 -*-
# stub: possibly 0.2.0 ruby lib
Gem::Specification.new do |s|
s.name = "possibly"
s.version = "0.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Mikko Koski"]
s.date = "2014-04-25"
s.description = "Maybe monad implementation for Ruby (some might call it Option pattern or Null pattern)"
s.email = "[email protected]"
s.homepage = "https://github.com/rap1ds/ruby-possibly"
s.licenses = ["MIT"]
s.rubygems_version = "2.4.5.1"
s.summary = "Maybe monad"
s.installed_by_version = "2.4.5.1" if s.respond_to? :installed_by_version
end
| 32.619048 | 107 | 0.683212 |
ab4e1b3a53069b60a1a2bf8b94abbfd469c39d85 | 548 | $LOAD_PATH.unshift __dir__
require 'bundler'
Bundler.require :default
require 'temporal'
require 'temporal/metrics_adapters/log'
# Dedicated logger for client metrics, tagged with the "metrics" progname.
metrics_logger = Logger.new(STDOUT, progname: 'metrics')
# Configure the Temporal client from the environment, with local
# development defaults for host, port, namespace and task queue.
Temporal.configure do |config|
config.host = ENV.fetch('TEMPORAL_HOST', 'localhost')
config.port = ENV.fetch('TEMPORAL_PORT', 7233).to_i
config.namespace = ENV.fetch('TEMPORAL_NAMESPACE', 'ruby-samples')
config.task_queue = ENV.fetch('TEMPORAL_TASK_QUEUE', 'general')
# Route client metrics through the log-based adapter.
config.metrics_adapter = Temporal::MetricsAdapters::Log.new(metrics_logger)
end
f716145bf49ee88b510489aa7ffd5894d3bca058 | 899 | class MicropostsController < ApplicationController
before_action :logged_in_user, only: [:create, :destroy]
before_action :correct_user, only: :destroy
def create
@micropost = current_user.microposts.build(micropost_params)
@micropost.image.attach(params[:micropost][:image])
if @micropost.save
flash[:success] = "Micropost created!"
redirect_to root_url
else
@feed_items = current_user.feed.paginate(page: params[:page])
render 'static_pages/home'
end
end
def destroy
@micropost.destroy
flash[:success] = "Micropost deleted"
redirect_to request.referrer || root_url
end
private
def micropost_params
params.require(:micropost).permit(:content, :image)
end
def correct_user
@micropost = current_user.microposts.find_by(id: params[:id])
redirect_to root_url if @micropost.nil?
end
end | 27.242424 | 67 | 0.701891 |
e84a6f4d25adbe44e7b0eae9fb83c529b76e6d34 | 2,630 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataMigration::Mgmt::V2018_04_19
module Models
#
# Model object.
#
#
class MigrateSqlServerSqlDbSyncTaskOutputDatabaseError < MigrateSqlServerSqlDbSyncTaskOutput
include MsRestAzure
def initialize
# Polymorphic discriminator: identifies this output type during
# serialization/deserialization (see the mapper's serialized_name).
@resultType = "DatabaseLevelErrorOutput"
end
attr_accessor :resultType
# @return [String] Error message
attr_accessor :error_message
# @return [Array<SyncMigrationDatabaseErrorEvent>] List of error events.
attr_accessor :events
#
# Mapper for MigrateSqlServerSqlDbSyncTaskOutputDatabaseError class as
# Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'DatabaseLevelErrorOutput',
type: {
name: 'Composite',
class_name: 'MigrateSqlServerSqlDbSyncTaskOutputDatabaseError',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
resultType: {
client_side_validation: true,
required: true,
serialized_name: 'resultType',
type: {
name: 'String'
}
},
error_message: {
client_side_validation: true,
required: false,
serialized_name: 'errorMessage',
type: {
name: 'String'
}
},
events: {
client_side_validation: true,
required: false,
serialized_name: 'events',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'SyncMigrationDatabaseErrorEventElementType',
type: {
name: 'Composite',
class_name: 'SyncMigrationDatabaseErrorEvent'
}
}
}
}
}
}
}
end
end
end
end
| 28.27957 | 96 | 0.49924 |
79fcfa0ef067740fd0397b4c99791e06e1f389cf | 11,186 | require "rails_helper"
# Feature specs for the publisher "copy vacancy" flow: duplicating an
# existing job listing, editing the copy's details, then publishing or
# cancelling it — including validation and edge cases (invalid originals,
# pending/expired originals, bad dates).
RSpec.describe "Copying a vacancy" do
  let(:school) { create(:school) }
  let(:document_copy) { double("document_copy") }

  # Stub out document copying so no external (Google) calls are made.
  before do
    allow(DocumentCopy).to receive(:new).and_return(document_copy)
    allow(document_copy).to receive(:copy).and_return(document_copy)
    allow(document_copy).to receive_message_chain(:copied, :web_content_link).and_return("test_url")
    allow(document_copy).to receive_message_chain(:copied, :id).and_return("test_id")
    allow(document_copy).to receive(:google_error).and_return(false)
  end

  # Sign in as a publisher for the school before each example.
  before(:each) do
    stub_publishers_auth(urn: school.urn)
  end

  describe "#cancel_copy" do
    scenario "a copy can be cancelled using the cancel copy back link" do
      original_vacancy = build(:vacancy, :past_publish)
      original_vacancy.save(validate: false) # Validation prevents publishing on a past date
      original_vacancy.organisation_vacancies.create(organisation: school)
      new_vacancy = original_vacancy.dup
      new_vacancy.job_title = "A new job title"

      visit organisation_path

      within(".card-component__actions") do
        click_on I18n.t("jobs.copy_link")
      end

      fill_in_copy_vacancy_form_fields(new_vacancy)
      click_on(I18n.t("buttons.cancel_copy"), class: "govuk-back-link")

      expect(page.current_path).to eq(organisation_path)
      expect(page).not_to have_content("A new job title")
    end

    scenario "a copy can be cancelled using the cancel copy link" do
      original_vacancy = build(:vacancy, :past_publish)
      original_vacancy.save(validate: false) # Validation prevents publishing on a past date
      original_vacancy.organisation_vacancies.create(organisation: school)
      new_vacancy = original_vacancy.dup
      new_vacancy.job_title = "A new job title"

      visit organisation_path

      within(".card-component__actions") do
        click_on I18n.t("jobs.copy_link")
      end

      fill_in_copy_vacancy_form_fields(new_vacancy)
      click_on(I18n.t("buttons.cancel_copy"), class: "govuk-link")

      expect(page.current_path).to eq(organisation_path)
      expect(page).not_to have_content("A new job title")
    end
  end

  # Happy path: copy, edit, review, submit, and verify the published listing
  # shows the new details and none of the original's.
  scenario "a job can be successfully copied and published" do
    original_vacancy = build(:vacancy, :past_publish)
    original_vacancy.save(validate: false) # Validation prevents publishing on a past date
    original_vacancy.organisation_vacancies.create(organisation: school)

    new_vacancy = original_vacancy.dup
    new_vacancy.organisation_vacancies.build(organisation: school)
    new_vacancy.job_title = "A new job title"
    new_vacancy.starts_on = 35.days.from_now
    new_vacancy.publish_on = 0.days.from_now
    new_vacancy.expires_at = new_vacancy.expires_on = 30.days.from_now

    visit organisation_path

    within(".card-component__actions") do
      click_on I18n.t("jobs.copy_link")
    end

    within("h1.govuk-heading-m") do
      expect(page).to have_content(I18n.t("jobs.copy_job_title", job_title: original_vacancy.job_title))
    end

    fill_in_copy_vacancy_form_fields(new_vacancy)
    click_on I18n.t("buttons.continue")

    within("h2.govuk-heading-l") do
      expect(page).to have_content(I18n.t("jobs.copy_review_heading"))
    end

    click_on I18n.t("buttons.submit_job_listing")

    expect(page).to have_content(I18n.t("jobs.confirmation_page.submitted"))
    click_on I18n.t("jobs.confirmation_page.view_posted_job")

    expect(page).to have_content(new_vacancy.job_title)
    expect(page).to have_content(new_vacancy.starts_on.to_s.strip)
    expect(page).to have_content(new_vacancy.publish_on.to_s.strip)

    expect(page).not_to have_content(original_vacancy.job_title)
    expect(page).not_to have_content(original_vacancy.starts_on)
    expect(page).not_to have_content(original_vacancy.publish_on)
    expect(page).not_to have_content(original_vacancy.expires_on)

    new_application_deadline = "#{format_date(new_vacancy.expires_on)} at #{format_time(new_vacancy.expires_at)}"
    expect(page).to have_content(new_application_deadline)
  end

  context "when the original job is now invalid" do
    # The copy inherits the invalid field (missing about_school) and cannot
    # be published until the publisher fixes it on the review page.
    scenario "the job can be successfully copied but not published until valid" do
      original_vacancy = build(:vacancy, :complete, about_school: nil, job_location: "at_one_school")
      original_vacancy.send(:set_slug)
      original_vacancy.save(validate: false)
      original_vacancy.organisation_vacancies.create(organisation: school)

      visit organisation_path

      new_vacancy = original_vacancy.dup
      new_vacancy.job_title = "A new job title"
      new_vacancy.starts_on = 35.days.from_now
      new_vacancy.publish_on = 0.days.from_now
      new_vacancy.expires_at = new_vacancy.expires_on = 30.days.from_now

      within(".card-component__actions") do
        click_on I18n.t("jobs.copy_link")
      end

      within("h1.govuk-heading-m") do
        expect(page).to have_content(I18n.t("jobs.copy_job_title", job_title: original_vacancy.job_title))
      end

      fill_in_copy_vacancy_form_fields(new_vacancy)
      click_on I18n.t("buttons.continue")

      within("h2.govuk-heading-l") do
        expect(page).to have_content(I18n.t("jobs.copy_review_heading"))
      end

      expect(page).to have_content(I18n.t("messages.jobs.action_required.heading"))
      expect(page).to have_content(I18n.t("messages.jobs.action_required.message"))
      expect(page).to have_content(I18n.t("job_summary_errors.about_school.blank", organisation: "school"))

      click_on I18n.t("buttons.submit_job_listing")

      expect(page).to have_content(I18n.t("messages.jobs.action_required.heading"))
      expect(page).to have_content(I18n.t("messages.jobs.action_required.message"))
      expect(page).to have_content(I18n.t("job_summary_errors.about_school.blank", organisation: "school"))

      click_header_link(I18n.t("jobs.job_summary"))
      fill_in "publishers_job_listing_job_summary_form[about_school]", with: "Some description about the school"
      click_on I18n.t("buttons.update_job")

      within("h2.govuk-heading-l") do
        expect(page).to have_content(I18n.t("jobs.copy_review_heading"))
      end

      expect(page).to have_content("Some description about the school")

      click_on I18n.t("buttons.submit_job_listing")
      expect(page).to have_content(I18n.t("jobs.confirmation_page.submitted"))

      click_on I18n.t("jobs.confirmation_page.view_posted_job")
      expect(page).to have_content("Some description about the school")
    end
  end

  context "when the original job is pending/scheduled/future_publish" do
    scenario "a job can be successfully copied" do
      original_vacancy = create(:vacancy, :future_publish)
      original_vacancy.organisation_vacancies.create(organisation: school)

      visit organisation_path
      click_on I18n.t("jobs.pending_jobs")

      within(".card-component__actions") do
        click_on I18n.t("jobs.copy_link")
      end

      within("h1.govuk-heading-m") do
        expect(page).to have_content(I18n.t("jobs.copy_job_title", job_title: original_vacancy.job_title))
      end

      click_on I18n.t("buttons.continue")

      within("h2.govuk-heading-l") do
        expect(page).to have_content(I18n.t("jobs.copy_review_heading"))
      end
    end
  end

  context "when the original job has expired" do
    scenario "a job can be successfully copied" do
      original_vacancy = create(:vacancy, :expired)
      original_vacancy.organisation_vacancies.create(organisation: school)

      new_vacancy = original_vacancy.dup
      new_vacancy.job_title = "A new job title"
      new_vacancy.starts_on = 35.days.from_now
      new_vacancy.publish_on = 0.days.from_now
      new_vacancy.expires_on = 30.days.from_now

      visit organisation_path
      click_on I18n.t("jobs.expired_jobs")

      within(".card-component__actions") do
        click_on I18n.t("jobs.copy_link")
      end

      within("h1.govuk-heading-m") do
        expect(page).to have_content(I18n.t("jobs.copy_job_title", job_title: original_vacancy.job_title))
      end

      fill_in_copy_vacancy_form_fields(new_vacancy)
      click_on I18n.t("buttons.continue")

      within("h2.govuk-heading-l") do
        expect(page).to have_content(I18n.t("jobs.copy_review_heading"))
      end
    end
  end

  context "when a copied job has an invalid date" do
    scenario "it shows a validation error" do
      original_vacancy = build(:vacancy, :past_publish)
      original_vacancy.save(validate: false) # Validation prevents publishing on a past date
      original_vacancy.organisation_vacancies.create(organisation: school)

      new_vacancy = original_vacancy.dup
      new_vacancy.job_title = "A new job title"
      new_vacancy.publish_on = 0.days.from_now
      new_vacancy.expires_at = new_vacancy.expires_on

      visit organisation_path

      within(".card-component__actions") do
        click_on I18n.t("jobs.copy_link")
      end

      within("h1.govuk-heading-m") do
        expect(page).to have_content(I18n.t("jobs.copy_job_title", job_title: original_vacancy.job_title))
      end

      fill_in_copy_vacancy_form_fields(new_vacancy)
      # Overwrite the month field with a nonsensical value.
      fill_in "publishers_job_listing_copy_vacancy_form[expires_on(2i)]", with: "090"
      click_on I18n.t("buttons.continue")

      expect(page).to have_content(I18n.t("activerecord.errors.models.vacancy.attributes.expires_on.invalid"))
    end
  end

  describe "validations" do
    let!(:original_vacancy) do
      vacancy = build(:vacancy, :past_publish)
      vacancy.save(validate: false) # Validation prevents publishing on a past date
      vacancy.organisation_vacancies.create(organisation: school)
      vacancy
    end

    # Each context below overrides one attribute via +new_attributes+ and
    # expects the matching validation message after submitting the form.
    let(:new_vacancy) { build(:vacancy, original_vacancy.attributes.merge(new_attributes)) }

    before do
      visit organisation_path

      within(".card-component__actions") do
        click_on I18n.t("jobs.copy_link")
      end

      expect(page).to have_content(I18n.t("jobs.copy_job_title", job_title: original_vacancy.job_title))

      fill_in_copy_vacancy_form_fields(new_vacancy)
      click_on I18n.t("buttons.continue")
    end

    context "when publish on is blank" do
      let(:new_attributes) { { publish_on: nil } }

      it "shows an error" do
        expect(page).to have_content(I18n.t("activerecord.errors.models.vacancy.attributes.publish_on.blank"))
      end
    end

    context "when publish on date is in the past" do
      let(:new_attributes) { { publish_on: 1.day.ago } }

      it "shows an error" do
        expect(page).to have_content(I18n.t("activerecord.errors.models.vacancy.attributes.publish_on.before_today"))
      end
    end

    context "when expires on is blank" do
      let(:new_attributes) { { expires_on: nil } }

      it "shows an error" do
        expect(page).to have_content(I18n.t("activerecord.errors.models.vacancy.attributes.expires_on.blank"))
      end
    end

    context "when job title is blank" do
      let(:new_attributes) { { job_title: nil } }

      it "shows an error" do
        expect(page).to have_content(I18n.t("activerecord.errors.models.vacancy.attributes.job_title.blank"))
      end
    end
  end
end
| 36.555556 | 117 | 0.720007 |
1827283265a25594923fbdf5b2d14893b49214f3 | 1,589 | require 'test_helper'
# Integration tests for UsersController authorization rules: actions that
# require login, ownership, or admin status must redirect appropriately.
class UsersControllerTest < ActionDispatch::IntegrationTest

  def setup
    @user = users(:michael)
    @other_user = users(:archer)
  end

  test "should redirect index when not logged in" do
    get users_path
    assert_redirected_to login_url
  end

  test "should get new" do
    get signup_path
    assert_response :success
  end

  test "should redirect edit when logged in as wrong user" do
    log_in_as(@other_user)
    get edit_user_path(@user)
    assert flash.empty?
    assert_redirected_to root_url
  end

  test "should redirect update when logged in as wrong user" do
    log_in_as(@other_user)
    patch user_path(@user), params: { user: { name: @user.name,
                                              email: @user.email } }
    assert flash.empty?
    assert_redirected_to root_url
  end

  # The admin flag must not be settable through mass assignment.
  test "should not allow the admin attribute to be edited via the web" do
    log_in_as(@other_user)
    assert_not @other_user.admin?
    patch user_path(@other_user), params: {
      user: { password: 'asdfgh',
              password_confirmation: 'asdfgh',
              admin: true } }
    assert_not @other_user.reload.admin?
  end

  test "should redirect destroy when not logged in" do
    assert_no_difference 'User.count' do
      delete user_path(@user)
    end
    assert_redirected_to login_url
  end

  test "should redirect destroy when logged in as a non-admin" do
    log_in_as(@other_user)
    assert_no_difference 'User.count' do
      delete user_path(@user)
    end
    assert_redirected_to root_url
  end
end
| 26.483333 | 73 | 0.670233 |
# A forum post belonging to a topic and a user. Tracks how many times it
# has been viewed via the +visits_count+ column.
class Post < ApplicationRecord
  belongs_to :topic
  belongs_to :user

  validates :title, presence: true, uniqueness: true
  validates :body, presence: true, length: { minimum: 20 }

  before_save :set_visits_count

  # Increments the visit counter by one and persists the record.
  def update_visits_count
    bumped = self.visits_count + 1
    self.update(visits_count: bumped)
  end

  private

  # Initializes the counter to zero the first time the record is saved
  # (runs on every save, but ||= leaves existing values untouched).
  def set_visits_count
    self.visits_count ||= 0
  end
end
# Manages access requests ("demands") that readers send to article authors.
class DemandsController < ApplicationController
  # POST /demands — a reader asks an author for access to an article.
  # The author, reader and article are looked up from flat params.
  def create
    @demand = Demand.new
    @demand.author = User.find(params[:author_id])
    @demand.reader = User.find(params[:reader_id])
    @demand.article = Article.find(params[:article_id])
    unless @demand.save
      flash[:notice] = "We could not complete your request."
      redirect_to root_path
      return
    end
    # send a request to author
    flash[:message] = "Your request has been sent to the author."
    @article = @demand.article
    respond_to do |format|
      format.html { redirect_to @article }
      format.json { render json: {"notification": "Your request has been sent to the author."} }
    end
  end

  # GET /demands/:id — renders the demand as HTML or JSON.
  def show
    @demand = Demand.find(params[:id])
    respond_to do |format|
      format.html { render :show }
      format.json { render json: @demand }
    end
  end

  # DELETE /demands/:id — removes the demand and returns home.
  def destroy
    @demand = Demand.find(params[:id])
    @demand.destroy
    flash[:notice] = "The request has been removed."
    redirect_to root_path
  end
end
| 26.243902 | 98 | 0.649628 |
912ada63e4e133d6b7467bb5fc5d7fd86b02b433 | 1,250 | require 'spec_helper'
# Integration specs for the Biki exchange adapter: pair listing, trade-page
# URL generation, and ticker shape/typing.
RSpec.describe 'Biki integration specs' do
  let(:client) { Cryptoexchange::Client.new }
  let(:market) { 'biki' }
  let(:btc_usdt_pair) do
    Cryptoexchange::Models::MarketPair.new(base: 'BTC', target: 'USDT', market: market)
  end

  it 'fetch pairs' do
    pairs = client.pairs('biki')
    expect(pairs).not_to be_empty

    pair = pairs.first
    expect(pair.base).to_not be nil
    expect(pair.target).to_not be nil
    expect(pair.market).to eq 'biki'
  end

  it 'give trade url' do
    trade_page_url = client.trade_page_url market, base: btc_usdt_pair.base, target: btc_usdt_pair.target
    expect(trade_page_url).to eq "https://www.biki.com/trade/BTC_USDT"
  end

  # Verifies the ticker's numeric fields are present and typed; timestamp
  # is expected to be nil for this exchange.
  it 'fetch ticker' do
    ticker = client.ticker(btc_usdt_pair)
    expect(ticker.base).to eq 'BTC'
    expect(ticker.target).to eq 'USDT'
    expect(ticker.market).to eq market
    expect(ticker.bid).to be_a Numeric
    expect(ticker.ask).to be_a Numeric
    expect(ticker.volume).to be_a Numeric
    expect(ticker.last).to be_a Numeric
    expect(ticker.low).to be_a Numeric
    expect(ticker.high).to be_a Numeric
    expect(ticker.change).to be_a Numeric
    expect(ticker.timestamp).to be nil
    expect(ticker.payload).to_not be nil
  end
end
| 29.069767 | 105 | 0.7048 |
38cff6f1f8073335d9ce6b0eafbb195d1d49ac23 | 4,222 | module VCAP::Services::ServiceBrokers::V2
class Client
CATALOG_PATH = '/v2/catalog'.freeze
def initialize(attrs)
@http_client = VCAP::Services::ServiceBrokers::V2::HttpClient.new(attrs)
end
def catalog
response = @http_client.get(CATALOG_PATH)
parse_response(:get, CATALOG_PATH, response)
end
# The broker is expected to guarantee uniqueness of instance_id.
# raises ServiceBrokerConflict if the id is already in use
def provision(instance)
path = "/v2/service_instances/#{instance.guid}"
response = @http_client.put(path, {
service_id: instance.service.broker_provided_id,
plan_id: instance.service_plan.broker_provided_id,
organization_guid: instance.organization.guid,
space_guid: instance.space.guid,
})
parsed_response = parse_response(:put, path, response)
instance.dashboard_url = parsed_response['dashboard_url']
# DEPRECATED, but needed because of not null constraint
instance.credentials = {}
end
def bind(binding)
path = "/v2/service_instances/#{binding.service_instance.guid}/service_bindings/#{binding.guid}"
response = @http_client.put(path, {
service_id: binding.service.broker_provided_id,
plan_id: binding.service_plan.broker_provided_id,
app_guid: binding.app_guid
})
parsed_response = parse_response(:put, path, response)
binding.credentials = parsed_response['credentials']
if parsed_response.has_key?('syslog_drain_url')
binding.syslog_drain_url = parsed_response['syslog_drain_url']
end
end
def unbind(binding)
path = "/v2/service_instances/#{binding.service_instance.guid}/service_bindings/#{binding.guid}"
response = @http_client.delete(path, {
service_id: binding.service.broker_provided_id,
plan_id: binding.service_plan.broker_provided_id,
})
parse_response(:delete, path, response)
end
def deprovision(instance)
path = "/v2/service_instances/#{instance.guid}"
response = @http_client.delete(path, {
service_id: instance.service.broker_provided_id,
plan_id: instance.service_plan.broker_provided_id,
})
parse_response(:delete, path, response)
rescue VCAP::Services::ServiceBrokers::V2::ServiceBrokerConflict => e
raise VCAP::Errors::ApiError.new_from_details("ServiceInstanceDeprovisionFailed", e.message)
end
def update_service_plan(instance, plan)
path = "/v2/service_instances/#{instance.guid}/"
@http_client.patch(path, {
plan_id: plan.broker_provided_id,
previous_values: {
plan_id: instance.service_plan.broker_provided_id,
service_id: instance.service.broker_provided_id,
organization_id: instance.organization.guid,
space_id: instance.space.guid
}
})
end
private
def uri_for(path)
URI(@http_client.url + path)
end
def parse_response(method, path, response)
uri = uri_for(path)
code = response.code.to_i
case code
when 204
return nil # no body
when 200..299
begin
response_hash = MultiJson.load(response.body)
rescue MultiJson::ParseError
end
unless response_hash.is_a?(Hash)
raise VCAP::Services::ServiceBrokers::V2::ServiceBrokerResponseMalformed.new(uri.to_s, method, response)
end
return response_hash
when HTTP::Status::UNAUTHORIZED
raise VCAP::Services::ServiceBrokers::V2::ServiceBrokerApiAuthenticationFailed.new(uri.to_s, method, response)
when 409
raise VCAP::Services::ServiceBrokers::V2::ServiceBrokerConflict.new(uri.to_s, method, response)
when 410
if method == :delete
logger.warn("Already deleted: #{uri.to_s}")
return nil
end
end
raise VCAP::Services::ServiceBrokers::V2::ServiceBrokerBadResponse.new(uri.to_s, method, response)
end
def logger
@logger ||= Steno.logger('cc.service_broker.v2.client')
end
end
end
| 31.507463 | 120 | 0.662245 |
6a7f994320866b733eaea96330faba96883c26e4 | 1,859 | require "pathname"
require "cork"
require "rspec"
require "plist"
require "fileutils"
require "tmpdir"
require "fakefs/spec_helpers"
ROOT = Pathname.new(File.expand_path("../../", __FILE__))
$LOAD_PATH.unshift((ROOT + "lib").to_s)
$LOAD_PATH.unshift((ROOT + "spec").to_s)
require "playgroundbook"
require "linter/abstract_linter"
require "linter/chapter_linter"
require "linter/chapter_manifest_linter"
require "linter/contents_linter"
require "linter/manifest_linter"
require "linter/page_linter"
require "linter/page_manifest_linter"
require "linter/cutscene_page_linter"
require "linter/cutscene_page_manifest_linter"
require "linter/root_manifest_linter"
require "renderer/contents_manifest_generator"
require "renderer/chapter_collator"
require "renderer/page_writer"
require "renderer/page_parser"
require "renderer/glossary_generator"
require "renderer/page_processor"
# Global RSpec configuration for the playgroundbook specs.
RSpec.configure do |config|
  config.color = true
  config.order = :random
  # Seed Ruby's global RNG from RSpec's seed so runs are reproducible.
  Kernel.srand config.seed
end

# Suppress RSpec's "potential false positive" warnings (e.g. bare raise_error).
RSpec::Expectations.configuration.on_potential_false_positives = :nothing

# Silence linter UI output during the test run.
Playgroundbook::AbstractLinter.ui = Cork::Board.new(silent: true)
# Path to the fixture playground book used throughout the specs.
def test_playground_book
  File.join("spec", "fixtures", "Starter.playgroundbook")
end
# Canonical book-metadata hash used as a fixture by the specs:
# name, chapter list, bundle identifier, resources dir, cover image,
# and a one-entry glossary.
def test_book_metadata
  metadata = {}
  metadata["name"] = "Testing Book"
  metadata["chapters"] = [{ "name" => "test_chapter" }]
  metadata["identifier"] = "com.ashfurrow.testing"
  metadata["resources"] = "assets"
  metadata["cover"] = "file.jpeg"
  metadata["glossary"] = [{ "term" => "definition" }]
  metadata
end
# Parses the given plist manifest file and returns its contents as a Hash.
# Defaults to the conventional playground-book manifest file name.
def get_manifest(file_name = Playgroundbook::ManifestFileName)
  Plist.parse_xml(file_name)
end
# Fixture Swift source for a chapter: shared preamble plus two pages
# delimited by "//// Page N" markers (the heredoc content is significant
# whitespace and must not be altered).
def test_chapter_contents
  <<-EOSwift
import UIKit
var str = "Hello, playground"
func sharedFunc() {
print("This should be accessible to all pages.")
}
//// Page 1
str = "Yo, it's page 1."
sharedFunc()
//// Page 2
str = "Page 2 awww yeah."
sharedFunc()
EOSwift
end
| 21.616279 | 73 | 0.740183 |
# View helpers related to users.
module UsersHelper

  # Returns the Gravatar image tag for the given user.
  # options[:size] selects the requested image size in pixels (default 80).
  def gravatar_for(user, options = { size: 80 })
    digest = Digest::MD5::hexdigest(user.email.downcase)
    url = "https://secure.gravatar.com/avatar/#{digest}?s=#{options[:size]}"
    image_tag(url, alt: user.name, class: "gravatar")
  end
end
| 29.916667 | 80 | 0.696379 |
# Chef LWRP resource definition exposing a single :cmd action.
actions :cmd

attribute :command, kind_of: String                      # command string to run — TODO confirm against provider
attribute :path, kind_of: String                         # path passed through to the provider
attribute :debug, default: true                          # debug toggle (defaults on)
attribute :env, kind_of: String, default: "dev"          # environment name, defaults to "dev"
attribute :optional_params, kind_of: Array, default: []  # extra parameters (array)
26edb7e12e4130ce4d5b38fb453bb28cac4c1fad | 1,235 | require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
require "autocomplete_select"
# Dummy Rails application used to exercise the autocomplete_select engine.
module Dummy
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Do not swallow errors in after_commit/after_rollback callbacks.
    # NOTE(review): this option is Rails 4.2-era and was removed in Rails 5 —
    # confirm the targeted Rails version.
    config.active_record.raise_in_transactional_callbacks = true
  end
end
| 37.424242 | 99 | 0.748988 |
08b9820da2b241a10de85340838c5635a2b80c1b | 8,032 | # encoding: UTF-8
#
# = net/ntlm.rb
#
# An NTLM Authentication Library for Ruby
#
# This code is a derivative of "dbf2.rb" written by yrock
# and Minero Aoki. You can find original code here:
# http://jp.rubyist.net/magazine/?0013-CodeReview
# -------------------------------------------------------------
# Copyright (c) 2005,2006 yrock
#
#
# 2006-02-11 refactored by Minero Aoki
# -------------------------------------------------------------
#
# All protocol information used to write this code stems from
# "The NTLM Authentication Protocol" by Eric Glass. The author
# would thank to him for this tremendous work and making it
# available on the net.
# http://davenport.sourceforge.net/ntlm.html
# -------------------------------------------------------------
# Copyright (c) 2003 Eric Glass
#
# -------------------------------------------------------------
#
# The author also looked Mozilla-Firefox-1.0.7 source code,
# namely, security/manager/ssl/src/nsNTLMAuthModule.cpp and
# Jonathan Bastien-Filiatrault's libntlm-ruby.
# "http://x2a.org/websvn/filedetails.php?
# repname=libntlm-ruby&path=%2Ftrunk%2Fntlm.rb&sc=1"
# The latter has a minor bug in its separate_keys function.
# The third key has to begin from the 14th character of the
# input string instead of 13th:)
#--
# $Id: ntlm.rb,v 1.1 2006/10/05 01:36:52 koheik Exp $
#++
require 'base64'
require 'openssl'
require 'openssl/digest'
require 'socket'
# Load Order is important here
require 'net/ntlm/exceptions'
require 'net/ntlm/field'
require 'net/ntlm/int16_le'
require 'net/ntlm/int32_le'
require 'net/ntlm/int64_le'
require 'net/ntlm/string'
require 'net/ntlm/field_set'
require 'net/ntlm/blob'
require 'net/ntlm/security_buffer'
require 'net/ntlm/message'
require 'net/ntlm/message/type0'
require 'net/ntlm/message/type1'
require 'net/ntlm/message/type2'
require 'net/ntlm/message/type3'
require 'net/ntlm/encode_util'
require 'net/ntlm/client'
require 'net/ntlm/channel_binding'
require 'net/ntlm/target_info'
module Net
  module NTLM

    # Constant string fed to the DES step when deriving the LAN Manager hash.
    LM_MAGIC = "KGS!@\#$%"
    # Seconds between the Windows FILETIME epoch (1601-01-01) and the Unix epoch.
    TIME_OFFSET = 11644473600
    # Largest unsigned 64-bit value; upper bound for random client challenges.
    MAX64 = 0xffffffffffffffff

    class << self

      # Valid format for LAN Manager hex digest portion: 32 hexadecimal characters.
      LAN_MANAGER_HEX_DIGEST_REGEXP = /[0-9a-f]{32}/i
      # Valid format for NT LAN Manager hex digest portion: 32 hexadecimal characters.
      NT_LAN_MANAGER_HEX_DIGEST_REGEXP = /[0-9a-f]{32}/i
      # Valid format for an NTLM hash composed of `'<LAN Manager hex digest>:<NT LAN Manager hex digest>'`.
      DATA_REGEXP = /\A#{LAN_MANAGER_HEX_DIGEST_REGEXP}:#{NT_LAN_MANAGER_HEX_DIGEST_REGEXP}\z/

      # Takes a string and determines whether it is a valid NTLM Hash
      # @param [String] data the string to validate
      # @return [Boolean] whether or not the string is a valid NTLM hash
      def is_ntlm_hash?(data)
        decoded_data = data.dup
        decoded_data = EncodeUtil.decode_utf16le(decoded_data)
        if DATA_REGEXP.match(decoded_data)
          true
        else
          false
        end
      end

      # Convert the value to a 64-bit little-endian binary string.
      # @param [Integer] val The value to pack
      # @return [String] 8-byte little-endian representation of +val+
      def pack_int64le(val)
        [val & 0x00000000ffffffff, val >> 32].pack("V2")
      end

      # Builds an array of strings that are at most 7 characters long
      # (the final chunk may be shorter).
      # @param [String] str The string to split
      # @api private
      def split7(str)
        s = str.dup
        until s.empty?
          (ret ||= []).push s.slice!(0, 7)
        end
        ret
      end

      # Expands each 7-byte chunk of +str+ into an 8-byte DES key by
      # appending a parity-style bit after every 7 bits of input.
      # @param [String] str String to generate keys for
      # @api private
      def gen_keys(str)
        split7(str).map{ |str7|
          bits = split7(str7.unpack("B*")[0]).inject('')\
            {|ret, tkn| ret += tkn + (tkn.gsub('1', '').size % 2).to_s }
          [bits].pack("B*")
        }
      end

      # Encrypts +plain+ under each DES key in +keys+ (DES-CBC, padding
      # disabled) and returns the array of ciphertexts.
      # Fixed: use OpenSSL::Cipher.new — the OpenSSL::Cipher::Cipher alias is
      # deprecated and removed in recent versions of the openssl gem.
      def apply_des(plain, keys)
        dec = OpenSSL::Cipher.new("des-cbc")
        dec.padding = 0
        keys.map {|k|
          dec.key = k
          dec.encrypt.update(plain) + dec.final
        }
      end

      # Generates a Lan Manager Hash
      # @param [String] password The password to base the hash on
      def lm_hash(password)
        keys = gen_keys password.upcase.ljust(14, "\0")
        apply_des(LM_MAGIC, keys).join
      end

      # Generate a NTLM Hash
      # @param [String] password The password to base the hash on
      # @option opt :unicode (false) Unicode encode the password
      def ntlm_hash(password, opt = {})
        pwd = password.dup
        unless opt[:unicode]
          pwd = EncodeUtil.encode_utf16le(pwd)
        end
        OpenSSL::Digest::MD4.digest pwd
      end

      # Generate a NTLMv2 Hash
      # @param [String] user The username
      # @param [String] password The password, or a pre-computed NTLM hash string
      # @param [String] target The domain or workstation to authenticate to
      # @option opt :unicode (false) Unicode encode the domain
      def ntlmv2_hash(user, password, target, opt={})
        if is_ntlm_hash? password
          # Extract the NT hex digest, which follows the 32-char LM part
          # and the ':' separator.
          decoded_password = EncodeUtil.decode_utf16le(password)
          ntlmhash = [decoded_password.upcase[33,65]].pack('H32')
        else
          ntlmhash = ntlm_hash(password, opt)
        end
        userdomain = user.upcase + target
        unless opt[:unicode]
          userdomain = EncodeUtil.encode_utf16le(userdomain)
        end
        OpenSSL::HMAC.digest(OpenSSL::Digest::MD5.new, ntlmhash, userdomain)
      end

      # Computes the LM challenge response.
      # @param [Hash] arg requires :lm_hash and :challenge
      # @raise [ArgumentError] when the argument hash cannot be read
      def lm_response(arg)
        begin
          hash = arg[:lm_hash]
          chal = arg[:challenge]
        rescue
          raise ArgumentError
        end
        chal = NTLM::pack_int64le(chal) if chal.is_a?(Integer)
        keys = gen_keys hash.ljust(21, "\0")
        apply_des(chal, keys).join
      end

      # Computes the NTLM challenge response.
      # @param [Hash] arg requires :ntlm_hash and :challenge
      def ntlm_response(arg)
        hash = arg[:ntlm_hash]
        chal = arg[:challenge]
        chal = NTLM::pack_int64le(chal) if chal.is_a?(Integer)
        keys = gen_keys hash.ljust(21, "\0")
        apply_des(chal, keys).join
      end

      # Computes the NTLMv2 response: HMAC-MD5 over the server challenge and
      # a serialized blob (timestamp, client challenge, target info), with
      # the blob appended.
      # @param [Hash] arg requires :ntlmv2_hash, :challenge and :target_info
      # @option opt :client_challenge client nonce (random when omitted)
      # @option opt :timestamp Unix time in seconds (Time.now when omitted)
      # @raise [ArgumentError] when the argument hash cannot be read
      def ntlmv2_response(arg, opt = {})
        begin
          key = arg[:ntlmv2_hash]
          chal = arg[:challenge]
          ti = arg[:target_info]
        rescue
          raise ArgumentError
        end
        chal = NTLM::pack_int64le(chal) if chal.is_a?(Integer)

        if opt[:client_challenge]
          cc = opt[:client_challenge]
        else
          cc = rand(MAX64)
        end
        cc = NTLM::pack_int64le(cc) if cc.is_a?(Integer)

        if opt[:timestamp]
          ts = opt[:timestamp]
        else
          ts = Time.now.to_i
        end
        # epoch -> 100ns intervals since Jan 1, 1601
        ts = 10_000_000 * (ts + TIME_OFFSET)

        blob = Blob.new
        blob.timestamp = ts
        blob.challenge = cc
        blob.target_info = ti
        bb = blob.serialize

        OpenSSL::HMAC.digest(OpenSSL::Digest::MD5.new, key, chal + bb) + bb
      end

      # Computes the LMv2 response: HMAC-MD5 over the server and client
      # challenges, with the client challenge appended.
      # @param [Hash] arg requires :ntlmv2_hash and :challenge
      # @option opt :client_challenge client nonce (random when omitted)
      def lmv2_response(arg, opt = {})
        key = arg[:ntlmv2_hash]
        chal = arg[:challenge]
        chal = NTLM::pack_int64le(chal) if chal.is_a?(Integer)
        if opt[:client_challenge]
          cc = opt[:client_challenge]
        else
          cc = rand(MAX64)
        end
        cc = NTLM::pack_int64le(cc) if cc.is_a?(Integer)
        OpenSSL::HMAC.digest(OpenSSL::Digest::MD5.new, key, chal + cc) + cc
      end

      # Computes the NTLM2 session response pair:
      # [padded client challenge, DES response over MD5(server||client)[0,8]].
      # @param [Hash] arg requires :ntlm_hash and :challenge
      # @raise [ArgumentError] when the argument hash cannot be read
      def ntlm2_session(arg, opt = {})
        begin
          passwd_hash = arg[:ntlm_hash]
          chal = arg[:challenge]
        rescue
          raise ArgumentError
        end
        chal = NTLM::pack_int64le(chal) if chal.is_a?(Integer)

        if opt[:client_challenge]
          cc = opt[:client_challenge]
        else
          cc = rand(MAX64)
        end
        cc = NTLM::pack_int64le(cc) if cc.is_a?(Integer)

        keys = gen_keys(passwd_hash.ljust(21, "\0"))
        session_hash = OpenSSL::Digest::MD5.digest(chal + cc).slice(0, 8)
        response = apply_des(session_hash, keys).join
        [cc.ljust(24, "\0"), response]
      end
    end
  end
end
| 30.082397 | 107 | 0.597734 |
# Static informational pages. Every action simply renders its identically
# named view; no controller-side logic is required.
class WelcomeController < ApplicationController
  def index; end

  def location; end

  def sponsors; end

  def about; end

  def party; end
end
| 8.941176 | 47 | 0.703947 |
f740059f185dac34e8af16f32a097857a203b804 | 102 | # desc "Explaining what the task does"
# task :angular_ui_tinymce_rails do
# # Task goes here
# end
| 20.4 | 38 | 0.72549 |