INSTRUCTION
stringlengths 202
35.5k
| RESPONSE
stringlengths 75
161k
|
---|---|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module ContainerRepository
module Gitlab
class CleanupTagsService < CleanupTagsBaseService
include ::Projects::ContainerRepository::Gitlab::Timeoutable
TAGS_PAGE_SIZE = 1000
def initialize(container_repository:, current_user: nil, params: {})
super
@params = params.dup
end
def execute
with_timeout do |start_time, result|
container_repository.each_tags_page(page_size: TAGS_PAGE_SIZE) do |tags|
execute_for_tags(tags, result)
raise TimeoutError if !timeout_disabled? && timeout?(start_time)
end
end
end
private
def execute_for_tags(tags, overall_result)
original_size = tags.size
filter_out_latest!(tags)
filter_by_name!(tags)
tags = filter_by_keep_n(tags)
tags = filter_by_older_than(tags)
overall_result[:before_delete_size] += tags.size
overall_result[:original_size] += original_size
result = delete_tags(tags)
overall_result[:deleted_size] += result[:deleted]&.size
overall_result[:deleted] += result[:deleted]
overall_result[:status] = result[:status] unless overall_result[:status] == :error
end
def with_timeout
result = success(
original_size: 0,
before_delete_size: 0,
deleted_size: 0,
deleted: []
)
yield Time.zone.now, result
result
rescue TimeoutError
result[:status] = :error
result
end
def filter_by_keep_n(tags)
partition_by_keep_n(tags).first
end
def filter_by_older_than(tags)
partition_by_older_than(tags).first
end
def pushed_at(tag)
tag.updated_at || tag.created_at
end
def timeout_disabled?
params['disable_timeout'] || false
end
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ContainerRepository::Gitlab::CleanupTagsService, feature_category: :container_registry do
using RSpec::Parameterized::TableSyntax
include_context 'for a cleanup tags service'
let_it_be(:user) { create(:user) }
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :private) }
let(:repository) { create(:container_repository, :root, project: project) }
let(:service) { described_class.new(container_repository: repository, current_user: user, params: params) }
let(:tags) { %w[latest A Ba Bb C D E] }
before do
allow(repository).to receive(:migrated?).and_return(true)
project.add_maintainer(user) if user
stub_container_registry_config(enabled: true)
stub_const("#{described_class}::TAGS_PAGE_SIZE", tags_page_size)
one_hour_ago = 1.hour.ago
five_days_ago = 5.days.ago
six_days_ago = 6.days.ago
one_month_ago = 1.month.ago
stub_tags(
{
'latest' => one_hour_ago,
'A' => one_hour_ago,
'Ba' => five_days_ago,
'Bb' => six_days_ago,
'C' => one_month_ago,
'D' => nil,
'E' => nil
}
)
end
describe '#execute' do
subject { service.execute }
context 'with several tags pages' do
let(:tags_page_size) { 2 }
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]]
it_behaves_like 'when regex matching everything is specified and latest is not kept',
delete_expectations: [%w[latest A], %w[Ba Bb], %w[C D], %w[E]]
it_behaves_like 'when delete regex matching specific tags is used'
it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex'
it_behaves_like 'with allow regex value',
delete_expectations: [%w[A], %w[C D], %w[E]]
it_behaves_like 'when keeping only N tags',
delete_expectations: [%w[Bb]]
it_behaves_like 'when not keeping N tags',
delete_expectations: [%w[A], %w[Ba Bb], %w[C]]
context 'when removing keeping only 3' do
let(:params) do
{
'name_regex_delete' => '.*',
'keep_n' => 3
}
end
it_behaves_like 'not removing anything'
end
it_behaves_like 'when removing older than 1 day',
delete_expectations: [%w[Ba Bb], %w[C]]
it_behaves_like 'when combining all parameters',
delete_expectations: [%w[Bb], %w[C]]
it_behaves_like 'when running a container_expiration_policy',
delete_expectations: [%w[Bb], %w[C]]
context 'with a timeout' do
let(:params) do
{ 'name_regex_delete' => '.*' }
end
it 'removes the first few pages' do
expect(service).to receive(:timeout?).and_return(false, true)
expect_delete(%w[A])
expect_delete(%w[Ba Bb])
response = expected_service_response(status: :error, deleted: %w[A Ba Bb], original_size: 4)
is_expected.to eq(response)
end
context 'when disable_timeout is set to true' do
let(:params) do
{ 'name_regex_delete' => '.*', 'disable_timeout' => true }
end
it 'does not check if it timed out' do
expect(service).not_to receive(:timeout?)
end
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A], %w[Ba Bb], %w[C D], %w[E]]
end
end
end
context 'with a single tags page' do
let(:tags_page_size) { 1000 }
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A Ba Bb C D E]]
it_behaves_like 'when delete regex matching specific tags is used'
it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex'
it_behaves_like 'with allow regex value',
delete_expectations: [%w[A C D E]]
it_behaves_like 'when keeping only N tags',
delete_expectations: [%w[Ba Bb C]]
it_behaves_like 'when not keeping N tags',
delete_expectations: [%w[A Ba Bb C]]
it_behaves_like 'when removing keeping only 3',
delete_expectations: [%w[Ba Bb C]]
it_behaves_like 'when removing older than 1 day',
delete_expectations: [%w[Ba Bb C]]
it_behaves_like 'when combining all parameters',
delete_expectations: [%w[Ba Bb C]]
it_behaves_like 'when running a container_expiration_policy',
delete_expectations: [%w[Ba Bb C]]
end
context 'with no tags page' do
let(:tags_page_size) { 1000 }
let(:deleted) { [] }
let(:params) { {} }
before do
allow(repository.gitlab_api_client)
.to receive(:tags)
.and_return({})
end
it { is_expected.to eq(expected_service_response(status: :success, deleted: [], original_size: 0)) }
end
end
private
def stub_tags(tags)
chunked = tags_page_size < tags.size
previous_last = nil
max_chunk_index = tags.size / tags_page_size
tags.keys.in_groups_of(tags_page_size, false).each_with_index do |chunked_tag_names, index|
last = index == max_chunk_index
pagination_needed = chunked && !last
response = {
pagination: pagination_needed ? pagination_with(last: chunked_tag_names.last) : {},
response_body: chunked_tag_names.map do |name|
tag_raw_response(name, tags[name])
end
}
allow(repository.gitlab_api_client)
.to receive(:tags)
.with(repository.path, page_size: described_class::TAGS_PAGE_SIZE, last: previous_last)
.and_return(response)
previous_last = chunked_tag_names.last
end
end
def pagination_with(last:)
{
next: {
uri: URI("http://test.org?last=#{last}")
}
}
end
def tag_raw_response(name, timestamp)
timestamp_field = name.start_with?('B') ? 'updated_at' : 'created_at'
{
'name' => name,
'digest' => 'sha256:1234567890',
'media_type' => 'application/vnd.oci.image.manifest.v1+json',
timestamp_field => timestamp&.iso8601
}
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module ContainerRepository
module ThirdParty
class DeleteTagsService
include BaseServiceUtility
def initialize(container_repository, tag_names)
@container_repository = container_repository
@tag_names = tag_names
end
# Replace a tag on the registry with a dummy tag.
# This is a hack as the registry doesn't support deleting individual
# tags. This code effectively pushes a dummy image and assigns the tag to it.
# This way when the tag is deleted only the dummy image is affected.
# This is used to preserve compatibility with third-party registries that
# don't support fast delete.
# See https://gitlab.com/gitlab-org/gitlab/issues/15737 for a discussion
def execute
return success(deleted: []) if @tag_names.empty?
# generates the blobs for the dummy image
dummy_manifest = @container_repository.client.generate_empty_manifest(@container_repository.path)
return error('could not generate manifest') if dummy_manifest.nil?
deleted_tags = replace_tag_manifests(dummy_manifest)
# Deletes the dummy image
# All created tag digests are the same since they all have the same dummy image.
# a single delete is sufficient to remove all tags with it
if deleted_tags.any? && @container_repository.delete_tag(deleted_tags.each_value.first)
success(deleted: deleted_tags.keys)
else
error("could not delete tags: #{@tag_names.join(', ')}".truncate(1000))
end
end
private
# update the manifests of the tags with the new dummy image
def replace_tag_manifests(dummy_manifest)
deleted_tags = @tag_names.map do |name|
digest = @container_repository.client.put_tag(@container_repository.path, name, dummy_manifest)
next unless digest
[name, digest]
end.compact.to_h
# make sure the digests are the same (it should always be)
digests = deleted_tags.values.uniq
# rubocop: disable CodeReuse/ActiveRecord
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(ArgumentError.new('multiple tag digests')) if digests.many?
deleted_tags
end
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ContainerRepository::ThirdParty::DeleteTagsService, feature_category: :container_registry do
include_context 'container repository delete tags service shared context'
let(:service) { described_class.new(repository, tags) }
describe '#execute' do
let(:tags) { %w[A Ba] }
subject { service.execute }
context 'with tags to delete' do
it 'deletes the tags by name' do
stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
tags.each { |tag| stub_put_manifest_request(tag) }
expect_delete_tags(['sha256:dummy'])
is_expected.to eq(status: :success, deleted: tags)
end
it 'succeeds when tag delete returns 404' do
stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
stub_put_manifest_request('A')
stub_put_manifest_request('Ba')
stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:dummy")
.to_return(status: 404, body: '', headers: {})
is_expected.to eq(status: :success, deleted: tags)
end
context 'with failures' do
context 'when the dummy manifest generation fails' do
before do
stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3', success: false)
end
it { is_expected.to eq(status: :error, message: 'could not generate manifest') }
end
context 'when updating tags fails' do
before do
stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3")
.to_return(status: 200, body: '', headers: {})
end
context 'all tag updates fail' do
before do
stub_put_manifest_request('A', 500, {})
stub_put_manifest_request('Ba', 500, {})
end
it { is_expected.to eq(status: :error, message: "could not delete tags: #{tags.join(', ')}") }
context 'when a large list of tag updates fails' do
let(:tags) { Array.new(1000) { |i| "tag_#{i}" } }
before do
expect(service).to receive(:replace_tag_manifests).and_return({})
end
it 'truncates the log message' do
expect(subject).to eq(status: :error, message: "could not delete tags: #{tags.join(', ')}".truncate(1000))
end
end
end
context 'a single tag update fails' do
before do
stub_put_manifest_request('A')
stub_put_manifest_request('Ba', 500, {})
stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:dummy")
.to_return(status: 404, body: '', headers: {})
end
it { is_expected.to eq(status: :success, deleted: ['A']) }
end
end
end
end
context 'with empty tags' do
let_it_be(:tags) { [] }
it 'does not remove anything' do
expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_digest)
is_expected.to eq(status: :success, deleted: [])
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module ContainerRepository
module ThirdParty
class CleanupTagsService < CleanupTagsBaseService
def initialize(container_repository:, current_user: nil, params: {})
super
@params = params.dup
@counts = { cached_tags_count: 0 }
end
def execute
tags = container_repository.tags
@counts[:original_size] = tags.size
filter_out_latest!(tags)
filter_by_name!(tags)
tags = truncate(tags)
populate_from_cache(tags)
tags = filter_keep_n(tags)
tags = filter_by_older_than(tags)
@counts[:before_delete_size] = tags.size
delete_tags(tags).merge(@counts).tap do |result|
result[:deleted_size] = result[:deleted]&.size
result[:status] = :error if @counts[:before_truncate_size] != @counts[:after_truncate_size]
end
end
private
def filter_keep_n(tags)
tags, tags_to_keep = partition_by_keep_n(tags)
cache_tags(tags_to_keep)
tags
end
def filter_by_older_than(tags)
tags, tags_to_keep = partition_by_older_than(tags)
cache_tags(tags_to_keep)
tags
end
def pushed_at(tag)
tag.created_at
end
def truncate(tags)
@counts[:before_truncate_size] = tags.size
@counts[:after_truncate_size] = tags.size
return tags if max_list_size == 0
# truncate the list to make sure that after the #filter_keep_n
# execution, the resulting list will be max_list_size
truncated_size = max_list_size + keep_n_as_integer
return tags if tags.size <= truncated_size
tags = tags.sample(truncated_size)
@counts[:after_truncate_size] = tags.size
tags
end
def populate_from_cache(tags)
@counts[:cached_tags_count] = cache.populate(tags) if caching_enabled?
end
def cache_tags(tags)
cache.insert(tags, older_than_in_seconds) if caching_enabled?
end
def cache
strong_memoize(:cache) do
::Gitlab::ContainerRepository::Tags::Cache.new(container_repository)
end
end
def caching_enabled?
result = current_application_settings.container_registry_expiration_policies_caching &&
container_expiration_policy &&
older_than.present?
!!result
end
def max_list_size
current_application_settings.container_registry_cleanup_tags_service_max_list_size.to_i
end
def current_application_settings
::Gitlab::CurrentSettings.current_application_settings
end
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ContainerRepository::ThirdParty::CleanupTagsService, :clean_gitlab_redis_cache, feature_category: :container_registry do
using RSpec::Parameterized::TableSyntax
include_context 'for a cleanup tags service'
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :private) }
let(:repository) { create(:container_repository, :root, project: project) }
let(:service) { described_class.new(container_repository: repository, current_user: user, params: params) }
let(:tags) { %w[latest A Ba Bb C D E] }
before do
project.add_maintainer(user) if user
stub_container_registry_config(enabled: true)
stub_container_registry_tags(
repository: repository.path,
tags: tags
)
stub_tag_digest('latest', 'sha256:configA')
stub_tag_digest('A', 'sha256:configA')
stub_tag_digest('Ba', 'sha256:configB')
stub_tag_digest('Bb', 'sha256:configB')
stub_tag_digest('C', 'sha256:configC')
stub_tag_digest('D', 'sha256:configD')
stub_tag_digest('E', nil)
stub_digest_config('sha256:configA', 1.hour.ago)
stub_digest_config('sha256:configB', 5.days.ago)
stub_digest_config('sha256:configC', 1.month.ago)
stub_digest_config('sha256:configD', nil)
end
describe '#execute' do
subject { service.execute }
it_behaves_like 'when regex matching everything is specified',
delete_expectations: [%w[A Ba Bb C D E]],
service_response_extra: {
before_truncate_size: 6,
after_truncate_size: 6,
before_delete_size: 6,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when regex matching everything is specified and latest is not kept',
delete_expectations: [%w[A Ba Bb C D E latest]],
service_response_extra: {
before_truncate_size: 7,
after_truncate_size: 7,
before_delete_size: 7,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when delete regex matching specific tags is used',
service_response_extra: {
before_truncate_size: 2,
after_truncate_size: 2,
before_delete_size: 2,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when delete regex matching specific tags is used with overriding allow regex',
service_response_extra: {
before_truncate_size: 1,
after_truncate_size: 1,
before_delete_size: 1,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'with allow regex value',
delete_expectations: [%w[A C D E]],
service_response_extra: {
before_truncate_size: 4,
after_truncate_size: 4,
before_delete_size: 4,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when keeping only N tags',
delete_expectations: [%w[Bb Ba C]],
service_response_extra: {
before_truncate_size: 4,
after_truncate_size: 4,
before_delete_size: 3,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when not keeping N tags',
delete_expectations: [%w[A Ba Bb C]],
service_response_extra: {
before_truncate_size: 4,
after_truncate_size: 4,
before_delete_size: 4,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when removing keeping only 3',
delete_expectations: [%w[Bb Ba C]],
service_response_extra: {
before_truncate_size: 6,
after_truncate_size: 6,
before_delete_size: 3,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when removing older than 1 day',
delete_expectations: [%w[Ba Bb C]],
service_response_extra: {
before_truncate_size: 6,
after_truncate_size: 6,
before_delete_size: 3,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when combining all parameters',
delete_expectations: [%w[Bb Ba C]],
service_response_extra: {
before_truncate_size: 6,
after_truncate_size: 6,
before_delete_size: 3,
cached_tags_count: 0
},
supports_caching: true
it_behaves_like 'when running a container_expiration_policy',
delete_expectations: [%w[Bb Ba C]],
service_response_extra: {
before_truncate_size: 6,
after_truncate_size: 6,
before_delete_size: 3,
cached_tags_count: 0
},
supports_caching: true
context 'when running a container_expiration_policy with caching' do
let(:user) { nil }
let(:params) do
{
'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day',
'container_expiration_policy' => true
}
end
it 'expects caching to be used' do
expect_delete(%w[Bb Ba C], container_expiration_policy: true)
expect_caching
subject
end
context 'when setting set to false' do
before do
stub_application_setting(container_registry_expiration_policies_caching: false)
end
it 'does not use caching' do
expect_delete(%w[Bb Ba C], container_expiration_policy: true)
expect_no_caching
subject
end
end
end
context 'when truncating the tags list' do
let(:params) do
{
'name_regex_delete' => '.*',
'keep_n' => 1
}
end
shared_examples 'returning the response' do
|status:, original_size:, before_truncate_size:, after_truncate_size:, before_delete_size:|
it 'returns the response' do
expect_no_caching
result = subject
service_response = expected_service_response(
status: status,
original_size: original_size,
deleted: nil
).merge(
before_truncate_size: before_truncate_size,
after_truncate_size: after_truncate_size,
before_delete_size: before_delete_size,
cached_tags_count: 0
)
expect(result).to eq(service_response)
end
end
where(:max_list_size, :delete_tags_service_status, :expected_status, :expected_truncated) do
10 | :success | :success | false
10 | :error | :error | false
3 | :success | :error | true
3 | :error | :error | true
0 | :success | :success | false
0 | :error | :error | false
end
with_them do
before do
stub_application_setting(container_registry_cleanup_tags_service_max_list_size: max_list_size)
allow_next_instance_of(Projects::ContainerRepository::DeleteTagsService) do |service|
allow(service).to receive(:execute).and_return(status: delete_tags_service_status)
end
end
original_size = 7
keep_n = 1
it_behaves_like(
'returning the response',
status: params[:expected_status],
original_size: original_size,
before_truncate_size: original_size - keep_n,
after_truncate_size: params[:expected_truncated] ? params[:max_list_size] + keep_n : original_size - keep_n,
# one tag is filtered out with older_than filter
before_delete_size: params[:expected_truncated] ? params[:max_list_size] : original_size - keep_n - 1
)
end
end
context 'with caching', :freeze_time do
let(:params) do
{
'name_regex_delete' => '.*',
'keep_n' => 1,
'older_than' => '1 day',
'container_expiration_policy' => true
}
end
let(:tags_and_created_ats) do
{
'A' => 1.hour.ago,
'Ba' => 5.days.ago,
'Bb' => 5.days.ago,
'C' => 1.month.ago,
'D' => nil,
'E' => nil
}
end
let(:cacheable_tags) { tags_and_created_ats.reject { |_, value| value.nil? } }
before do
expect_delete(%w[Bb Ba C], container_expiration_policy: true)
# We froze time so we need to set the created_at stubs again
stub_digest_config('sha256:configA', 1.hour.ago)
stub_digest_config('sha256:configB', 5.days.ago)
stub_digest_config('sha256:configC', 1.month.ago)
end
it 'caches the created_at values' do
expect_mget(tags_and_created_ats.keys)
expect_set(cacheable_tags)
expect(subject).to include(cached_tags_count: 0)
end
context 'with cached values' do
before do
::Gitlab::Redis::Cache.with do |redis|
redis.set(cache_key('C'), rfc3339(1.month.ago))
end
end
it 'uses them' do
expect_mget(tags_and_created_ats.keys)
# because C is already in cache, it should not be cached again
expect_set(cacheable_tags.except('C'))
# We will ping the container registry for all tags *except* for C because it's cached
expect(ContainerRegistry::Blob)
.to receive(:new).with(repository, { "digest" => "sha256:configA" }).and_call_original
expect(ContainerRegistry::Blob)
.to receive(:new).with(repository, { "digest" => "sha256:configB" }).twice.and_call_original
expect(ContainerRegistry::Blob).not_to receive(:new).with(repository, { "digest" => "sha256:configC" })
expect(ContainerRegistry::Blob)
.to receive(:new).with(repository, { "digest" => "sha256:configD" }).and_call_original
expect(subject).to include(cached_tags_count: 1)
end
end
def expect_mget(keys)
Gitlab::Redis::Cache.with do |redis|
parameters = keys.map { |k| cache_key(k) }
expect(redis).to receive(:mget).with(parameters).and_call_original
end
end
def expect_set(tags)
selected_tags = tags.map do |tag_name, created_at|
ex = 1.day.seconds - (Time.zone.now - created_at).seconds
[tag_name, created_at, ex.to_i] if ex.positive?
end.compact
return if selected_tags.count.zero?
Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:pipelined).and_call_original
expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
selected_tags.each do |tag_name, created_at, ex|
expect(pipeline).to receive(:set).with(cache_key(tag_name), rfc3339(created_at), ex: ex).and_call_original
end
end
end
end
def cache_key(tag_name)
"container_repository:{#{repository.id}}:tag:#{tag_name}:created_at"
end
def rfc3339(date_time)
# DateTime rfc3339 is different ActiveSupport::TimeWithZone rfc3339
# The caching will use DateTime rfc3339
DateTime.rfc3339(date_time.rfc3339).rfc3339
end
end
end
private
def stub_tag_digest(tag, digest)
allow(repository.client)
.to receive(:repository_tag_digest)
.with(repository.path, tag) { digest }
allow(repository.client)
.to receive(:repository_manifest)
.with(repository.path, tag) do
{ 'config' => { 'digest' => digest } } if digest
end
end
def stub_digest_config(digest, created_at)
allow(repository.client)
.to receive(:blob)
.with(repository.path, digest, nil) do
{ 'created' => created_at.to_datetime.rfc3339 }.to_json if created_at
end
end
def expect_caching
::Gitlab::Redis::Cache.with do |redis|
expect(redis).to receive(:mget).and_call_original
expect(redis).to receive(:pipelined).and_call_original
expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
expect(pipeline).to receive(:set).and_call_original
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module GroupLinks
class DestroyService < BaseService
def execute(group_link, skip_authorization: false)
unless valid_to_destroy?(group_link, skip_authorization)
return ServiceResponse.error(message: 'Not found', reason: :not_found)
end
if group_link.project.private?
TodosDestroyer::ProjectPrivateWorker.perform_in(Todo::WAIT_FOR_DELETE, project.id)
else
TodosDestroyer::ConfidentialIssueWorker.perform_in(Todo::WAIT_FOR_DELETE, nil, project.id)
end
link = group_link.destroy
refresh_project_authorizations_asynchronously(link.project)
# Until we compare the inconsistency rates of the new specialized worker and
# the old approach, we still run AuthorizedProjectsWorker
# but with some delay and lower urgency as a safety net.
link.group.refresh_members_authorized_projects(
priority: UserProjectAccessChangedService::LOW_PRIORITY
)
ServiceResponse.success(payload: { link: link })
end
private
def valid_to_destroy?(group_link, skip_authorization)
return false unless group_link
return true if skip_authorization
current_user.can?(:admin_project_group_link, group_link)
end
def refresh_project_authorizations_asynchronously(project)
AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)
end
end
end
end
Projects::GroupLinks::DestroyService.prepend_mod_with('Projects::GroupLinks::DestroyService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::GroupLinks::DestroyService, '#execute', feature_category: :groups_and_projects do
let_it_be(:user) { create :user }
let_it_be(:project) { create(:project, :private) }
let_it_be(:group) { create(:group) }
let_it_be(:group_user) { create(:user).tap { |user| group.add_guest(user) } }
let(:group_access) { Gitlab::Access::DEVELOPER }
let!(:group_link) { create(:project_group_link, project: project, group: group, group_access: group_access) }
subject { described_class.new(project, user) }
shared_examples_for 'removes group from project' do
it 'removes group from project' do
expect { subject.execute(group_link) }.to change { project.reload.project_group_links.count }.from(1).to(0)
end
end
shared_examples_for 'returns not_found' do
it do
expect do
result = subject.execute(group_link)
expect(result[:status]).to eq(:error)
expect(result[:reason]).to eq(:not_found)
end.not_to change { project.reload.project_group_links.count }
end
end
context 'if group_link is blank' do
let!(:group_link) { nil }
it_behaves_like 'returns not_found'
end
context 'if the user does not have access to destroy the link' do
it_behaves_like 'returns not_found'
end
context 'when the user has proper permissions to remove a group-link from a project' do
context 'when the user is a MAINTAINER in the project' do
before do
project.add_maintainer(user)
end
it_behaves_like 'removes group from project'
context 'project authorizations refresh' do
it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
.to receive(:perform_async).with(group_link.project.id)
subject.execute(group_link)
end
it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay to update project authorizations' do
stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false)
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in).with(
1.hour,
[[group_user.id]],
batch_delay: 30.seconds, batch_size: 100
)
)
subject.execute(group_link)
end
it 'updates project authorizations of users who had access to the project via the group share', :sidekiq_inline do
expect { subject.execute(group_link) }.to(
change { Ability.allowed?(group_user, :read_project, project) }
.from(true).to(false))
end
end
describe 'todos cleanup' do
context 'when project is private' do
it 'triggers todos cleanup' do
expect(TodosDestroyer::ProjectPrivateWorker).to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, project.id)
expect(project.private?).to be true
subject.execute(group_link)
end
end
context 'when project is public or internal' do
shared_examples_for 'removes confidential todos' do
it 'does not trigger todos cleanup' do
expect(TodosDestroyer::ProjectPrivateWorker).not_to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, project.id)
expect(TodosDestroyer::ConfidentialIssueWorker).to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, nil, project.id)
expect(project.private?).to be false
subject.execute(group_link)
end
end
context 'when project is public' do
let(:project) { create(:project, :public) }
it_behaves_like 'removes confidential todos'
end
context 'when project is internal' do
let(:project) { create(:project, :public) }
it_behaves_like 'removes confidential todos'
end
end
end
end
end
context 'when skipping authorization' do
context 'without providing a user' do
it 'destroys the link' do
expect do
described_class.new(project, nil).execute(group_link, skip_authorization: true)
end.to change { project.reload.project_group_links.count }.by(-1)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module GroupLinks
class UpdateService < BaseService
def initialize(group_link, user = nil)
super(group_link.project, user)
@group_link = group_link
end
def execute(group_link_params)
return ServiceResponse.error(message: 'Not found', reason: :not_found) unless allowed_to_update?
group_link.update!(group_link_params)
refresh_authorizations if requires_authorization_refresh?(group_link_params)
ServiceResponse.success
end
private
attr_reader :group_link
def allowed_to_update?
current_user.can?(:admin_project_member, project)
end
def refresh_authorizations
AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)
# Until we compare the inconsistency rates of the new specialized worker and
# the old approach, we still run AuthorizedProjectsWorker
# but with some delay and lower urgency as a safety net.
group_link.group.refresh_members_authorized_projects(
priority: UserProjectAccessChangedService::LOW_PRIORITY
)
end
def requires_authorization_refresh?(params)
params.include?(:group_access)
end
end
end
end
Projects::GroupLinks::UpdateService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::GroupLinks::UpdateService, '#execute', feature_category: :groups_and_projects do
let_it_be(:user) { create :user }
let_it_be(:group) { create :group }
let_it_be(:project) { create :project }
let_it_be(:group_user) { create(:user).tap { |user| group.add_developer(user) } }
let(:group_access) { Gitlab::Access::DEVELOPER }
let!(:link) { create(:project_group_link, project: project, group: group, group_access: group_access) }
let(:expiry_date) { 1.month.from_now.to_date }
let(:group_link_params) do
{ group_access: Gitlab::Access::GUEST,
expires_at: expiry_date }
end
subject { described_class.new(link, user).execute(group_link_params) }
shared_examples_for 'returns not_found' do
it do
result = subject
expect(result[:status]).to eq(:error)
expect(result[:reason]).to eq(:not_found)
end
end
context 'when the user does not have proper permissions to update a project group link' do
it_behaves_like 'returns not_found'
end
context 'when user has proper permissions to update a project group link' do
context 'when the user is a MAINTAINER in the project' do
before do
project.add_maintainer(user)
end
it 'updates existing link' do
expect(link.group_access).to eq(Gitlab::Access::DEVELOPER)
expect(link.expires_at).to be_nil
subject
link.reload
expect(link.group_access).to eq(Gitlab::Access::GUEST)
expect(link.expires_at).to eq(expiry_date)
end
context 'project authorizations update' do
it 'calls AuthorizedProjectUpdate::ProjectRecalculateWorker to update project authorizations' do
expect(AuthorizedProjectUpdate::ProjectRecalculateWorker)
.to receive(:perform_async).with(link.project.id)
subject
end
it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker ' \
'with a delay to update project authorizations' do
stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false)
expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
receive(:bulk_perform_in).with(
1.hour,
[[group_user.id]],
batch_delay: 30.seconds, batch_size: 100
)
)
subject
end
it 'updates project authorizations of users who had access to the project via the group share',
:sidekiq_inline do
expect { subject }.to(
change { Ability.allowed?(group_user, :developer_access, project) }
.from(true).to(false))
end
end
context 'with only param not requiring authorization refresh' do
let(:group_link_params) { { expires_at: Date.tomorrow } }
it 'does not perform any project authorizations update using ' \
'`AuthorizedProjectUpdate::ProjectRecalculateWorker`' do
expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).not_to receive(:perform_async)
subject
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
  module GroupLinks
    # Shares a project with a group by creating a ProjectGroupLink.
    # The generic validate/build/persist flow comes from the GroupLinkable
    # concern; this class supplies the project-group-link specifics.
    class CreateService < BaseService
      include GroupLinkable

      # @param project [Project] project to be shared
      # @param shared_with_group [Group] group the project is shared with
      # @param user [User] user performing the action
      # @param params [Hash] expects :link_group_access and :expires_at
      def initialize(project, shared_with_group, user, params)
        @shared_with_group = shared_with_group

        super(project, user, params)
      end

      private

      delegate :root_ancestor, to: :project

      # The current user must be able to see the target group, and sharing
      # must be permitted (sharing_allowed? is provided by GroupLinkable).
      def valid_to_create?
        can?(current_user, :read_namespace_via_membership, shared_with_group) && sharing_allowed?
      end

      # Builds (but does not persist) the new link record.
      def build_link
        @link = project.project_group_links.new(
          group: shared_with_group,
          group_access: params[:link_group_access],
          expires_at: params[:expires_at]
        )
      end

      # Refreshes project authorizations after the link is created:
      # a fast project-scoped recalculation plus a delayed per-user refresh.
      def setup_authorizations
        AuthorizedProjectUpdate::ProjectRecalculateWorker.perform_async(project.id)

        # AuthorizedProjectsWorker uses an exclusive lease per user but
        # specialized workers might have synchronization issues. Until we
        # compare the inconsistency rates of both approaches, we still run
        # AuthorizedProjectsWorker but with some delay and lower urgency as a
        # safety net.
        shared_with_group.refresh_members_authorized_projects(
          priority: UserProjectAccessChangedService::LOW_PRIORITY
        )
      end
    end
  end
end

Projects::GroupLinks::CreateService.prepend_mod_with('Projects::GroupLinks::CreateService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::GroupLinks::CreateService, '#execute', feature_category: :groups_and_projects do
  let_it_be(:user) { create :user }
  let_it_be(:group) { create :group }
  let_it_be(:project) { create(:project, namespace: create(:namespace, :with_namespace_settings)) }
  # A guest member of the shared group, used to observe authorization changes.
  let_it_be(:group_user) { create(:user).tap { |user| group.add_guest(user) } }

  let(:opts) do
    {
      link_group_access: Gitlab::Access::DEVELOPER,
      expires_at: nil
    }
  end

  subject { described_class.new(project, group, user, opts) }

  # Failure path: no link row is created and a 404-style error is returned.
  shared_examples_for 'not shareable' do
    it 'does not share and returns an error' do
      expect do
        result = subject.execute

        expect(result[:status]).to eq(:error)
        expect(result[:http_status]).to eq(404)
      end.not_to change { project.project_group_links.count }
    end
  end

  # Success path: exactly one link row is created.
  shared_examples_for 'shareable' do
    it 'adds group to project' do
      expect do
        result = subject.execute

        expect(result[:status]).to eq(:success)
      end.to change { project.project_group_links.count }.from(0).to(1)
    end
  end

  context 'when user has proper permissions to share a project with a group' do
    before do
      group.add_guest(user)
    end

    context 'when the user is a MAINTAINER in the project' do
      before do
        project.add_maintainer(user)
      end

      it_behaves_like 'shareable'

      it 'updates authorization', :sidekiq_inline do
        expect { subject.execute }.to(
          change { Ability.allowed?(group_user, :read_project, project) }
            .from(false).to(true))
      end

      context 'with specialized project_authorization workers' do
        let_it_be(:other_user) { create(:user) }

        before do
          group.add_developer(other_user)
        end

        it 'schedules authorization update for users with access to group' do
          stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false)

          # The generic worker must NOT run; the specialized workers must.
          expect(AuthorizedProjectsWorker).not_to(
            receive(:bulk_perform_async)
          )
          expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to(
            receive(:perform_async)
              .with(project.id)
              .and_call_original
          )
          expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
            receive(:bulk_perform_in).with(
              1.hour,
              array_including([user.id], [other_user.id]),
              batch_delay: 30.seconds, batch_size: 100
            ).and_call_original
          )

          subject.execute
        end
      end

      context 'when sharing outside the hierarchy is disabled' do
        let_it_be(:shared_group_parent) do
          create(:group,
            namespace_settings: create(:namespace_settings, prevent_sharing_groups_outside_hierarchy: true)
          )
        end

        let_it_be(:project, reload: true) { create(:project, group: shared_group_parent) }

        it_behaves_like 'not shareable'

        context 'when group is inside hierarchy' do
          let(:group) { create(:group, :private, parent: shared_group_parent) }

          it_behaves_like 'shareable'
        end
      end
    end
  end

  context 'when user does not have permissions to share the project with a group' do
    it_behaves_like 'not shareable'
  end

  context 'when group is blank' do
    let(:group) { nil }

    it_behaves_like 'not shareable'
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Projects
  module ImportExport
    # Runs a full (single-process) project export: executes every exporter,
    # archives the result, and optionally triggers an after-export strategy.
    # Raises Gitlab::ImportExport::Error on missing permission or failure.
    class ExportService < BaseService
      def initialize(*args)
        super

        @shared = project.import_export_shared
        @logger = Gitlab::Export::Logger.build
      end

      # @param after_export_strategy [Object, nil] object responding to
      #   #execute(current_user, project); notified when the export is done
      # @raise [Gitlab::ImportExport::Error] when the user may not export
      #   the project, or when any exporter/archive step fails
      def execute(after_export_strategy = nil)
        unless project.template_source? || can?(current_user, :admin_project, project)
          raise ::Gitlab::ImportExport::Error.permission_error(current_user, project)
        end

        save_all!
        execute_after_export_action(after_export_strategy)
      ensure
        # Always drop the temporary archive, on success and on failure.
        cleanup
      end

      # All exporters, in the order they are run.
      def exporters
        [
          version_saver, avatar_saver, project_tree_saver, uploads_saver,
          repo_saver, wiki_repo_saver, lfs_saver, snippets_repo_saver, design_repo_saver
        ]
      end

      protected

      # Extra context attached to measurement instrumentation.
      def extra_attributes_for_measurement
        {
          current_user: current_user&.name,
          project_full_path: project&.full_path,
          file_path: shared.export_path
        }
      end

      private

      attr_accessor :shared
      attr_reader :logger

      def execute_after_export_action(after_export_strategy)
        return unless after_export_strategy

        unless after_export_strategy.execute(current_user, project)
          notify_error
        end
      end

      def save_all!
        log_info('Project export started')

        if save_exporters && save_export_archive
          log_info('Project successfully exported')
        else
          notify_error!
        end
      end

      # `all?` short-circuits: exporters after the first failure do not run.
      def save_exporters
        exporters.all? do |exporter|
          log_info("#{exporter.class.name} saver started")

          exporter.save
        end
      end

      def save_export_archive
        @export_saver ||= Gitlab::ImportExport::Saver.save(exportable: project, shared: shared)
      end

      def version_saver
        @version_saver ||= Gitlab::ImportExport::VersionSaver.new(shared: shared)
      end

      def avatar_saver
        @avatar_saver ||= Gitlab::ImportExport::AvatarSaver.new(project: project, shared: shared)
      end

      def project_tree_saver
        @project_tree_saver ||= tree_saver_class.new(
          project: project,
          current_user: current_user,
          shared: shared,
          params: params,
          logger: logger)
      end

      def tree_saver_class
        Gitlab::ImportExport::Project::TreeSaver
      end

      def uploads_saver
        @uploads_saver ||= Gitlab::ImportExport::UploadsSaver.new(project: project, shared: shared)
      end

      def repo_saver
        @repo_saver ||= Gitlab::ImportExport::RepoSaver.new(exportable: project, shared: shared)
      end

      def wiki_repo_saver
        @wiki_repo_saver ||= Gitlab::ImportExport::WikiRepoSaver.new(exportable: project, shared: shared)
      end

      def lfs_saver
        @lfs_saver ||= Gitlab::ImportExport::LfsSaver.new(project: project, shared: shared)
      end

      def snippets_repo_saver
        @snippets_repo_saver ||= Gitlab::ImportExport::SnippetsRepoSaver.new(
          current_user: current_user,
          project: project,
          shared: shared
        )
      end

      def design_repo_saver
        @design_repo_saver ||= Gitlab::ImportExport::DesignRepoSaver.new(exportable: project, shared: shared)
      end

      def cleanup
        FileUtils.rm_rf(shared.archive_path) if shared&.archive_path
      end

      # Logs and notifies, then raises with the accumulated shared errors.
      def notify_error!
        notify_error

        raise Gitlab::ImportExport::Error, shared.errors.to_sentence
      end

      def log_info(message)
        logger.info(
          message: message,
          **log_base_data
        )
      end

      def notify_error
        logger.error(
          message: 'Project export error',
          export_errors: shared.errors.join(', '),
          **log_base_data
        )

        # Capture user/errors locally: the block may run after commit, when
        # the service object's state should no longer be relied upon.
        user = current_user
        errors = shared.errors

        project.run_after_commit_or_now do |project|
          NotificationService.new.project_not_exported(project, user, errors)
        end
      end

      def log_base_data
        @log_base_data ||= Gitlab::ImportExport::LogUtil.exportable_to_log_payload(project)
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ImportExport::ExportService, feature_category: :importers do
  describe '#execute' do
    let_it_be(:user) { create(:user) }
    let_it_be(:group) { create(:group) }
    let_it_be_with_reload(:project) { create(:project, group: group) }

    let(:shared) { project.import_export_shared }
    let!(:after_export_strategy) { Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy.new }

    subject(:service) { described_class.new(project, user) }

    before do
      project.add_maintainer(user)
    end

    it 'saves the version' do
      expect(Gitlab::ImportExport::VersionSaver).to receive(:new).and_call_original

      service.execute
    end

    it 'saves the avatar' do
      expect(Gitlab::ImportExport::AvatarSaver).to receive(:new).and_call_original

      service.execute
    end

    it 'saves the models' do
      saver_params = {
        project: project,
        current_user: user,
        shared: shared,
        params: {},
        logger: an_instance_of(Gitlab::Export::Logger)
      }
      expect(Gitlab::ImportExport::Project::TreeSaver).to receive(:new).with(saver_params).and_call_original

      service.execute
    end

    it 'saves the uploads' do
      expect(Gitlab::ImportExport::UploadsSaver).to receive(:new).and_call_original

      service.execute
    end

    it 'saves the repo' do
      # This spec errors when run against the EE codebase as there will be a third repository
      # saved (the EE-specific design repository).
      #
      # Instead, skip this test when run within EE. There is a spec for the EE-specific design repo
      # in the corresponding EE spec.
      skip if Gitlab.ee?

      # once for the normal repo, once for the wiki repo, and once for the design repo
      expect(Gitlab::ImportExport::RepoSaver).to receive(:new).exactly(3).times.and_call_original

      service.execute
    end

    it 'saves the wiki repo' do
      expect(Gitlab::ImportExport::WikiRepoSaver).to receive(:new).and_call_original

      service.execute
    end

    it 'saves the design repo' do
      expect(Gitlab::ImportExport::DesignRepoSaver).to receive(:new).and_call_original

      service.execute
    end

    it 'saves the lfs objects' do
      expect(Gitlab::ImportExport::LfsSaver).to receive(:new).and_call_original

      service.execute
    end

    it 'saves the snippets' do
      expect_next_instance_of(Gitlab::ImportExport::SnippetsRepoSaver) do |instance|
        expect(instance).to receive(:save).and_call_original
      end

      service.execute
    end

    context 'when all saver services succeed' do
      before do
        allow(service).to receive(:save_exporters).and_return(true)
      end

      it 'logs a successful message' do
        allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)

        # Start and success messages must be logged in this order.
        expect(service.instance_variable_get(:@logger)).to receive(:info).ordered.with(
          hash_including({ message: 'Project export started', project_id: project.id })
        )

        expect(service.instance_variable_get(:@logger)).to receive(:info).ordered.with(
          hash_including({ message: 'Project successfully exported', project_id: project.id })
        )

        service.execute
      end

      it 'saves the project in the file system' do
        expect(Gitlab::ImportExport::Saver).to receive(:save).with(exportable: project, shared: shared).and_return(true)

        service.execute
      end

      context 'when the upload fails' do
        before do
          expect(Gitlab::ImportExport::Saver).to receive(:save).with(exportable: project, shared: shared).and_return(false)
        end

        it 'notifies the user of an error' do
          expect(service).to receive(:notify_error).and_call_original

          expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
        end
      end

      it 'calls the after export strategy' do
        allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)

        expect(after_export_strategy).to receive(:execute)

        service.execute(after_export_strategy)
      end

      context 'when after export strategy fails' do
        before do
          allow(after_export_strategy).to receive(:execute).and_return(false)
          allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)
        end

        # The examples set expectations; the call under test happens here.
        after do
          service.execute(after_export_strategy)
        end

        it 'removes the remaining exported data' do
          allow(shared).to receive(:archive_path).and_return('whatever')
          allow(FileUtils).to receive(:rm_rf)

          expect(FileUtils).to receive(:rm_rf).with(shared.archive_path)
        end

        it 'notifies the user' do
          expect_next_instance_of(NotificationService) do |instance|
            expect(instance).to receive(:project_not_exported)
          end
        end

        it 'notifies logger' do
          expect(service.instance_variable_get(:@logger)).to receive(:error).with(
            hash_including({ message: 'Project export error', project_id: project.id })
          )
        end
      end
    end

    context 'when saving services fail' do
      before do
        allow(service).to receive(:save_exporters).and_return(false)
      end

      after do
        expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
      end

      it 'removes the remaining exported data' do
        allow(shared).to receive(:archive_path).and_return('whatever')
        allow(FileUtils).to receive(:rm_rf)

        expect(FileUtils).to receive(:rm_rf).with(shared.archive_path)
      end

      it 'notifies the user' do
        expect_next_instance_of(NotificationService) do |instance|
          expect(instance).to receive(:project_not_exported)
        end
      end

      it 'notifies logger' do
        expect(service.instance_variable_get(:@logger)).to receive(:error)
      end

      it 'does not call the export strategy' do
        expect(service).not_to receive(:execute_after_export_action)
      end
    end

    context 'when one of the savers fail unexpectedly' do
      let(:archive_path) { shared.archive_path }

      before do
        allow(service).to receive_message_chain(:uploads_saver, :save).and_return(false)
      end

      it 'removes the remaining exported data' do
        expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)

        expect(project.import_export_upload).to be_nil
        expect(File.exist?(shared.archive_path)).to eq(false)
      end
    end

    context 'when user does not have admin_project permission' do
      let!(:another_user) { create(:user) }

      subject(:service) { described_class.new(project, another_user) }

      it 'fails' do
        expected_message =
          "User with ID: %s does not have required permissions for Project: %s with ID: %s" %
            [another_user.id, project.name, project.id]
        expect { service.execute }.to raise_error(Gitlab::ImportExport::Error).with_message(expected_message)
      end
    end

    it "avoids N+1 when exporting project members" do
      group.add_owner(user)
      group.add_maintainer(create(:user))
      project.add_maintainer(create(:user))

      # warm up
      service.execute

      control = ActiveRecord::QueryRecorder.new { service.execute }

      group.add_maintainer(create(:user))
      project.add_maintainer(create(:user))

      expect { service.execute }.not_to exceed_query_limit(control)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Projects
  module ImportExport
    # Final stage of a parallel project export: merges the per-relation
    # archives produced by the export job, archives the combined result,
    # and runs the after-export strategy. Errors are logged and the user
    # is notified; nothing is raised from #execute itself.
    class ParallelExportService
      # @param export_job [ProjectExportJob] job holding the exported relations
      # @param current_user [User] user the export is performed for
      # @param after_export_strategy [Object] responds to #execute(user, project)
      def initialize(export_job, current_user, after_export_strategy)
        @export_job = export_job
        @current_user = current_user
        @after_export_strategy = after_export_strategy
        @shared = project.import_export_shared
        @logger = Gitlab::Export::Logger.build
      end

      def execute
        log_info('Parallel project export started')

        if save_exporters && save_export_archive
          log_info('Parallel project export finished successfully')
          execute_after_export_action(after_export_strategy)
        else
          notify_error
        end
      ensure
        # Temporary export/archive directories are removed in every outcome.
        cleanup
      end

      private

      attr_reader :export_job, :current_user, :after_export_strategy, :shared, :logger

      delegate :project, to: :export_job

      def execute_after_export_action(after_export_strategy)
        return if after_export_strategy.execute(current_user, project)

        notify_error
      end

      def exporters
        [version_saver, exported_relations_merger]
      end

      # `all?` short-circuits on the first failing exporter.
      def save_exporters
        exporters.all? do |exporter|
          log_info("Parallel project export - #{exporter.class.name} saver started")

          exporter.save
        end
      end

      def save_export_archive
        Gitlab::ImportExport::Saver.save(exportable: project, shared: shared)
      end

      def version_saver
        @version_saver ||= Gitlab::ImportExport::VersionSaver.new(shared: shared)
      end

      def exported_relations_merger
        @relation_saver ||= Gitlab::ImportExport::Project::ExportedRelationsMerger.new(
          export_job: export_job,
          shared: shared)
      end

      def cleanup
        FileUtils.rm_rf(shared.export_path) if File.exist?(shared.export_path)
        FileUtils.rm_rf(shared.archive_path) if File.exist?(shared.archive_path)
      end

      def log_info(message)
        logger.info(
          message: message,
          **log_base_data
        )
      end

      def notify_error
        logger.error(
          message: 'Parallel project export error',
          export_errors: shared.errors.join(', '),
          export_job_id: export_job.id,
          **log_base_data
        )

        NotificationService.new.project_not_exported(project, current_user, shared.errors)
      end

      def log_base_data
        {
          project_id: project.id,
          project_name: project.name,
          project_path: project.full_path
        }
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ImportExport::ParallelExportService, feature_category: :importers do
  let_it_be(:user) { create(:user) }

  let(:export_job) { create(:project_export_job) }
  let(:after_export_strategy) { Gitlab::ImportExport::AfterExportStrategies::DownloadNotificationStrategy.new }
  let(:project) { export_job.project }

  before do
    # Stub both exporters so the examples exercise only the service's own
    # orchestration logic, not the actual export work.
    allow_next_instance_of(Gitlab::ImportExport::Project::ExportedRelationsMerger) do |saver|
      allow(saver).to receive(:save).and_return(true)
    end

    allow_next_instance_of(Gitlab::ImportExport::VersionSaver) do |saver|
      allow(saver).to receive(:save).and_return(true)
    end
  end

  describe '#execute' do
    subject(:service) { described_class.new(export_job, user, after_export_strategy) }

    it 'creates a project export archive file' do
      expect(Gitlab::ImportExport::Saver).to receive(:save)
        .with(exportable: project, shared: project.import_export_shared)

      service.execute
    end

    it 'logs export progress' do
      allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)

      logger = service.instance_variable_get(:@logger)
      # Messages are asserted in order to pin the orchestration sequence.
      messages = [
        'Parallel project export started',
        'Parallel project export - Gitlab::ImportExport::VersionSaver saver started',
        'Parallel project export - Gitlab::ImportExport::Project::ExportedRelationsMerger saver started',
        'Parallel project export finished successfully'
      ]

      messages.each do |message|
        expect(logger).to receive(:info).ordered.with(hash_including(message: message))
      end

      service.execute
    end

    it 'executes after export strategy on export success' do
      allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)

      expect(after_export_strategy).to receive(:execute)

      service.execute
    end

    it 'ensures files are cleaned up' do
      shared = project.import_export_shared
      FileUtils.mkdir_p(shared.archive_path)
      FileUtils.mkdir_p(shared.export_path)

      allow(Gitlab::ImportExport::Saver).to receive(:save).and_raise(StandardError)

      expect { service.execute }.to raise_error(StandardError)

      # Cleanup must run even when the export raises.
      expect(File.exist?(shared.export_path)).to eq(false)
      expect(File.exist?(shared.archive_path)).to eq(false)
    end

    context 'when export fails' do
      it 'notifies the error to the user' do
        allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(false)
        allow(project.import_export_shared).to receive(:errors).and_return(['Error'])

        expect_next_instance_of(NotificationService) do |instance|
          expect(instance).to receive(:project_not_exported).with(project, user, ['Error'])
        end

        service.execute
      end
    end

    context 'when after export strategy fails' do
      it 'notifies the error to the user' do
        allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)
        allow(after_export_strategy).to receive(:execute).and_return(false)
        allow(project.import_export_shared).to receive(:errors).and_return(['Error'])

        expect_next_instance_of(NotificationService) do |instance|
          expect(instance).to receive(:project_not_exported).with(project, user, ['Error'])
        end

        service.execute
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Projects
  module ImportExport
    # Exports a single project relation (labels, uploads, repository, ...)
    # as a .tar.gz archive attached to the relation-export record.
    # Failures (saver errors or exceptions) mark the record failed with a
    # truncated error message; temporary directories are always removed.
    class RelationExportService
      include Gitlab::ImportExport::CommandLineUtil

      # @param relation_export [Projects::ImportExport::RelationExport]
      # @param jid [String] sidekiq job id recorded on the export record
      def initialize(relation_export, jid)
        @relation_export = relation_export
        @jid = jid
        @logger = Gitlab::Export::Logger.build
      end

      def execute
        relation_export.update!(status_event: :start, jid: jid)

        mkdir_p(shared.export_path)
        mkdir_p(shared.archive_path)

        if relation_saver.save
          compress_export_path
          upload_compressed_file
          relation_export.finish!
        else
          fail_export(shared.errors.join(', '))
        end
      rescue StandardError => e
        fail_export(e.message)
      ensure
        FileUtils.remove_entry(shared.export_path) if File.exist?(shared.export_path)
        FileUtils.remove_entry(shared.archive_path) if File.exist?(shared.archive_path)
      end

      private

      attr_reader :relation_export, :jid, :logger

      delegate :relation, :project_export_job, to: :relation_export
      delegate :project, to: :project_export_job

      def shared
        project.import_export_shared
      end

      # Picks the saver matching the relation name; any relation without a
      # dedicated saver falls through to the generic tree RelationSaver.
      def relation_saver
        case relation
        when Projects::ImportExport::RelationExport::UPLOADS_RELATION
          Gitlab::ImportExport::UploadsSaver.new(project: project, shared: shared)
        when Projects::ImportExport::RelationExport::REPOSITORY_RELATION
          Gitlab::ImportExport::RepoSaver.new(exportable: project, shared: shared)
        when Projects::ImportExport::RelationExport::WIKI_REPOSITORY_RELATION
          Gitlab::ImportExport::WikiRepoSaver.new(exportable: project, shared: shared)
        when Projects::ImportExport::RelationExport::LFS_OBJECTS_RELATION
          Gitlab::ImportExport::LfsSaver.new(project: project, shared: shared)
        when Projects::ImportExport::RelationExport::SNIPPETS_REPOSITORY_RELATION
          Gitlab::ImportExport::SnippetsRepoSaver.new(project: project, shared: shared, current_user: nil)
        when Projects::ImportExport::RelationExport::DESIGN_REPOSITORY_RELATION
          Gitlab::ImportExport::DesignRepoSaver.new(exportable: project, shared: shared)
        else
          Gitlab::ImportExport::Project::RelationSaver.new(
            project: project,
            shared: shared,
            relation: relation
          )
        end
      end

      def upload_compressed_file
        upload = relation_export.build_upload
        File.open(archive_file_full_path) { |file| upload.export_file = file }
        upload.save!
      end

      def compress_export_path
        tar_czf(archive: archive_file_full_path, dir: shared.export_path)
      end

      def archive_file_full_path
        @archive_file ||= File.join(shared.archive_path, "#{relation}.tar.gz")
      end

      # Marks the record failed (error message truncated to fit the column)
      # and logs the full failure context.
      def fail_export(error_message)
        relation_export.update!(status_event: :fail_op, export_error: error_message.truncate(300))

        logger.error(
          message: 'Project relation export failed',
          export_error: error_message,
          relation: relation_export.relation,
          project_export_job_id: project_export_job.id,
          project_name: project.name,
          project_id: project.id
        )
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ImportExport::RelationExportService, feature_category: :importers do
  using RSpec::Parameterized::TableSyntax

  subject(:service) { described_class.new(relation_export, 'jid') }

  let_it_be(:project_export_job) { create(:project_export_job) }
  let_it_be(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
  let_it_be(:archive_path) { "#{Dir.tmpdir}/project_archive_spec" }

  let(:relation_export) { create(:project_relation_export, relation: relation, project_export_job: project_export_job) }

  before do
    # Redirect the shared export/archive paths into tmpdir sandboxes.
    stub_uploads_object_storage(ImportExportUploader, enabled: false)

    allow(project_export_job.project.import_export_shared).to receive(:export_path).and_return(export_path)
    allow(project_export_job.project.import_export_shared).to receive(:archive_path).and_return(archive_path)

    allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original
  end

  describe '#execute' do
    let(:relation) { 'labels' }

    it 'removes temporary paths used to export files' do
      expect(FileUtils).to receive(:remove_entry).with(export_path)
      expect(FileUtils).to receive(:remove_entry).with(archive_path)

      service.execute
    end

    context 'when saver fails to export relation' do
      before do
        allow_next_instance_of(Gitlab::ImportExport::Project::RelationSaver) do |saver|
          allow(saver).to receive(:save).and_return(false)
        end
      end

      it 'flags export as failed' do
        service.execute

        expect(relation_export.failed?).to eq(true)
      end

      it 'logs failed message' do
        expect_next_instance_of(Gitlab::Export::Logger) do |logger|
          expect(logger).to receive(:error).with(
            export_error: '',
            message: 'Project relation export failed',
            relation: relation_export.relation,
            project_export_job_id: project_export_job.id,
            project_id: project_export_job.project.id,
            project_name: project_export_job.project.name
          )
        end

        service.execute
      end
    end

    context 'when an exception is raised' do
      before do
        allow_next_instance_of(Gitlab::ImportExport::Project::RelationSaver) do |saver|
          allow(saver).to receive(:save).and_raise('Error!')
        end
      end

      it 'flags export as failed' do
        service.execute

        expect(relation_export.failed?).to eq(true)
        expect(relation_export.export_error).to eq('Error!')
      end

      it 'logs exception error message' do
        expect_next_instance_of(Gitlab::Export::Logger) do |logger|
          expect(logger).to receive(:error).with(
            export_error: 'Error!',
            message: 'Project relation export failed',
            relation: relation_export.relation,
            project_export_job_id: project_export_job.id,
            project_id: project_export_job.project.id,
            project_name: project_export_job.project.name
          )
        end

        service.execute
      end
    end

    # Table-driven check that each relation name is routed to its saver.
    describe 'relation name and saver class' do
      where(:relation_name, :saver) do
        Projects::ImportExport::RelationExport::UPLOADS_RELATION | Gitlab::ImportExport::UploadsSaver
        Projects::ImportExport::RelationExport::REPOSITORY_RELATION | Gitlab::ImportExport::RepoSaver
        Projects::ImportExport::RelationExport::WIKI_REPOSITORY_RELATION | Gitlab::ImportExport::WikiRepoSaver
        Projects::ImportExport::RelationExport::LFS_OBJECTS_RELATION | Gitlab::ImportExport::LfsSaver
        Projects::ImportExport::RelationExport::SNIPPETS_REPOSITORY_RELATION | Gitlab::ImportExport::SnippetsRepoSaver
        Projects::ImportExport::RelationExport::DESIGN_REPOSITORY_RELATION | Gitlab::ImportExport::DesignRepoSaver
        Projects::ImportExport::RelationExport::ROOT_RELATION | Gitlab::ImportExport::Project::RelationSaver
        'labels' | Gitlab::ImportExport::Project::RelationSaver
      end

      with_them do
        let(:relation) { relation_name }

        it 'exports relation using correct saver' do
          expect(saver).to receive(:new).and_call_original

          service.execute
        end

        it 'assigns finished status and relation file' do
          service.execute

          expect(relation_export.finished?).to eq(true)
          expect(relation_export.upload.export_file.filename).to eq("#{relation}.tar.gz")
        end
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Projects
  module HashedStorage
    # Moves a project's attachments from the legacy (path-based) storage
    # layout to the hashed storage layout, bumping the project's
    # storage_version on success.
    class MigrateAttachmentsService < BaseAttachmentService
      extend ::Gitlab::Utils::Override

      # List of paths that can be excluded while evaluating whether a target
      # directory can be discarded (they hold only temporary/cache content)
      DISCARDABLE_PATHS = %w[tmp tmp/cache tmp/work].freeze

      def initialize(project:, old_disk_path:, logger: nil)
        super

        @skipped = false
      end

      # Performs the move and persists the new storage version.
      # Yields to the optional block only when the move succeeded.
      # @return [Boolean] result of the folder move
      def execute
        origin = find_old_attachments_path(project)
        project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
        target = FileUploader.absolute_base_dir(project)

        @new_disk_path = project.disk_path

        result = move_folder!(origin, target)

        if result
          # validate: false — the migration must go through even if the
          # project record currently fails validation.
          project.save!(validate: false)

          yield if block_given?
        end

        result
      end

      override :target_path_discardable?
      # Check if target path has discardable content
      #
      # @param [String] new_path
      # @return [Boolean] whether we can discard the target path or not
      def target_path_discardable?(new_path)
        return false unless File.directory?(new_path)

        found = Dir.glob(File.join(new_path, '**', '**'))
        (found - discardable_paths(new_path)).empty?
      end

      private

      # Absolute versions of DISCARDABLE_PATHS under the given target path.
      def discardable_paths(new_path)
        DISCARDABLE_PATHS.collect { |path| File.join(new_path, path) }
      end

      def find_old_attachments_path(project)
        origin = FileUploader.absolute_base_dir(project)

        # It's possible that old_disk_path does not match project.disk_path.
        # For example, that happens when we rename a project
        #
        origin.sub(/#{Regexp.escape(project.full_path)}\z/, old_disk_path)
      end
    end
  end
end

Projects::HashedStorage::MigrateAttachmentsService.prepend_mod_with('Projects::HashedStorage::MigrateAttachmentsService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::HashedStorage::MigrateAttachmentsService, feature_category: :groups_and_projects do
  subject(:service) { described_class.new(project: project, old_disk_path: project.full_path, logger: nil) }

  let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
  let(:legacy_storage) { Storage::LegacyProject.new(project) }
  let(:hashed_storage) { Storage::Hashed.new(project) }

  let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
  let(:file_uploader) { build(:file_uploader, project: project) }
  # Same upload, addressed through the legacy and the hashed layout.
  let(:old_disk_path) { File.join(base_path(legacy_storage), upload.path) }
  let(:new_disk_path) { File.join(base_path(hashed_storage), upload.path) }

  describe '#execute' do
    context 'when succeeds' do
      it 'moves attachments to hashed storage layout' do
        expect(File.file?(old_disk_path)).to be_truthy
        expect(File.file?(new_disk_path)).to be_falsey
        expect(File.exist?(base_path(legacy_storage))).to be_truthy
        expect(File.exist?(base_path(hashed_storage))).to be_falsey
        expect(FileUtils).to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)).and_call_original

        service.execute

        expect(File.exist?(base_path(hashed_storage))).to be_truthy
        expect(File.exist?(base_path(legacy_storage))).to be_falsey
        expect(File.file?(old_disk_path)).to be_falsey
        expect(File.file?(new_disk_path)).to be_truthy
      end

      it 'returns true' do
        expect(service.execute).to be_truthy
      end

      it 'sets skipped to false' do
        service.execute

        expect(service.skipped?).to be_falsey
      end
    end

    context 'when original folder does not exist anymore' do
      before do
        FileUtils.rm_rf(base_path(legacy_storage))
      end

      it 'skips moving folders and go to next' do
        expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))

        service.execute

        expect(File.exist?(base_path(hashed_storage))).to be_falsey
        expect(File.file?(new_disk_path)).to be_falsey
      end

      it 'returns true' do
        expect(service.execute).to be_truthy
      end

      it 'sets skipped to true' do
        service.execute

        expect(service.skipped?).to be_truthy
      end
    end

    context 'when target folder already exists' do
      before do
        FileUtils.mkdir_p(base_path(hashed_storage))
      end

      it 'succeed when target is empty' do
        expect { service.execute }.not_to raise_error
      end

      it 'succeed when target include only discardable items' do
        Projects::HashedStorage::MigrateAttachmentsService::DISCARDABLE_PATHS.each do |path_fragment|
          discardable_path = File.join(base_path(hashed_storage), path_fragment)
          FileUtils.mkdir_p(discardable_path)
        end

        expect { service.execute }.not_to raise_error
      end

      it 'raises AttachmentCannotMoveError when there are non discardable items on target path' do
        not_discardable_path = File.join(base_path(hashed_storage), 'something')
        FileUtils.mkdir_p(not_discardable_path)

        expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage))

        expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentCannotMoveError)
      end
    end

    it 'works even when project validation fails' do
      allow(project).to receive(:valid?) { false }

      expect { service.execute }.to change { project.hashed_storage?(:attachments) }.to(true)
    end
  end

  describe '#old_disk_path' do
    it 'returns old disk_path for project' do
      expect(service.old_disk_path).to eq(project.full_path)
    end
  end

  describe '#new_disk_path' do
    it 'returns new disk_path for project' do
      service.execute

      expect(service.new_disk_path).to eq(project.disk_path)
    end
  end

  describe '#target_path_discardable?' do
    it 'returns true when it include only items on the discardable list' do
      hashed_attachments_path = File.join(base_path(hashed_storage))
      Projects::HashedStorage::MigrateAttachmentsService::DISCARDABLE_PATHS.each do |path_fragment|
        discardable_path = File.join(hashed_attachments_path, path_fragment)
        FileUtils.mkdir_p(discardable_path)
      end

      expect(service.target_path_discardable?(hashed_attachments_path)).to be_truthy
    end
  end

  # Absolute on-disk base directory for the given storage layout.
  def base_path(storage)
    File.join(FileUploader.root, storage.disk_path)
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Projects
  module HashedStorage
    AttachmentMigrationError = Class.new(StandardError)

    AttachmentCannotMoveError = Class.new(StandardError)

    # Shared base for the hashed-storage attachment migration/rollback
    # services: holds old/new disk paths and implements the safe folder
    # move used by subclasses.
    class BaseAttachmentService < BaseService
      # Returns the disk_path value before the execution
      attr_reader :old_disk_path

      # Returns the disk_path value after the execution
      attr_reader :new_disk_path

      # Returns the logger currently in use
      attr_reader :logger

      def initialize(project:, old_disk_path:, logger: nil)
        @project = project
        @old_disk_path = old_disk_path
        @logger = logger || Gitlab::AppLogger
      end

      # Return whether this operation was skipped or not
      #
      # @return [Boolean] true if skipped of false otherwise
      def skipped?
        @skipped
      end

      # Check if target path has discardable content
      #
      # Base implementation never discards; subclasses may override.
      #
      # @param [String] new_path
      # @return [Boolean] whether we can discard the target path or not
      def target_path_discardable?(new_path)
        false
      end

      protected

      # Moves old_path to new_path, skipping (and returning true) when the
      # source is missing, discarding a reusable target, and raising
      # AttachmentCannotMoveError when the target exists with real content.
      def move_folder!(old_path, new_path)
        unless File.directory?(old_path)
          logger.info("Skipped attachments move from '#{old_path}' to '#{new_path}', source path doesn't exist or is not a directory (PROJECT_ID=#{project.id})")
          @skipped = true

          return true
        end

        if File.exist?(new_path)
          if target_path_discardable?(new_path)
            discard_path!(new_path)
          else
            logger.error("Cannot move attachments from '#{old_path}' to '#{new_path}', target path already exist (PROJECT_ID=#{project.id})")

            raise AttachmentCannotMoveError, "Target path '#{new_path}' already exists"
          end
        end

        # Create base path folder on the new storage layout
        FileUtils.mkdir_p(File.dirname(new_path))

        FileUtils.mv(old_path, new_path)
        logger.info("Project attachments moved from '#{old_path}' to '#{new_path}' (PROJECT_ID=#{project.id})")

        true
      end

      # Rename a path adding a suffix in order to prevent data-loss.
      #
      # @param [String] new_path
      def discard_path!(new_path)
        discarded_path = "#{new_path}-#{Time.current.utc.to_i}"

        logger.info("Moving existing empty attachments folder from '#{new_path}' to '#{discarded_path}', (PROJECT_ID=#{project.id})")
        FileUtils.mv(new_path, discarded_path)
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::HashedStorage::BaseAttachmentService, feature_category: :groups_and_projects do
# Legacy-storage project (storage_version: 0); disk validation skipped so no
# real repository directory is required for these unit-level examples.
let(:project) { create(:project, :repository, storage_version: 0, skip_disk_validation: true) }
subject(:service) { described_class.new(project: project, old_disk_path: project.full_path, logger: nil) }
describe '#old_disk_path' do
it { is_expected.to respond_to :old_disk_path }
end
describe '#new_disk_path' do
it { is_expected.to respond_to :new_disk_path }
end
describe '#skipped?' do
it { is_expected.to respond_to :skipped? }
end
describe '#target_path_discardable?' do
# The base class is conservative: nothing is discardable by default.
it 'returns false' do
expect(subject.target_path_discardable?('something/something')).to be_falsey
end
end
describe '#discard_path!' do
it 'renames target path adding a timestamp at the end' do
target_path = Dir.mktmpdir
expect(Dir.exist?(target_path)).to be_truthy
# freeze_time pins Time.current so the expected "-<epoch>" suffix matches.
freeze_time do
suffix = Time.current.utc.to_i
subject.send(:discard_path!, target_path)
expected_renamed_path = "#{target_path}-#{suffix}"
expect(Dir.exist?(target_path)).to be_falsey
expect(Dir.exist?(expected_renamed_path)).to be_truthy
end
end
end
describe '#move_folder!' do
context 'when old_path is not a directory' do
# A Tempfile is a regular file, so move_folder! must skip and return true.
it 'adds information to the logger and returns true' do
Tempfile.create do |old_path| # rubocop:disable Rails/SaveBang
new_path = "#{old_path}-new"
expect(subject.send(:move_folder!, old_path, new_path)).to be_truthy
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module HashedStorage
# Orchestrates moving a project from legacy storage to hashed storage.
# Currently delegates the attachments step to MigrateAttachmentsService
# when the project has not migrated attachments yet.
class MigrationService < BaseService
attr_reader :logger, :old_disk_path
# @param project [Project] project being migrated
# @param old_disk_path [String] legacy disk path of the project
# @param logger [Logger, nil] falls back to Gitlab::AppLogger when nil
def initialize(project, old_disk_path, logger: nil)
@project = project
@old_disk_path = old_disk_path
@logger = logger || Gitlab::AppLogger
end
# @return [Boolean] false when the attachments migration fails, true otherwise
def execute
# Migrate attachments from Legacy to Hashed Storage
unless project.hashed_storage?(:attachments)
return false unless migrate_attachments_service.execute
end
true
end
private
# Builds the delegate service; kept as a method so it can be stubbed in specs.
def migrate_attachments_service
HashedStorage::MigrateAttachmentsService.new(project: project, old_disk_path: old_disk_path, logger: logger)
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::HashedStorage::MigrationService, feature_category: :groups_and_projects do
let(:project) { create(:project, :empty_repo, :wiki_repo, :legacy_storage) }
# Plain double: examples set explicit message expectations on it.
let(:logger) { double }
let!(:project_attachment) { build(:file_uploader, project: project) }
let(:project_hashed_path) { Storage::Hashed.new(project).disk_path }
let(:project_legacy_path) { Storage::LegacyProject.new(project).disk_path }
let(:wiki_hashed_path) { "#{project_hashed_path}.wiki" }
let(:wiki_legacy_path) { "#{project_legacy_path}.wiki" }
subject(:service) { described_class.new(project, project.full_path, logger: logger) }
describe '#execute' do
context 'attachments migration' do
# Repository already on hashed storage; only attachments remain to migrate.
let(:project) { create(:project, :empty_repo, :wiki_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
let(:attachments_service) do
Projects::HashedStorage::MigrateAttachmentsService.new(
project: project,
old_disk_path: project.full_path,
logger: logger
)
end
it 'does not delegate migration if attachments are already migrated' do
project.storage_version = ::Project::LATEST_STORAGE_VERSION
expect(Projects::HashedStorage::MigrateAttachmentsService).not_to receive(:new)
service.execute
end
it 'migrates legacy attachments to hashed storage' do
legacy_attachments_path = FileUploader.absolute_base_dir(project)
# Duplicate the project with the attachments storage_version to compute
# the expected hashed destination path without mutating the original.
hashed_project = project.dup.tap { |p| p.id = project.id }
hashed_project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
hashed_attachments_path = FileUploader.absolute_base_dir(hashed_project)
expect(logger).to receive(:info).with(/Project attachments moved from '#{legacy_attachments_path}' to '#{hashed_attachments_path}'/)
expect { service.execute }.to change { project.storage_version }.from(1).to(2)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module Operations
# Translates operations-settings params (error tracking, alerting,
# Prometheus, incident management) into nested attributes and delegates
# the actual persistence to Projects::UpdateService.
class UpdateService < BaseService
def execute
Projects::UpdateService
.new(project, current_user, project_update_params)
.execute
end
private
# Merges all per-feature param builders into a single update hash; each
# builder returns {} when its feature's params are absent or not allowed.
def project_update_params
error_tracking_params
.merge(alerting_setting_params)
.merge(prometheus_integration_params)
.merge(incident_management_setting_params)
end
# Builds alerting-setting attributes. Requires :read_prometheus_alerts.
# When regenerate_token is requested, the token is nilled so the model
# regenerates it; otherwise any supplied token is dropped (never updated
# directly by the caller).
def alerting_setting_params
return {} unless can?(current_user, :read_prometheus_alerts, project)
attr = params[:alerting_setting_attributes]
return {} unless attr
regenerate_token = attr.delete(:regenerate_token)
if regenerate_token
attr[:token] = nil
else
attr = attr.except(:token)
end
{ alerting_setting_attributes: attr }
end
# Dispatches between a partial update (only "enabled", optionally
# "integrated") and a full Sentry-settings update.
def error_tracking_params
settings = params[:error_tracking_setting_attributes]
return {} if settings.blank?
if error_tracking_params_partial_updates?(settings)
error_tracking_params_for_partial_update(settings)
else
error_tracking_params_for_update(settings)
end
end
def error_tracking_params_partial_updates?(settings)
# Help from @splattael :bow:
# Make sure we're converting to symbols because
# * ActionController::Parameters#keys returns a list of strings
# * in specs we're using hashes with symbols as keys
update_keys = settings.keys.map(&:to_sym)
# Integrated error tracking works without Sentry integration,
# so we don't need to update all those values from error_tracking_params_for_update method.
# Instead we turn it on/off with partial update together with "enabled" attribute.
# But since its optional, we exclude it from the condition below.
update_keys.delete(:integrated)
update_keys == %i[enabled]
end
# Passes the (enabled/integrated-only) settings through unchanged.
def error_tracking_params_for_partial_update(settings)
{ error_tracking_setting_attributes: settings }
end
# Builds the full Sentry settings hash, deriving api_url from host +
# org/project slugs. Masked tokens are ignored; integrated is only set
# when explicitly provided.
def error_tracking_params_for_update(settings)
api_url = ::ErrorTracking::ProjectErrorTrackingSetting.build_api_url_from(
api_host: settings[:api_host],
project_slug: settings.dig(:project, :slug),
organization_slug: settings.dig(:project, :organization_slug)
)
params = {
error_tracking_setting_attributes: {
api_url: api_url,
enabled: settings[:enabled],
project_name: settings.dig(:project, :name),
organization_name: settings.dig(:project, :organization_name),
sentry_project_id: settings.dig(:project, :sentry_project_id)
}
}
params[:error_tracking_setting_attributes][:token] = settings[:token] unless ::ErrorTracking::SentryClient::Token.masked_token?(settings[:token]) # Don't update token if we receive masked value
params[:error_tracking_setting_attributes][:integrated] = settings[:integrated] unless settings[:integrated].nil?
params
end
# Normalizes Prometheus integration attributes by assigning them onto the
# (found or initialized) integration record, so instance-level defaults
# and encrypted properties are included in the resulting hash.
def prometheus_integration_params
return {} unless attrs = params[:prometheus_integration_attributes]
integration = project.find_or_initialize_integration(::Integrations::Prometheus.to_param)
integration.assign_attributes(attrs)
attrs = integration.to_database_hash
{ prometheus_integration_attributes: attrs }
end
# Same token-regeneration pattern as alerting_setting_params, but for the
# PagerDuty token of the incident management settings.
def incident_management_setting_params
attrs = params[:incident_management_setting_attributes]
return {} unless attrs
regenerate_token = attrs.delete(:regenerate_token)
if regenerate_token
attrs[:pagerduty_token] = nil
else
attrs = attrs.except(:pagerduty_token)
end
{ incident_management_setting_attributes: attrs }
end
end
end
end
Projects::Operations::UpdateService.prepend_mod_with('Projects::Operations::UpdateService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::Operations::UpdateService, feature_category: :groups_and_projects do
let_it_be_with_refind(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let(:result) { subject.execute }
subject { described_class.new(project, user, params) }
describe '#execute' do
context 'alerting setting' do
before_all do
project.add_maintainer(user)
end
shared_examples 'no operation' do
it 'does nothing' do
expect(result[:status]).to eq(:success)
expect(project.reload.alerting_setting).to be_nil
end
end
context 'with valid params' do
let(:params) { { alerting_setting_attributes: alerting_params } }
shared_examples 'setting creation' do
it 'creates a setting' do
expect(project.alerting_setting).to be_nil
expect(result[:status]).to eq(:success)
expect(project.reload.alerting_setting).not_to be_nil
end
end
context 'when regenerate_token is not set' do
let(:alerting_params) { { token: 'some token' } }
context 'with an existing setting' do
let!(:alerting_setting) do
create(:project_alerting_setting, project: project)
end
it 'ignores provided token' do
expect(result[:status]).to eq(:success)
expect(project.reload.alerting_setting.token)
.to eq(alerting_setting.token)
end
end
context 'without an existing setting' do
it_behaves_like 'setting creation'
end
end
context 'when regenerate_token is set' do
let(:alerting_params) { { regenerate_token: true } }
context 'with an existing setting' do
let(:token) { 'some token' }
let!(:alerting_setting) do
create(:project_alerting_setting, project: project, token: token)
end
it 'regenerates token' do
expect(result[:status]).to eq(:success)
expect(project.reload.alerting_setting.token).not_to eq(token)
end
end
context 'without an existing setting' do
it_behaves_like 'setting creation'
context 'with insufficient permissions' do
before do
project.add_reporter(user)
end
it_behaves_like 'no operation'
end
end
end
end
context 'with empty params' do
let(:params) { {} }
it_behaves_like 'no operation'
end
end
context 'error tracking' do
context 'with existing error tracking setting' do
let(:params) do
{
error_tracking_setting_attributes: {
enabled: false,
integrated: true,
api_host: 'http://gitlab.com/',
token: 'token',
project: {
slug: 'project',
name: 'Project',
organization_slug: 'org',
organization_name: 'Org'
}
}
}
end
before do
create(:project_error_tracking_setting, project: project)
end
it 'updates the settings' do
expect(result[:status]).to eq(:success)
project.reload
expect(project.error_tracking_setting).not_to be_enabled
expect(project.error_tracking_setting.integrated).to be_truthy
expect(project.error_tracking_setting.api_url).to eq(
'http://gitlab.com/api/0/projects/org/project/'
)
expect(project.error_tracking_setting.token).to eq('token')
expect(project.error_tracking_setting[:project_name]).to eq('Project')
expect(project.error_tracking_setting[:organization_name]).to eq('Org')
end
context 'disable error tracking' do
before do
params[:error_tracking_setting_attributes][:api_host] = ''
params[:error_tracking_setting_attributes][:enabled] = false
end
it 'can set api_url to nil' do
expect(result[:status]).to eq(:success)
project.reload
expect(project.error_tracking_setting).not_to be_enabled
expect(project.error_tracking_setting.api_url).to be_nil
expect(project.error_tracking_setting.token).to eq('token')
expect(project.error_tracking_setting[:project_name]).to eq('Project')
expect(project.error_tracking_setting[:organization_name]).to eq('Org')
end
end
end
context 'without an existing error tracking setting' do
let(:params) do
{
error_tracking_setting_attributes: {
enabled: true,
integrated: true,
api_host: 'http://gitlab.com/',
token: 'token',
project: {
slug: 'project',
name: 'Project',
organization_slug: 'org',
organization_name: 'Org'
}
}
}
end
it 'creates a setting' do
expect(result[:status]).to eq(:success)
expect(project.error_tracking_setting).to be_enabled
expect(project.error_tracking_setting.integrated).to be_truthy
expect(project.error_tracking_setting.api_url).to eq(
'http://gitlab.com/api/0/projects/org/project/'
)
expect(project.error_tracking_setting.token).to eq('token')
expect(project.error_tracking_setting[:project_name]).to eq('Project')
expect(project.error_tracking_setting[:organization_name]).to eq('Org')
end
end
context 'partial_update' do
let(:params) do
{
error_tracking_setting_attributes: {
enabled: true
}
}
end
context 'with setting' do
before do
create(:project_error_tracking_setting, :disabled, project: project)
end
it 'service succeeds' do
expect(result[:status]).to eq(:success)
end
it 'updates attributes' do
expect { result }
.to change { project.reload.error_tracking_setting.enabled }
.from(false)
.to(true)
end
it 'only updates enabled attribute' do
result
expect(project.error_tracking_setting.previous_changes.keys)
.to contain_exactly('enabled')
end
context 'with integrated attribute' do
let(:params) do
{
error_tracking_setting_attributes: {
enabled: true,
integrated: true
}
}
end
it 'updates integrated attribute' do
expect { result }
.to change { project.reload.error_tracking_setting.integrated }
.from(false)
.to(true)
end
it 'only updates enabled and integrated attributes' do
result
expect(project.error_tracking_setting.previous_changes.keys)
.to contain_exactly('enabled', 'integrated')
end
end
end
context 'without setting' do
it 'creates setting with default values' do
expect(result[:status]).to eq(:success)
expect(project.error_tracking_setting.enabled).to be_truthy
expect(project.error_tracking_setting.integrated).to be_truthy
end
end
end
context 'with masked param token' do
let(:params) do
{
error_tracking_setting_attributes: {
api_host: 'https://sentrytest.gitlab.com/',
project: {
slug: 'sentry-project',
organization_slug: 'sentry-org'
},
enabled: false,
token: '*' * 8
}
}
end
before do
create(:project_error_tracking_setting, project: project, token: 'token', api_url: 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/')
end
it 'does not update token' do
expect(result[:status]).to eq(:success)
expect(project.error_tracking_setting.token).to eq('token')
end
end
context 'with invalid parameters' do
let(:params) { {} }
let!(:error_tracking_setting) do
create(:project_error_tracking_setting, project: project)
end
it 'does nothing' do
expect(result[:status]).to eq(:success)
expect(project.reload.error_tracking_setting)
.to eq(error_tracking_setting)
end
end
end
context 'with inappropriate params' do
let(:params) { { name: '' } }
let!(:original_name) { project.name }
it 'ignores params' do
expect(result[:status]).to eq(:success)
expect(project.reload.name).to eq(original_name)
end
end
context 'prometheus integration' do
context 'prometheus params were passed into service' do
let!(:prometheus_integration) do
create(:prometheus_integration, :instance, properties: {
api_url: "http://example.prometheus.com",
manual_configuration: "0",
google_iap_audience_client_id: 123
})
end
let(:params) do
{
prometheus_integration_attributes: {
'api_url' => 'http://new.prometheus.com',
'manual_configuration' => '1'
}
}
end
it 'uses Project#find_or_initialize_integration to include instance defined defaults and pass them to Projects::UpdateService', :aggregate_failures do
expect(Projects::UpdateService).to receive(:new) do |project_arg, user_arg, update_params_hash|
prometheus_attrs = update_params_hash[:prometheus_integration_attributes]
expect(project_arg).to eq project
expect(user_arg).to eq user
expect(prometheus_attrs).to have_key('encrypted_properties')
expect(prometheus_attrs.keys).not_to include(*%w[id project_id created_at updated_at properties])
expect(prometheus_attrs['encrypted_properties']).not_to eq(prometheus_integration.encrypted_properties)
end.and_call_original
expect { subject.execute }.to change(Integrations::Prometheus, :count).by(1)
expect(Integrations::Prometheus.last).to have_attributes(
api_url: 'http://new.prometheus.com',
manual_configuration: true,
google_iap_audience_client_id: 123
)
end
end
context 'when prometheus params are not passed into service' do
let(:params) { { something: :else } }
it 'does not pass any prometheus params into Projects::UpdateService', :aggregate_failures do
project_update_service = double(Projects::UpdateService)
expect(project).not_to receive(:find_or_initialize_integration)
expect(Projects::UpdateService)
.to receive(:new)
.with(project, user, {})
.and_return(project_update_service)
expect(project_update_service).to receive(:execute)
subject.execute
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module AutoDevops
# Explicitly disables Auto DevOps on a project when it was only enabled
# implicitly (instance default) and its very first Auto DevOps pipeline
# failed, to avoid repeated broken pipelines.
class DisableService < BaseService
# @return [Boolean] false when the preconditions do not hold; otherwise
#   the result of persisting the disabled setting (raises on invalid save)
def execute
return false unless implicitly_enabled_and_first_pipeline_failure?
disable_auto_devops
end
private
def implicitly_enabled_and_first_pipeline_failure?
project.has_auto_devops_implicitly_enabled? &&
first_pipeline_failure?
end
# We're using `limit` to optimize `auto_devops pipeline` query,
# since we only care about the first element, and using only `.count`
# is an expensive operation. See
# https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/21172#note_99037378
# for more context.
# rubocop: disable CodeReuse/ActiveRecord
def first_pipeline_failure?
auto_devops_pipelines.success.limit(1).count == 0 &&
auto_devops_pipelines.failed.limit(1).count.nonzero?
end
# rubocop: enable CodeReuse/ActiveRecord
# Persists enabled: false via nested attributes (creates the
# ProjectAutoDevops record when it does not exist yet).
def disable_auto_devops
project.auto_devops_attributes = { enabled: false }
project.save!
end
def auto_devops_pipelines
@auto_devops_pipelines ||= project.ci_pipelines.auto_devops_source
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::AutoDevops::DisableService, '#execute', feature_category: :auto_devops do
let(:project) { create(:project, :repository, :auto_devops) }
let(:auto_devops) { project.auto_devops }
subject { described_class.new(project).execute }
context 'when Auto DevOps disabled at instance level' do
before do
stub_application_setting(auto_devops_enabled: false)
end
it { is_expected.to be_falsy }
end
context 'when Auto DevOps enabled at instance level' do
before do
stub_application_setting(auto_devops_enabled: true)
end
context 'when Auto DevOps explicitly enabled on project' do
before do
auto_devops.update!(enabled: true)
end
it { is_expected.to be_falsy }
end
context 'when Auto DevOps explicitly disabled on project' do
before do
auto_devops.update!(enabled: false)
end
it { is_expected.to be_falsy }
end
context 'when Auto DevOps is implicitly enabled' do
before do
auto_devops.update!(enabled: nil)
end
context 'when is the first pipeline failure' do
before do
create(:ci_pipeline, :failed, :auto_devops_source, project: project)
end
it 'disables Auto DevOps for project' do
subject
expect(auto_devops.enabled).to eq(false)
end
end
context 'when it is not the first pipeline failure' do
before do
create_list(:ci_pipeline, 2, :failed, :auto_devops_source, project: project)
end
it 'explicitly disables Auto DevOps for project' do
subject
expect(auto_devops.reload.enabled).to eq(false)
end
end
context 'when an Auto DevOps pipeline has succeeded before' do
before do
create(:ci_pipeline, :success, :auto_devops_source, project: project)
end
it 'does not disable Auto DevOps for project' do
subject
expect(auto_devops.reload.enabled).to be_nil
end
end
end
context 'when project does not have an Auto DevOps record related' do
let(:project) { create(:project, :repository) }
before do
create(:ci_pipeline, :failed, :auto_devops_source, project: project)
end
it 'disables Auto DevOps for project' do
subject
auto_devops = project.reload.auto_devops
expect(auto_devops.enabled).to eq(false)
end
it 'creates a ProjectAutoDevops record' do
expect { subject }.to change { ProjectAutoDevops.count }.from(0).to(1)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module DeployTokens
# Destroys a project deploy token; the shared logic lives in the
# DeployTokenMethods mixin so groups can reuse it.
class DestroyService < BaseService
include DeployTokenMethods
# Delegates to DeployTokenMethods#destroy_deploy_token with this
# service's project and params.
def execute
destroy_deploy_token(@project, params)
end
end
end
end
Projects::DeployTokens::DestroyService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::DeployTokens::DestroyService, feature_category: :continuous_delivery do
# Behavior is shared with the group variant; the shared example supplies
# the assertions and only needs the entity/token wiring below.
it_behaves_like 'a deploy token deletion service' do
let_it_be(:entity) { create(:project) }
let_it_be(:deploy_token_class) { ProjectDeployToken }
let_it_be(:deploy_token) { create(:deploy_token, projects: [entity]) }
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module DeployTokens
# Creates a project deploy token; the shared logic lives in the
# DeployTokenMethods mixin so groups can reuse it.
class CreateService < BaseService
include DeployTokenMethods
# Creates the token for the service's project/current_user and returns
# the payload hash built by create_deploy_token_payload_for.
def execute
deploy_token = create_deploy_token_for(@project, current_user, params)
create_deploy_token_payload_for(deploy_token)
end
end
end
end
Projects::DeployTokens::CreateService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::DeployTokens::CreateService, feature_category: :continuous_delivery do
# Assertions come from the shared example; only the entity wiring differs
# from the group-level variant.
it_behaves_like 'a deploy token creation service' do
let(:entity) { create(:project) }
let(:deploy_token_class) { ProjectDeployToken }
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module Forks
# A service for fetching upstream default branch and merging it to the fork's specified branch.
class SyncService < BaseService
ONGOING_MERGE_ERROR = 'The synchronization did not happen due to another merge in progress'
MergeError = Class.new(StandardError)
# @param project [Project] the fork being synchronized
# @param user [User] user performing the sync (merge author)
# @param target_branch [String] fork branch to merge the upstream into
def initialize(project, user, target_branch)
super(project, user)
@source_project = project.fork_source
@head_sha = project.repository.commit(target_branch).sha
@target_branch = target_branch
@details = Projects::Forks::Details.new(project, target_branch)
end
# @return [ServiceResponse] success, or error carrying the MergeError
#   message; the divergence-details exclusive lease is always cancelled.
def execute
execute_service
ServiceResponse.success
rescue MergeError => e
Gitlab::ErrorTracking.log_exception(e, { project_id: project.id, user_id: current_user.id })
ServiceResponse.error(message: e.message)
ensure
details.exclusive_lease.cancel
end
private
attr_reader :source_project, :head_sha, :target_branch, :details
# The method executes multiple steps:
#
# 1. Gitlab::Git::CrossRepo fetches upstream default branch into a temporary ref and returns new source sha.
# 2. New divergence counts are calculated using the source sha.
# 3. If the fork is not behind, there is nothing to merge -> exit.
# 4. Otherwise, continue with the new source sha.
# 5. If Gitlab::Git::CommandError is raised it means that merge couldn't happen due to a merge conflict. The
# details are updated to transfer this error to the user.
def execute_service
counts = []
source_sha = source_project.commit.sha
Gitlab::Git::CrossRepo.new(repository, source_project.repository)
.execute(source_sha) do |cross_repo_source_sha|
counts = repository.diverging_commit_count(head_sha, cross_repo_source_sha)
ahead, behind = counts
next if behind == 0
execute_with_fetched_source(cross_repo_source_sha, ahead)
end
rescue Gitlab::Git::CommandError => e
# Record the conflict in the details cache so the UI can surface it.
details.update!({ sha: head_sha, source_sha: source_sha, counts: counts, has_conflicts: true })
raise MergeError, e.message
end
# Performs the merge (inside the LFS-linking wrapper) and converts a nil
# merge result — another merge holding the branch — into a MergeError.
def execute_with_fetched_source(cross_repo_source_sha, ahead)
with_linked_lfs_pointers(cross_repo_source_sha) do
merge_commit_id = perform_merge(cross_repo_source_sha, ahead)
raise MergeError, ONGOING_MERGE_ERROR unless merge_commit_id
end
end
# This method merges the upstream default branch to the fork specified branch.
# Depending on whether the fork branch is ahead of upstream or not, a different type of
# merge is performed.
#
# If the fork's branch is not ahead of the upstream (only behind), fast-forward merge is performed.
# However, if the fork's branch contains commits that don't exist upstream, a merge commit is created.
# In this case, a conflict may happen, which interrupts the merge and returns a message to the user.
def perform_merge(cross_repo_source_sha, ahead)
if ahead > 0
message = "Merge branch #{source_project.path}:#{source_project.default_branch} into #{target_branch}"
repository.merge_to_branch(current_user,
source_sha: cross_repo_source_sha,
target_branch: target_branch,
target_sha: head_sha,
message: message)
else
repository.ff_merge(current_user, cross_repo_source_sha, target_branch, target_sha: head_sha)
end
end
# This method links the newly merged lfs objects (if any) with the existing ones upstream.
# The LfsLinkService service has a limit and may raise an error if there are too many lfs objects to link.
# This is the reason why the block is passed:
#
# 1. Verify that there are not too many lfs objects to link
# 2. Execute the block (which basically performs the merge)
# 3. Link lfs objects
def with_linked_lfs_pointers(newrev, &block)
return yield unless project.lfs_enabled?
oldrev = head_sha
new_lfs_oids =
Gitlab::Git::LfsChanges
.new(repository, newrev)
.new_pointers(not_in: [oldrev])
.map(&:lfs_oid)
Projects::LfsPointers::LfsLinkService.new(project).execute(new_lfs_oids, &block)
rescue Projects::LfsPointers::LfsLinkService::TooManyOidsError => e
# Surface the OID-limit failure through the service's MergeError channel.
raise MergeError, e.message
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::Forks::SyncService, feature_category: :source_code_management do
include ProjectForksHelper
include RepoHelpers
let_it_be(:user) { create(:user) }
let_it_be(:source_project) { create(:project, :repository, :public) }
let_it_be(:project) { fork_project(source_project, user, { repository: true }) }
let(:fork_branch) { project.default_branch }
let(:service) { described_class.new(project, user, fork_branch) }
def details
Projects::Forks::Details.new(project, fork_branch)
end
def expect_to_cancel_exclusive_lease
expect(Gitlab::ExclusiveLease).to receive(:cancel)
end
describe '#execute' do
context 'when fork is up-to-date with the upstream' do
it 'does not perform merge' do
expect_to_cancel_exclusive_lease
expect(project.repository).not_to receive(:merge_to_branch)
expect(project.repository).not_to receive(:ff_merge)
expect(service.execute).to be_success
end
end
context 'when fork is behind the upstream' do
let_it_be(:base_commit) { source_project.commit.sha }
before_all do
source_project.repository.commit_files(
user,
branch_name: source_project.repository.root_ref, message: 'Commit to root ref',
actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'One more' }]
)
source_project.repository.commit_files(
user,
branch_name: source_project.repository.root_ref, message: 'Another commit to root ref',
actions: [{ action: :create, file_path: 'encoding/NEW-CHANGELOG', content: 'One more time' }]
)
end
before do
project.repository.create_branch(fork_branch, base_commit)
end
context 'when fork is not ahead of the upstream' do
let(:fork_branch) { 'fork-without-new-commits' }
it 'updates the fork using ff merge' do
expect_to_cancel_exclusive_lease
expect(project.commit(fork_branch).sha).to eq(base_commit)
expect(project.repository).to receive(:ff_merge)
.with(user, source_project.commit.sha, fork_branch, target_sha: base_commit)
.and_call_original
expect do
expect(service.execute).to be_success
end.to change { details.counts }.from({ ahead: 0, behind: 2 }).to({ ahead: 0, behind: 0 })
end
end
context 'when fork is ahead of the upstream' do
context 'and has conflicts with the upstream', :use_clean_rails_redis_caching do
let(:fork_branch) { 'fork-with-conflicts' }
it 'returns an error' do
project.repository.commit_files(
user,
branch_name: fork_branch, message: 'Committing something',
actions: [{ action: :create, file_path: 'encoding/CHANGELOG', content: 'New file' }]
)
expect_to_cancel_exclusive_lease
expect(details).not_to have_conflicts
expect do
result = service.execute
expect(result).to be_error
expect(result.message).to eq("9:merging commits: merge: there are conflicting files.")
end.not_to change { details.counts }
expect(details).to have_conflicts
end
end
context 'and does not have conflicts with the upstream' do
let(:fork_branch) { 'fork-with-new-commits' }
it 'updates the fork using merge' do
project.repository.commit_files(
user,
branch_name: fork_branch, message: 'Committing completely new changelog',
actions: [{ action: :create, file_path: 'encoding/COMPLETELY-NEW-CHANGELOG', content: 'New file' }]
)
commit_message = "Merge branch #{source_project.path}:#{source_project.default_branch} into #{fork_branch}"
expect(project.repository).to receive(:merge_to_branch).with(
user,
source_sha: source_project.commit.sha,
target_branch: fork_branch,
target_sha: project.commit(fork_branch).sha,
message: commit_message
).and_call_original
expect_to_cancel_exclusive_lease
expect do
expect(service.execute).to be_success
end.to change { details.counts }.from({ ahead: 1, behind: 2 }).to({ ahead: 2, behind: 0 })
commits = project.repository.commits_between(source_project.commit.sha, project.commit(fork_branch).sha)
expect(commits.map(&:message)).to eq([
"Committing completely new changelog",
commit_message
])
end
end
end
context 'when a merge cannot happen due to another ongoing merge' do
it 'does not merge' do
expect(service).to receive(:perform_merge).and_return(nil)
result = service.execute
expect(result).to be_error
expect(result.message).to eq(described_class::ONGOING_MERGE_ERROR)
end
end
context 'when upstream branch contains lfs reference' do
let(:source_project) { create(:project, :repository, :public) }
let(:project) { fork_project(source_project, user, { repository: true }) }
let(:fork_branch) { 'fork-fetches-lfs-pointers' }
before do
source_project.change_head('lfs')
allow(source_project).to receive(:lfs_enabled?).and_return(true)
allow(project).to receive(:lfs_enabled?).and_return(true)
create_file_in_repo(source_project, 'lfs', 'lfs', 'one.lfs', 'One')
create_file_in_repo(source_project, 'lfs', 'lfs', 'two.lfs', 'Two')
end
it 'links fetched lfs objects to the fork project', :aggregate_failures do
expect_to_cancel_exclusive_lease
expect do
expect(service.execute).to be_success
end.to change { project.reload.lfs_objects.size }.from(0).to(2)
.and change { details.counts }.from({ ahead: 0, behind: 3 }).to({ ahead: 0, behind: 0 })
expect(project.lfs_objects).to match_array(source_project.lfs_objects)
end
context 'and there are too many of them for a single sync' do
let(:fork_branch) { 'fork-too-many-lfs-pointers' }
it 'updates the fork successfully' do
expect_to_cancel_exclusive_lease
stub_const('Projects::LfsPointers::LfsLinkService::MAX_OIDS', 1)
expect do
result = service.execute
expect(result).to be_error
expect(result.message).to eq('Too many LFS object ids to link, please push them manually')
end.not_to change { details.counts }
end
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Projects
module Prometheus
module Metrics
# Destroys a single Prometheus metric record; the metric itself is
# provided via Metrics::BaseService.
class DestroyService < Metrics::BaseService
# Returns the result of ActiveRecord #destroy (the destroyed record,
# or false when destruction is halted by callbacks).
def execute
metric.destroy
end
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::Prometheus::Metrics::DestroyService, feature_category: :metrics do
let(:metric) { create(:prometheus_metric) }
subject { described_class.new(metric) }
# find_by returns nil (rather than raising) once the record is gone.
it 'destroys metric' do
subject.execute
expect(PrometheusMetric.find_by(id: metric.id)).to be_nil
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module Projects
  module Prometheus
    module Alerts
      # Ingests Prometheus Alertmanager webhook payloads for a project and
      # hands every alert in the payload to
      # AlertManagement::ProcessPrometheusAlertService.
      class NotifyService < ::BaseProjectService
        include Gitlab::Utils::StrongMemoize
        include ::AlertManagement::Responses

        # This set of keys identifies a payload as a valid Prometheus
        # payload and thus processable by this service. See also
        # https://prometheus.io/docs/alerting/configuration/#webhook_config
        REQUIRED_PAYLOAD_KEYS = %w[
          version groupKey status receiver groupLabels commonLabels
          commonAnnotations externalURL alerts
        ].to_set.freeze

        SUPPORTED_VERSION = '4'

        # If feature flag :prometheus_notify_max_alerts is enabled truncate
        # alerts to 100 and process only them.
        # If feature flag is disabled process any amount of alerts.
        #
        # This is to mitigate incident:
        # https://gitlab.com/gitlab-com/gl-infra/production/-/issues/6086
        PROCESS_MAX_ALERTS = 100

        def initialize(project, params)
          super(project: project, params: params.to_h)
        end

        # token       - shared secret sent by the webhook caller; checked
        #               against the HTTP integration or the manual setting.
        # integration - optional HTTP integration the payload was posted to.
        #
        # Returns a response object from ::AlertManagement::Responses:
        # bad_request / unprocessable_entity / unauthorized on failure,
        # created once all alerts have been processed.
        def execute(token, integration = nil)
          return bad_request unless valid_payload_size?
          return unprocessable_entity unless self.class.processable?(params)
          return unauthorized unless valid_alert_manager_token?(token, integration)

          truncate_alerts! if max_alerts_exceeded?
          process_prometheus_alerts(integration)

          created
        end

        # True when the payload looks like a supported Alertmanager webhook
        # payload: all required keys present and a supported version.
        def self.processable?(payload)
          # Workaround for https://gitlab.com/gitlab-org/gitlab/-/issues/220496
          return false unless payload

          REQUIRED_PAYLOAD_KEYS.subset?(payload.keys.to_set) &&
            payload['version'] == SUPPORTED_VERSION
        end

        private

        # Rejects payloads that exceed the configured size/depth limits.
        def valid_payload_size?
          Gitlab::Utils::DeepSize.new(params).valid?
        end

        # Truncation only applies while the ops feature flag is enabled.
        def max_alerts_exceeded?
          return false unless Feature.enabled?(:prometheus_notify_max_alerts, project, type: :ops)

          alerts.size > PROCESS_MAX_ALERTS
        end

        # Logs a warning and keeps only the first PROCESS_MAX_ALERTS alerts.
        def truncate_alerts!
          Gitlab::AppLogger.warn(
            message: 'Prometheus payload exceeded maximum amount of alerts. Truncating alerts.',
            project_id: project.id,
            alerts: {
              total: alerts.size,
              max: PROCESS_MAX_ALERTS
            }
          )

          params['alerts'] = alerts.first(PROCESS_MAX_ALERTS)
        end

        def alerts
          params['alerts']
        end

        # Accepts the request when either the HTTP integration token or the
        # manual-configuration token matches.
        def valid_alert_manager_token?(token, integration)
          valid_for_alerts_endpoint?(token, integration) ||
            valid_for_manual?(token)
        end

        def valid_for_manual?(token)
          # If migration from Integrations::Prometheus to
          # AlertManagement::HttpIntegrations is complete,
          # we should use the HttpIntegration as SSOT.
          # Remove with https://gitlab.com/gitlab-org/gitlab/-/issues/409734
          return false if project.alert_management_http_integrations
            .for_endpoint_identifier('legacy-prometheus')
            .any?

          prometheus = project.find_or_initialize_integration('prometheus')
          return false unless prometheus.manual_configuration?

          # Intentional assignment: branch on whether an alerting setting exists.
          if setting = project.alerting_setting
            compare_token(token, setting.token)
          else
            # Without a configured setting, only token-less requests pass.
            token.nil?
          end
        end

        def valid_for_alerts_endpoint?(token, integration)
          return false unless integration&.active?

          compare_token(token, integration.token)
        end

        # Timing-safe comparison; nil when either side is missing.
        def compare_token(expected, actual)
          return unless expected && actual

          ActiveSupport::SecurityUtils.secure_compare(expected, actual)
        end

        def process_prometheus_alerts(integration)
          alerts.map do |alert|
            AlertManagement::ProcessPrometheusAlertService
              .new(project, alert, integration: integration)
              .execute
          end
        end
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Covers token authorization (manual config vs. HTTP integration), incident
# settings side effects, alert truncation behind the ops feature flag, and
# payload validation for the Prometheus notify endpoint.
RSpec.describe Projects::Prometheus::Alerts::NotifyService, feature_category: :incident_management do
  include PrometheusHelpers
  using RSpec::Parameterized::TableSyntax

  let_it_be_with_reload(:project) { create(:project) }

  let_it_be_with_refind(:setting) do
    create(:project_incident_management_setting, project: project, send_email: true, create_issue: true)
  end

  let(:service) { described_class.new(project, payload) }
  let(:token_input) { 'token' }

  subject { service.execute(token_input) }

  context 'with valid payload' do
    let(:payload_raw) { prometheus_alert_payload(firing: ['Alert A'], resolved: ['Alert B']) }
    let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
    let(:payload_alert_firing) { payload_raw['alerts'].first }
    let(:token) { 'token' }
    let(:source) { 'Prometheus' }

    context 'with manual prometheus installation' do
      # Each row: alerting setting present?, its stored token, the token the
      # caller sends, and the expected outcome.
      where(:alerting_setting, :configured_token, :token_input, :result) do
        true  | token | token | :success
        true  | token | 'x'   | :failure
        true  | token | nil   | :failure
        false | nil   | nil   | :success
        false | nil   | token | :failure
      end

      with_them do
        let(:alert_manager_token) { token_input }

        before do
          create(:prometheus_integration, project: project)

          if alerting_setting
            create(
              :project_alerting_setting,
              project: project,
              token: configured_token
            )
          end
        end

        # Pick the shared-example group matching the table row's outcome.
        case result = params[:result]
        when :success
          it_behaves_like 'processes one firing and one resolved prometheus alerts'
        when :failure
          it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
        else
          raise "invalid result: #{result.inspect}"
        end
      end
    end

    context 'with HTTP integration' do
      where(:active, :token, :result) do
        :active   | :valid   | :success
        :active   | :invalid | :failure
        :active   | nil      | :failure
        :inactive | :valid   | :failure
        nil       | nil      | :failure
      end

      with_them do
        let(:valid) { integration.token }
        let(:invalid) { 'invalid token' }
        let(:token_input) { public_send(token) if token }
        let(:integration) { create(:alert_management_http_integration, active, project: project) if active }

        subject { service.execute(token_input, integration) }

        case result = params[:result]
        when :success
          it_behaves_like 'processes one firing and one resolved prometheus alerts'
        when :failure
          it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
        else
          raise "invalid result: #{result.inspect}"
        end
      end

      context 'with simultaneous manual configuration' do
        let_it_be(:alerting_setting) { create(:project_alerting_setting, :with_http_integration, project: project) }
        let_it_be(:old_prometheus_integration) { create(:prometheus_integration, project: project) }
        let_it_be(:integration) { project.alert_management_http_integrations.last! }

        subject { service.execute(integration.token, integration) }

        it_behaves_like 'processes one firing and one resolved prometheus alerts'

        context 'when HTTP integration is inactive' do
          before do
            integration.update!(active: false)
          end

          it_behaves_like 'alerts service responds with an error and takes no actions', :unauthorized
        end
      end
    end

    context 'incident settings' do
      before do
        create(:prometheus_integration, project: project)
        create(:project_alerting_setting, project: project, token: token)
      end

      it_behaves_like 'processes one firing and one resolved prometheus alerts'

      context 'when incident_management_setting does not exist' do
        before do
          setting.destroy!
        end

        it { is_expected.to be_success }
        include_examples 'does not send alert notification emails'
        include_examples 'does not process incident issues'
      end

      context 'incident_management_setting.send_email is false' do
        before do
          setting.update!(send_email: false)
        end

        it { is_expected.to be_success }
        include_examples 'does not send alert notification emails'
      end

      context 'incident_management_setting.create_issue is false' do
        before do
          setting.update!(create_issue: false)
        end

        it { is_expected.to be_success }
        include_examples 'does not process incident issues'
      end
    end

    context 'process Alert Management alerts' do
      let(:integration) { build_stubbed(:alert_management_http_integration, project: project, token: token) }

      subject { service.execute(token_input, integration) }

      context 'with multiple firing alerts and resolving alerts' do
        let(:payload_raw) do
          prometheus_alert_payload(firing: ['Alert A', 'Alert A'], resolved: ['Alert B'])
        end

        it 'processes Prometheus alerts' do
          # One ProcessPrometheusAlertService call per alert in the payload.
          expect(AlertManagement::ProcessPrometheusAlertService)
            .to receive(:new)
            .with(project, kind_of(Hash), integration: integration)
            .exactly(3).times
            .and_call_original

          subject
        end
      end
    end

    context 'when payload exceeds max amount of processable alerts' do
      # We are defining 2 alerts in payload_raw above
      let(:max_alerts) { 1 }
      let(:fingerprint) { prometheus_alert_payload_fingerprint('Alert A') }

      before do
        stub_const("#{described_class}::PROCESS_MAX_ALERTS", max_alerts)

        create(:prometheus_integration, project: project)
        create(:project_alerting_setting, project: project, token: token)
        create(:alert_management_alert, project: project, fingerprint: fingerprint)

        allow(Gitlab::AppLogger).to receive(:warn)
      end

      shared_examples 'process truncated alerts' do
        it 'returns 201 but skips processing and logs a warning', :aggregate_failures do
          expect(subject).to be_success
          expect(subject.payload).to eq({})
          expect(subject.http_status).to eq(:created)
          expect(Gitlab::AppLogger)
            .to have_received(:warn)
            .with(
              message: 'Prometheus payload exceeded maximum amount of alerts. Truncating alerts.',
              project_id: project.id,
              alerts: {
                total: 2,
                max: max_alerts
              })
        end
      end

      shared_examples 'process all alerts' do
        it 'returns 201 and process alerts without warnings', :aggregate_failures do
          expect(subject).to be_success
          expect(subject.payload).to eq({})
          expect(subject.http_status).to eq(:created)
          expect(Gitlab::AppLogger).not_to have_received(:warn)
        end
      end

      context 'with feature flag globally enabled' do
        before do
          stub_feature_flags(prometheus_notify_max_alerts: true)
        end

        include_examples 'process truncated alerts'
      end

      context 'with feature flag enabled on project' do
        before do
          stub_feature_flags(prometheus_notify_max_alerts: project)
        end

        include_examples 'process truncated alerts'
      end

      context 'with feature flag enabled on unrelated project' do
        let(:another_project) { create(:project) }

        before do
          stub_feature_flags(prometheus_notify_max_alerts: another_project)
        end

        include_examples 'process all alerts'
      end

      context 'with feature flag disabled' do
        before do
          stub_feature_flags(prometheus_notify_max_alerts: false)
        end

        include_examples 'process all alerts'
      end
    end
  end

  context 'with invalid payload' do
    context 'when payload is not processable' do
      let(:payload) { {} }

      before do
        allow(described_class).to receive(:processable?).with(payload)
          .and_return(false)
      end

      it_behaves_like 'alerts service responds with an error and takes no actions', :unprocessable_entity
    end

    context 'when the payload is too big' do
      let(:payload_raw) { { 'the-payload-is-too-big' => true } }
      let(:payload) { ActionController::Parameters.new(payload_raw).permit! }

      before do
        stub_const('::Gitlab::Utils::DeepSize::DEFAULT_MAX_DEPTH', 0)
      end

      it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request
    end
  end

  describe '.processable?' do
    let(:valid_payload) { prometheus_alert_payload }

    subject { described_class.processable?(payload) }

    context 'with valid payload' do
      let(:payload) { valid_payload }

      it { is_expected.to eq(true) }

      context 'containing unrelated keys' do
        let(:payload) { valid_payload.merge('unrelated' => 'key') }

        it { is_expected.to eq(true) }
      end
    end

    context 'with invalid payload' do
      # One example per required key: dropping any of them must fail.
      where(:missing_key) do
        described_class::REQUIRED_PAYLOAD_KEYS.to_a
      end

      with_them do
        let(:payload) { valid_payload.except(missing_key) }

        it { is_expected.to eq(false) }
      end
    end

    context 'with unsupported version' do
      let(:payload) { valid_payload.merge('version' => '5') }

      it { is_expected.to eq(false) }
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

# This service yields operation on each download link from a remote source based on the
# oids provided
module Projects
  module LfsPointers
    class LfsDownloadLinkListService < BaseService
      DOWNLOAD_ACTION = 'download'

      # This could be different per server, but it seems like a reasonable value to start with.
      # https://github.com/git-lfs/git-lfs/issues/419
      REQUEST_BATCH_SIZE = 100

      DownloadLinksError = Class.new(StandardError)
      DownloadLinkNotFound = Class.new(StandardError)
      DownloadLinksRequestEntityTooLargeError = Class.new(StandardError)

      attr_reader :remote_uri

      def initialize(project, remote_uri: nil)
        super(project)

        @remote_uri = remote_uri
      end

      # - oids: hash of oids to query. The structure is { lfs_file_oid => lfs_file_size }
      #
      # Yields an LfsDownloadObject for each resolved link, batch-by-batch.
      # No-op unless the project has LFS enabled, a remote URI is set and
      # oids are present.
      def each_link(oids, &block)
        return unless project&.lfs_enabled? && remote_uri && oids.present?

        download_links_in_batches(oids, &block)
      end

      private

      # Requests links in slices of `batch_size`. When the server rejects the
      # request body as too large, the batch size is halved and the whole list
      # is retried; gives up once the batch cannot shrink meaningfully further.
      def download_links_in_batches(oids, batch_size = REQUEST_BATCH_SIZE, &block)
        oids.each_slice(batch_size) do |batch|
          download_links_for(batch).each(&block)
        end
      rescue DownloadLinksRequestEntityTooLargeError => e
        # Log this exception to see how often it happens
        Gitlab::ErrorTracking
          .track_exception(e, project_id: project&.id, batch_size: batch_size, oids_count: oids.count)

        # Try again with a smaller batch
        batch_size /= 2
        retry if batch_size > REQUEST_BATCH_SIZE / 3

        raise DownloadLinksError, 'Unable to download due to RequestEntityTooLarge errors'
      end

      # Posts one LFS Batch API request for the given oids and returns an
      # array of LfsDownloadObject built from the response.
      #
      # Raises DownloadLinksRequestEntityTooLargeError on HTTP 413,
      # DownloadLinksError on any other failure or malformed response.
      def download_links_for(oids)
        response = Gitlab::HTTP.post(remote_uri, body: request_body(oids), headers: headers)

        raise DownloadLinksRequestEntityTooLargeError if response.request_entity_too_large?
        raise DownloadLinksError, response.message unless response.success?

        # Since the LFS Batch API may return a Content-Type of
        # application/vnd.git-lfs+json
        # (https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md#requests),
        # HTTParty does not know this is actually JSON.
        data = Gitlab::Json.parse(response.body)

        # Fix: the message previously read "did return any objects".
        raise DownloadLinksError, "LFS Batch API did not return any objects" unless data.is_a?(Hash) && data.key?('objects')

        parse_response_links(data['objects'])
      rescue JSON::ParserError
        raise DownloadLinksError, "LFS Batch API response is not JSON"
      end

      # Builds LfsDownloadObject entries from the Batch API `objects` array.
      # Entries without a usable download href are logged and skipped rather
      # than aborting the whole batch.
      def parse_response_links(objects_response)
        objects_response.each_with_object([]) do |entry, link_list|
          link = entry.dig('actions', DOWNLOAD_ACTION, 'href')
          headers = entry.dig('actions', DOWNLOAD_ACTION, 'header')

          raise DownloadLinkNotFound unless link

          link_list << LfsDownloadObject.new(
            oid: entry['oid'],
            size: entry['size'],
            headers: headers,
            link: add_credentials(link)
          )
        rescue DownloadLinkNotFound, Addressable::URI::InvalidURIError
          log_error("Link for Lfs Object with oid #{entry['oid']} not found or invalid.")
        end
      end

      def request_body(oids)
        body = {
          operation: DOWNLOAD_ACTION,
          objects: oids.map { |oid, size| { oid: oid, size: size } }
        }

        Gitlab::Json.dump(body)
      end

      def headers
        {
          'Accept' => LfsRequest::CONTENT_TYPE,
          'Content-Type' => LfsRequest::CONTENT_TYPE
        }.freeze
      end

      def add_credentials(link)
        uri = Addressable::URI.parse(link)

        if should_add_credentials?(uri)
          uri.user = remote_uri.user
          uri.password = remote_uri.password
        end

        uri.to_s
      end

      # The download link can be a local url or an object storage url
      # If the download link has the same host as the import url then
      # we add the same credentials because we may need them
      def should_add_credentials?(link_uri)
        url_credentials? && link_uri.host == remote_uri.host
      end

      def url_credentials?
        remote_uri.user.present? || remote_uri.password.present?
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService, feature_category: :source_code_management do
  let(:import_url) { 'http://www.gitlab.com/demo/repo.git' }
  let(:lfs_endpoint) { "#{import_url}/info/lfs/objects/batch" }
  let!(:project) { create(:project, import_url: import_url) }
  let(:new_oids) { { 'oid1' => 123, 'oid2' => 125 } }
  let(:headers) { { 'X-Some-Header' => '456' } }
  let(:remote_uri) { URI.parse(lfs_endpoint) }

  let(:request_object) { HTTParty::Request.new(Net::HTTP::Post, '/') }
  let(:parsed_block) { lambda {} }
  let(:success_net_response) { Net::HTTPOK.new('', '', '') }
  let(:response) { Gitlab::HTTP::Response.new(request_object, net_response, parsed_block) }

  # Builds a Batch API style JSON body with one download action per oid.
  def objects_response(oids)
    body = oids.map do |oid, size|
      {
        'oid' => oid, 'size' => size,
        'actions' => {
          'download' => { 'href' => "#{import_url}/gitlab-lfs/objects/#{oid}", header: headers }
        }
      }
    end

    Struct.new(:success?, :objects).new(true, body).to_json
  end

  # Wraps a Net::HTTP response object into a Gitlab::HTTP::Response double.
  def custom_response(net_response, body = nil)
    allow(net_response).to receive(:body).and_return(body)
    Gitlab::HTTP::Response.new(request_object, net_response, parsed_block)
  end

  # Batch API entry lacking the 'actions' download href.
  let(:invalid_object_response) do
    [
      'oid' => 'whatever',
      'size' => 123
    ]
  end

  subject(:service) { described_class.new(project, remote_uri: remote_uri) }

  before do
    allow(project).to receive(:lfs_enabled?).and_return(true)
    response = custom_response(success_net_response, objects_response(new_oids))
    allow(Gitlab::HTTP).to receive(:post).and_return(response)
  end

  describe '#each_link' do
    it 'retrieves each download link of every non existent lfs object' do
      links = []
      service.each_link(new_oids) { |lfs_download_object| links << lfs_download_object.link }

      expect(links).to contain_exactly(
        "#{import_url}/gitlab-lfs/objects/oid1",
        "#{import_url}/gitlab-lfs/objects/oid2"
      )
    end

    it 'stores headers' do
      expected_headers = []
      service.each_link(new_oids) do |lfs_download_object|
        expected_headers << lfs_download_object.headers
      end

      expect(expected_headers).to contain_exactly(headers, headers)
    end

    context 'when lfs objects size is larger than the batch size' do
      def stub_successful_request(batch)
        response = custom_response(success_net_response, objects_response(batch))
        stub_request(batch, response)
      end

      def stub_entity_too_large_error_request(batch)
        entity_too_large_net_response = Net::HTTPRequestEntityTooLarge.new('', '', '')
        response = custom_response(entity_too_large_net_response)
        stub_request(batch, response)
      end

      def stub_request(batch, response)
        expect(Gitlab::HTTP).to receive(:post).with(
          remote_uri,
          {
            body: { operation: 'download', objects: batch.map { |k, v| { oid: k, size: v } } }.to_json,
            headers: subject.send(:headers)
          }
        ).and_return(response)
      end

      let(:new_oids) { { 'oid1' => 123, 'oid2' => 125, 'oid3' => 126, 'oid4' => 127, 'oid5' => 128 } }

      context 'when batch size' do
        before do
          stub_const("#{described_class.name}::REQUEST_BATCH_SIZE", 2)

          data = new_oids.to_a
          stub_successful_request([data[0], data[1]])
          stub_successful_request([data[2], data[3]])
          stub_successful_request([data[4]])
        end

        it 'retrieves them in batches' do
          checksum = 0
          service.each_link(new_oids) do |lfs_download_object|
            expect(lfs_download_object.link).to eq "#{import_url}/gitlab-lfs/objects/#{lfs_download_object.oid}"
            checksum += 1
          end

          expect(checksum).to eq new_oids.size
        end
      end

      context 'when request fails with PayloadTooLarge error' do
        let(:error_class) { described_class::DownloadLinksRequestEntityTooLargeError }

        context 'when the smaller batch eventually works' do
          before do
            stub_const("#{described_class.name}::REQUEST_BATCH_SIZE", 5)

            data = new_oids.to_a
            # with the batch size of 5
            stub_entity_too_large_error_request(data)
            # with the batch size of 2
            stub_successful_request([data[0], data[1]])
            stub_successful_request([data[2], data[3]])
            stub_successful_request([data[4]])
          end

          it 'retreives them eventually and logs exceptions' do
            expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
              an_instance_of(error_class), project_id: project.id, batch_size: 5, oids_count: 5
            )

            checksum = 0
            service.each_link(new_oids) do |lfs_download_object|
              expect(lfs_download_object.link).to eq "#{import_url}/gitlab-lfs/objects/#{lfs_download_object.oid}"
              checksum += 1
            end

            expect(checksum).to eq new_oids.size
          end
        end

        context 'when batch size cannot be any smaller' do
          before do
            stub_const("#{described_class.name}::REQUEST_BATCH_SIZE", 5)

            data = new_oids.to_a
            # with the batch size of 5
            stub_entity_too_large_error_request(data)
            # with the batch size of 2
            stub_entity_too_large_error_request([data[0], data[1]])
          end

          it 'raises an error and logs exceptions' do
            expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
              an_instance_of(error_class), project_id: project.id, batch_size: 5, oids_count: 5
            )
            expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
              an_instance_of(error_class), project_id: project.id, batch_size: 2, oids_count: 5
            )

            expect { service.each_link(new_oids) }.to raise_error(described_class::DownloadLinksError)
          end
        end
      end
    end

    context 'credentials' do
      context 'when the download link and the lfs_endpoint have the same host' do
        context 'when lfs_endpoint has credentials' do
          # NOTE(review): this URL looks redacted by a dataset scrubber
          # ("[email protected]"); the assertions below suggest the original was
          # a "http://user:password@..." style URL — confirm against upstream.
          let(:import_url) { 'http://user:[email protected]/demo/repo.git' }

          it 'adds credentials to the download_link' do
            checksum = 0
            service.each_link(new_oids) do |lfs_download_object|
              expect(lfs_download_object.link.starts_with?('http://user:password@')).to be_truthy
              checksum += 1
            end

            expect(checksum).to eq new_oids.size
          end
        end

        context 'when lfs_endpoint does not have any credentials' do
          it 'does not add any credentials' do
            checksum = 0
            service.each_link(new_oids) do |lfs_download_object|
              expect(lfs_download_object.link.starts_with?('http://user:password@')).to be_falsey
              checksum += 1
            end

            expect(checksum).to eq new_oids.size
          end
        end
      end

      context 'when the download link and the lfs_endpoint have different hosts' do
        # NOTE(review): URL likely redacted ("[email protected]") — see note above.
        let(:import_url_with_credentials) { 'http://user:[email protected]/demo/repo.git' }
        let(:lfs_endpoint) { "#{import_url_with_credentials}/info/lfs/objects/batch" }

        it 'downloads without any credentials' do
          checksum = 0
          service.each_link(new_oids) do |lfs_download_object|
            expect(lfs_download_object.link.starts_with?('http://user:password@')).to be_falsey
            checksum += 1
          end

          expect(checksum).to eq new_oids.size
        end
      end
    end
  end

  describe '#download_links_for' do
    context 'if request fails' do
      before do
        request_timeout_net_response = Net::HTTPRequestTimeout.new('', '', '')
        response = custom_response(request_timeout_net_response)
        allow(Gitlab::HTTP).to receive(:post).and_return(response)
      end

      it 'raises an error' do
        expect { subject.send(:download_links_for, new_oids) }.to raise_error(described_class::DownloadLinksError)
      end
    end

    shared_examples 'JSON parse errors' do |body|
      it 'raises an error' do
        response = custom_response(success_net_response)
        allow(response).to receive(:body).and_return(body)
        allow(Gitlab::HTTP).to receive(:post).and_return(response)

        expect { subject.send(:download_links_for, new_oids) }.to raise_error(described_class::DownloadLinksError)
      end
    end

    it_behaves_like 'JSON parse errors', '{'
    it_behaves_like 'JSON parse errors', '{}'
    it_behaves_like 'JSON parse errors', '{ foo: 123 }'
  end

  describe '#parse_response_links' do
    it 'does not add oid entry if href not found' do
      expect(subject).to receive(:log_error).with("Link for Lfs Object with oid whatever not found or invalid.")

      result = subject.send(:parse_response_links, invalid_object_response)

      expect(result).to be_empty
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

# Coordinates the retrieval of a project's LFS objects: walks the list of
# downloadable objects and delegates each download to LfsDownloadService.
module Projects
  module LfsPointers
    class LfsImportService < BaseService
      # Downloads every LFS object listed for the project.
      #
      # Returns a success response hash when LFS is disabled or once all
      # downloads have been attempted; returns an error response hash with
      # the exception message when anything fails along the way.
      def execute
        return success unless project&.lfs_enabled?

        download_all_lfs_objects

        success
      rescue StandardError, GRPC::Core::CallError => e
        error(e.message)
      end

      private

      # Iterates the remote download list, one LfsDownloadService call per item.
      def download_all_lfs_objects
        list_service = LfsObjectDownloadListService.new(project)

        list_service.each_list_item do |download_object|
          LfsDownloadService.new(project, download_object).execute
        end
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe Projects::LfsPointers::LfsImportService, feature_category: :source_code_management do
  let(:project) { create(:project) }
  let(:user) { project.creator }
  let(:import_url) { 'http://www.gitlab.com/demo/repo.git' }
  # Two fake download-list items yielded by the stubbed list service.
  let(:oid_download_links) do
    [
      { 'oid1' => "#{import_url}/gitlab-lfs/objects/oid1" },
      { 'oid2' => "#{import_url}/gitlab-lfs/objects/oid2" }
    ]
  end

  subject { described_class.new(project, user) }

  context 'when lfs is enabled for the project' do
    before do
      allow(project).to receive(:lfs_enabled?).and_return(true)
    end

    it 'downloads lfs objects' do
      service = double
      expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
        expect(instance).to receive(:each_list_item)
          .and_yield(oid_download_links[0]).and_yield(oid_download_links[1])
      end
      # One LfsDownloadService per yielded item.
      expect(Projects::LfsPointers::LfsDownloadService).to receive(:new).and_return(service).twice
      expect(service).to receive(:execute).twice

      result = subject.execute

      expect(result[:status]).to eq :success
    end

    context 'when no downloadable lfs object links' do
      it 'does not call LfsDownloadService' do
        expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
          expect(instance).to receive(:each_list_item)
        end
        expect(Projects::LfsPointers::LfsDownloadService).not_to receive(:new)

        result = subject.execute

        expect(result[:status]).to eq :success
      end
    end

    context 'when an exception is raised' do
      it 'returns error' do
        error_message = "error message"
        expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
          expect(instance).to receive(:each_list_item).and_raise(StandardError, error_message)
        end

        result = subject.execute

        expect(result[:status]).to eq :error
        expect(result[:message]).to eq error_message
      end
    end

    context 'when an GRPC::Core::CallError exception raised' do
      it 'returns error' do
        error_message = "error message"
        expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
          expect(instance).to receive(:each_list_item).and_raise(GRPC::Core::CallError, error_message)
        end

        result = subject.execute

        expect(result[:status]).to eq :error
        expect(result[:message]).to eq error_message
      end
    end
  end

  context 'when lfs is not enabled for the project' do
    it 'does not download lfs objects' do
      allow(project).to receive(:lfs_enabled?).and_return(false)
      expect(Projects::LfsPointers::LfsObjectDownloadListService).not_to receive(:new)
      expect(Projects::LfsPointers::LfsDownloadService).not_to receive(:new)

      result = subject.execute

      expect(result[:status]).to eq :success
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

# Links already-known LFS objects (looked up by oid) to the given project.
module Projects
  module LfsPointers
    class LfsLinkService < BaseService
      TooManyOidsError = Class.new(StandardError)

      MAX_OIDS = ENV.fetch('GITLAB_LFS_MAX_OID_TO_FETCH', 100_000).to_i
      BATCH_SIZE = 1000

      # oids - array of LFS object oids to link.
      #
      # An optional block runs after validation but before any linking.
      # Returns the oids of the LFS objects that already existed (and are
      # now linked to the project); [] when LFS is disabled.
      def execute(oids)
        return [] unless project&.lfs_enabled?

        validate!(oids)

        yield if block_given?

        link_existing_lfs_objects(oids)
      end

      private

      # Refuses unbounded requests above MAX_OIDS.
      def validate!(oids)
        raise TooManyOidsError, 'Too many LFS object ids to link, please push them manually' if oids.size > MAX_OIDS
      end

      # Processes oids in slices of BATCH_SIZE, bulk-inserting the missing
      # project links for each slice that matches existing LFS objects.
      def link_existing_lfs_objects(oids)
        linked_oids = []
        batches_linked = 0

        oids.each_slice(BATCH_SIZE) do |oids_batch|
          # Load eagerly so the emptiness check does not issue an extra query.
          existing_objects = LfsObject.for_oids(oids_batch).load

          next if existing_objects.empty?

          rows = existing_objects
            .not_linked_to_project(project)
            .map { |lfs_object| { project_id: project.id, lfs_object_id: lfs_object.id } }

          ApplicationRecord.legacy_bulk_insert(:lfs_objects_projects, rows) # rubocop:disable Gitlab/BulkInsert

          batches_linked += 1
          linked_oids.concat(existing_objects.map(&:oid))
        end

        log_lfs_link_results(linked_oids.count, batches_linked)

        linked_oids
      end

      # Emits one structured log line summarizing the whole run.
      def log_lfs_link_results(lfs_objects_linked_count, iterations)
        Gitlab::Import::Logger.info(
          class: self.class.name,
          project_id: project.id,
          project_path: project.full_path,
          lfs_objects_linked_count: lfs_objects_linked_count,
          iterations: iterations)
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe Projects::LfsPointers::LfsLinkService, feature_category: :source_code_management do
  let_it_be(:project) { create(:project, lfs_enabled: true) }
  # Two LFS objects already linked to the project.
  let_it_be(:lfs_objects_project) { create_list(:lfs_objects_project, 2, project: project) }

  let(:new_oids) { { 'oid1' => 123, 'oid2' => 125 } }
  let(:all_oids) { LfsObject.pluck(:oid, :size).to_h.merge(new_oids) }
  let(:new_lfs_object) { create(:lfs_object) }
  let(:new_oid_list) { all_oids.merge(new_lfs_object.oid => new_lfs_object.size) }

  subject { described_class.new(project) }

  before do
    allow(project).to receive(:lfs_enabled?).and_return(true)
  end

  describe '#execute' do
    it 'raises an error when trying to link too many objects at once' do
      stub_const("#{described_class}::MAX_OIDS", 5)

      oids = Array.new(described_class::MAX_OIDS) { |i| "oid-#{i}" }
      oids << 'the straw'

      expect { subject.execute(oids) }.to raise_error(described_class::TooManyOidsError)
    end

    it 'executes a block after validation and before execution' do
      block = instance_double(Proc)

      expect(subject).to receive(:validate!).ordered
      expect(block).to receive(:call).ordered
      expect(subject).to receive(:link_existing_lfs_objects).ordered

      subject.execute([]) do
        block.call
      end
    end

    it 'links existing lfs objects to the project' do
      expect(project.lfs_objects.count).to eq 2

      linked = subject.execute(new_oid_list.keys)

      expect(project.lfs_objects.count).to eq 3
      expect(linked.size).to eq 3
    end

    it 'returns linked oids' do
      linked = lfs_objects_project.map(&:lfs_object).map(&:oid) << new_lfs_object.oid

      expect(subject.execute(new_oid_list.keys)).to contain_exactly(*linked)
    end

    it 'links in batches' do
      stub_const("#{described_class}::BATCH_SIZE", 3)

      expect(Gitlab::Import::Logger).to receive(:info).with(
        class: described_class.name,
        project_id: project.id,
        project_path: project.full_path,
        lfs_objects_linked_count: 7,
        iterations: 3
      )

      lfs_objects = create_list(:lfs_object, 7)
      linked = subject.execute(lfs_objects.pluck(:oid))

      expect(project.lfs_objects.count).to eq 9
      expect(linked.size).to eq 7
    end

    it 'only queries for the batch that will be processed', :aggregate_failures do
      stub_const("#{described_class}::BATCH_SIZE", 1)
      oids = %w[one two]

      expect(LfsObject).to receive(:for_oids).with(%w[one]).once.and_call_original
      expect(LfsObject).to receive(:for_oids).with(%w[two]).once.and_call_original

      subject.execute(oids)
    end

    it 'only queries 3 times' do
      # make sure that we don't count the queries in the setup
      new_oid_list

      # These are repeated for each batch of oids: maximum (MAX_OIDS / BATCH_SIZE) times
      # 1. Load the batch of lfs object ids that we might know already
      # 2. Load the objects that have not been linked to the project yet
      # 3. Insert the lfs_objects_projects for that batch
      expect { subject.execute(new_oid_list.keys) }.not_to exceed_query_limit(3)
    end

    context 'when MAX_OIDS is 5' do
      let(:max_oids) { 5 }
      let(:oids) { Array.new(max_oids) { |i| "oid-#{i}" } }

      before do
        stub_const("#{described_class}::MAX_OIDS", max_oids)
      end

      it 'does not raise an error when trying to link exactly the OID limit' do
        expect { subject.execute(oids) }.not_to raise_error
      end

      it 'raises an error when trying to link more than OID limit' do
        oids << 'the straw'

        expect { subject.execute(oids) }.to raise_error(described_class::TooManyOidsError)
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

# This service downloads and links lfs objects from a remote URL
module Projects
  module LfsPointers
    class LfsDownloadService < BaseService
      SizeError = Class.new(StandardError)
      OidError = Class.new(StandardError)
      ResponseError = Class.new(StandardError)

      # Threshold above which an already-known LfsObject is linked by
      # comparing its first fragments instead of re-downloading the file.
      LARGE_FILE_SIZE = 1.megabytes

      attr_reader :lfs_download_object

      delegate :oid, :size, :credentials, :sanitized_url, :headers, to: :lfs_download_object, prefix: :lfs

      # project             - the Project the LFS object belongs to.
      # lfs_download_object - an LfsDownloadObject (oid, size, link, headers).
      def initialize(project, lfs_download_object)
        super(project)

        @lfs_download_object = lfs_download_object
      end

      # Downloads the LFS file (or links an existing large LfsObject with the
      # same oid) and attaches it to the project.
      # Returns a success/error result hash, or nil when LFS is disabled.
      def execute
        return unless project&.lfs_enabled? && lfs_download_object
        return error("LFS file with oid #{lfs_oid} has invalid attributes") unless lfs_download_object.valid?
        return link_existing_lfs_object! if Feature.enabled?(:lfs_link_existing_object, project) && lfs_size > LARGE_FILE_SIZE && lfs_object

        wrap_download_errors do
          download_lfs_file!
        end
      end

      private

      # Converts any download failure into an error result.
      # NOTE: SizeError, OidError and ResponseError all inherit from
      # StandardError, so rescuing StandardError alone covers them — the
      # previous explicit class list was redundant.
      def wrap_download_errors(&block)
        yield
      rescue StandardError => e
        error("LFS file with oid #{lfs_oid} couldn't be downloaded from #{lfs_sanitized_url}: #{e.message}")
      end

      def download_lfs_file!
        with_tmp_file do |tmp_file|
          download_and_save_file!(tmp_file)
          project.lfs_objects << find_or_create_lfs_object(tmp_file)

          success
        end
      end

      # Reuses an LfsObject row with the same oid/size when one exists;
      # only uploads the freshly downloaded file when none is attached yet.
      def find_or_create_lfs_object(tmp_file)
        lfs_obj = LfsObject.safe_find_or_create_by!(
          oid: lfs_oid,
          size: lfs_size
        )

        lfs_obj.update!(file: tmp_file) unless lfs_obj.file.file

        lfs_obj
      end

      # Streams the remote file to disk while hashing it, then verifies both
      # the byte size and the SHA256 oid. The size is also checked while
      # streaming so an oversized response aborts early.
      def download_and_save_file!(file)
        digester = Digest::SHA256.new
        fetch_file do |fragment|
          if digest_fragment?(fragment)
            digester << fragment
            file.write(fragment)
          end

          raise_size_error! if file.size > lfs_size
        end

        raise_size_error! if file.size != lfs_size
        raise_oid_error! if digester.hexdigest != lfs_oid
      end

      # Only hash/persist fragments belonging to a successful response;
      # redirect bodies etc. are skipped.
      def digest_fragment?(fragment)
        fragment.http_response.is_a?(Net::HTTPSuccess)
      end

      def download_options
        http_options = { headers: lfs_headers, stream_body: true }

        return http_options if lfs_download_object.has_authorization_header?

        http_options.tap do |options|
          if lfs_credentials[:user].present? || lfs_credentials[:password].present?
            # Using authentication headers in the request
            options[:basic_auth] = { username: lfs_credentials[:user], password: lfs_credentials[:password] }
          end
        end
      end

      # Performs the streaming GET, retrying up to 3 attempts on connection
      # timeouts. Raises ResponseError on non-success response codes.
      def fetch_file(&block)
        attempts ||= 1
        response = Gitlab::HTTP.get(lfs_sanitized_url, download_options, &block)

        raise ResponseError, "Received error code #{response.code}" unless response.success?
      rescue Net::OpenTimeout
        raise if attempts >= 3

        attempts += 1
        retry
      end

      def with_tmp_file
        create_tmp_storage_dir

        File.open(tmp_filename, 'wb') do |file|
          yield file
        rescue StandardError => e
          # If the lfs file is successfully downloaded it will be removed
          # when it is added to the project's lfs files.
          # Nevertheless if any exception raises the file would remain
          # in the file system. Here we ensure to remove it
          File.unlink(file) if File.exist?(file)

          raise e
        end
      end

      def tmp_filename
        File.join(tmp_storage_dir, lfs_oid)
      end

      def create_tmp_storage_dir
        FileUtils.makedirs(tmp_storage_dir) unless Dir.exist?(tmp_storage_dir)
      end

      def tmp_storage_dir
        @tmp_storage_dir ||= File.join(storage_dir, 'tmp', 'download')
      end

      def storage_dir
        @storage_dir ||= Gitlab.config.lfs.storage_path
      end

      def raise_size_error!
        # Fixed typo in the user-visible message (was 'Size mistmatch').
        raise SizeError, 'Size mismatch'
      end

      def raise_oid_error!
        raise OidError, 'Oid mismatch'
      end

      def error(message, http_status = nil)
        log_error(message)

        super
      end

      def lfs_object
        @lfs_object ||= LfsObject.find_by_oid(lfs_oid)
      end

      # Compares the first fragments of the remote file against the stored
      # LfsObject; links it to the project when they match, otherwise
      # returns an error result.
      def link_existing_lfs_object!
        existing_file = lfs_object.file.open
        buffer_size = 0
        result = fetch_file do |fragment|
          unless fragment == existing_file.read(fragment.size)
            break error("LFS file with oid #{lfs_oid} cannot be linked with an existing LFS object")
          end

          buffer_size += fragment.size
          break success if buffer_size > LARGE_FILE_SIZE
        end

        project.lfs_objects << lfs_object

        result
      ensure
        existing_file&.close
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Spec for Projects::LfsPointers::LfsDownloadService. Downloads are stubbed
# with WebMock (StubRequests); lfs_content/oid/size are derived from a random
# 10-byte payload so size and SHA256 oid always agree by construction.
RSpec.describe Projects::LfsPointers::LfsDownloadService, feature_category: :source_code_management do
include StubRequests
let_it_be(:project) { create(:project) }
let(:lfs_content) { SecureRandom.random_bytes(10) }
let(:oid) { Digest::SHA256.hexdigest(lfs_content) }
let(:download_link) { "http://gitlab.com/#{oid}" }
let(:size) { lfs_content.size }
let(:lfs_object) { LfsDownloadObject.new(oid: oid, size: size, link: download_link) }
let(:local_request_setting) { false }
subject { described_class.new(project, lfs_object) }
before_all do
ApplicationSetting.create_from_defaults
end
before do
stub_application_setting(allow_local_requests_from_web_hooks_and_services: local_request_setting)
allow(project).to receive(:lfs_enabled?).and_return(true)
end
# The service must always clean up its tmp download file, pass or fail.
shared_examples 'lfs temporal file is removed' do
it do
subject.execute
expect(File.exist?(subject.send(:tmp_filename))).to be false
end
end
shared_examples 'no lfs object is created' do
it do
expect { subject.execute }.not_to change { LfsObject.count }
end
it 'returns error result' do
expect(subject.execute[:status]).to eq :error
end
it 'an error is logged' do
expect(subject).to receive(:log_error)
subject.execute
end
it_behaves_like 'lfs temporal file is removed'
end
shared_examples 'lfs object is created' do
it 'creates and associate the LFS object to project' do
expect(subject).to receive(:download_and_save_file!).and_call_original
expect { subject.execute }.to change { LfsObject.count }.by(1)
expect(LfsObject.first.projects).to include(project)
end
it 'returns success result' do
expect(subject.execute[:status]).to eq :success
end
it_behaves_like 'lfs temporal file is removed'
end
describe '#execute' do
# Happy path: stubbed endpoint returns the exact payload.
context 'when file download succeeds' do
before do
stub_full_request(download_link).to_return(body: lfs_content)
end
it_behaves_like 'lfs object is created'
it 'has the same oid' do
subject.execute
expect(LfsObject.first.oid).to eq oid
end
it 'has the same size' do
subject.execute
expect(LfsObject.first.size).to eq size
end
it 'stores the content' do
subject.execute
expect(File.binread(LfsObject.first.file.file.file)).to eq lfs_content
end
it 'streams the download' do
expected_options = { headers: anything, stream_body: true }
expect(Gitlab::HTTP).to receive(:get).with(anything, expected_options)
subject.execute
end
it 'skips read_total_timeout', :aggregate_failures do
stub_const('GitLab::HTTP::DEFAULT_READ_TOTAL_TIMEOUT', 0)
expect(ProjectCacheWorker).to receive(:perform_async).once
expect(Gitlab::Metrics::System).not_to receive(:monotonic_time)
expect(subject.execute).to include(status: :success)
end
end
context 'when file downloading response code is not success' do
before do
allow(Gitlab::HTTP).to receive(:get).and_return(code: 500, 'success?' => false)
end
it_behaves_like 'no lfs object is created'
it 'raise StandardError exception' do
expect(subject).to receive(:download_and_save_file!).and_raise(StandardError)
subject.execute
end
end
# fetch_file retries Net::OpenTimeout up to 3 attempts before giving up.
context 'when file downloading request timeout few times' do
before do
allow(Gitlab::HTTP).to receive(:get).and_raise(Net::OpenTimeout)
end
it_behaves_like 'no lfs object is created'
it 'retries to get LFS object 3 times before raising exception' do
subject.execute
expect(Gitlab::HTTP).to have_received(:get).exactly(3).times
end
end
context 'when file download returns a redirect' do
let(:redirect_link) { 'http://external-link' }
before do
stub_full_request(download_link).to_return(status: 301, body: 'You are being redirected', headers: { 'Location' => redirect_link })
stub_full_request(redirect_link).to_return(body: lfs_content)
end
it_behaves_like 'lfs object is created'
it 'correctly stores lfs object' do
subject.execute
new_lfs_object = LfsObject.first
expect(new_lfs_object).to have_attributes(oid: oid, size: size)
expect(File.binread(new_lfs_object.file.file.file)).to eq lfs_content
end
end
# Size/oid verification failures must not persist anything.
context 'when downloaded lfs file has a different size' do
let(:size) { 1 }
before do
stub_full_request(download_link).to_return(body: lfs_content)
end
it_behaves_like 'no lfs object is created'
it 'raise SizeError exception' do
expect(subject).to receive(:download_and_save_file!).and_raise(described_class::SizeError)
subject.execute
end
end
context 'when downloaded lfs file has a different oid' do
before do
stub_full_request(download_link).to_return(body: lfs_content)
allow_any_instance_of(Digest::SHA256).to receive(:hexdigest).and_return('foobar')
end
it_behaves_like 'no lfs object is created'
it 'raise OidError exception' do
expect(subject).to receive(:download_and_save_file!).and_raise(described_class::OidError)
subject.execute
end
end
context 'when an lfs object with the same oid already exists' do
let!(:existing_lfs_object) { create(:lfs_object, oid: oid) }
before do
stub_full_request(download_link).to_return(body: lfs_content)
end
it_behaves_like 'no lfs object is created'
it 'does not update the file attached to the existing LfsObject' do
expect { subject.execute }
.not_to change { existing_lfs_object.reload.file.file.file }
end
end
# Credentials embedded in the link are sent as HTTP basic auth; an explicit
# Authorization header takes precedence over basic auth.
context 'when credentials present' do
let(:download_link_with_credentials) { "http://user:[email protected]/#{oid}" }
let(:lfs_object) { LfsDownloadObject.new(oid: oid, size: size, link: download_link_with_credentials) }
let!(:request_stub) { stub_full_request(download_link).with(headers: { 'Authorization' => 'Basic dXNlcjpwYXNzd29yZA==' }).to_return(body: lfs_content) }
it 'the request adds authorization headers' do
subject.execute
expect(request_stub).to have_been_requested
end
context 'when Authorization header is present' do
let(:auth_header) { { 'Authorization' => 'Basic 12345' } }
let(:lfs_object) { LfsDownloadObject.new(oid: oid, size: size, link: download_link_with_credentials, headers: auth_header) }
let!(:request_stub) { stub_full_request(download_link).with(headers: auth_header).to_return(body: lfs_content) }
it 'request uses the header auth' do
subject.execute
expect(request_stub).to have_been_requested
end
end
end
# SSRF protection: local/private addresses are only allowed when the
# application setting permits them.
context 'when localhost requests are allowed' do
let(:download_link) { 'http://192.168.2.120' }
let(:local_request_setting) { true }
before do
stub_full_request(download_link, ip_address: '192.168.2.120').to_return(body: lfs_content)
end
it_behaves_like 'lfs object is created'
end
context 'when a bad URL is used' do
where(download_link: ['/etc/passwd', 'ftp://example.com', 'http://127.0.0.2', 'http://192.168.2.120'])
with_them do
it 'does not download the file' do
expect(subject).not_to receive(:download_lfs_file!)
expect { subject.execute }.not_to change { LfsObject.count }
end
end
end
context 'when the URL points to a redirected URL' do
context 'that is blocked' do
where(redirect_link: ['ftp://example.com', 'http://127.0.0.2', 'http://192.168.2.120'])
with_them do
before do
stub_full_request(download_link, ip_address: '192.168.2.120')
.to_return(status: 301, headers: { 'Location' => redirect_link })
end
it_behaves_like 'no lfs object is created'
end
end
context 'that is not blocked' do
let(:redirect_link) { "http://example.com/" }
before do
stub_full_request(download_link).to_return(status: 301, headers: { 'Location' => redirect_link })
stub_full_request(redirect_link).to_return(body: lfs_content)
end
it_behaves_like 'lfs object is created'
end
end
context 'when the lfs object attributes are invalid' do
let(:oid) { 'foobar' }
before do
expect(lfs_object).to be_invalid
end
it_behaves_like 'no lfs object is created'
it 'does not download the file' do
expect(subject).not_to receive(:download_lfs_file!)
subject.execute
end
end
# Large-file fast path: link an existing LfsObject by comparing the first
# LARGE_FILE_SIZE bytes instead of re-downloading the whole file.
context 'when a large lfs object with the same oid already exists' do
let!(:existing_lfs_object) { create(:lfs_object, :with_file, :correct_oid) }
before do
stub_const("#{described_class}::LARGE_FILE_SIZE", 500)
stub_full_request(download_link).to_return(body: lfs_content)
end
context 'and first fragments are the same' do
let(:lfs_content) { existing_lfs_object.file.read }
context 'when lfs_link_existing_object feature flag disabled' do
before do
stub_feature_flags(lfs_link_existing_object: false)
end
it 'does not call link_existing_lfs_object!' do
expect(subject).not_to receive(:link_existing_lfs_object!)
subject.execute
end
end
it 'returns success' do
expect(subject.execute).to eq({ status: :success })
end
it 'links existing lfs object to the project' do
expect { subject.execute }
.to change { project.lfs_objects.include?(existing_lfs_object) }.from(false).to(true)
end
end
context 'and first fragments diverges' do
let(:lfs_content) { SecureRandom.random_bytes(1000) }
let(:oid) { existing_lfs_object.oid }
it 'raises oid mismatch error' do
expect(subject.execute).to eq({
status: :error,
message: "LFS file with oid #{oid} cannot be linked with an existing LFS object"
})
end
it 'does not change lfs objects' do
expect { subject.execute }.not_to change { project.lfs_objects }
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

# This service discovers the Lfs files that are linked in repository,
# but not downloaded yet and yields the operation
# on each Lfs file link (url) to remote repository.
module Projects
  module LfsPointers
    class LfsObjectDownloadListService < BaseService
      include Gitlab::Utils::StrongMemoize

      HEAD_REV = 'HEAD'
      LFS_ENDPOINT_PATTERN = /^\t?url\s*=\s*(.+)$/
      LFS_BATCH_API_ENDPOINT = '/info/lfs/objects/batch'

      LfsObjectDownloadListError = Class.new(StandardError)

      # Yields an LfsDownloadObject for every LFS pointer present in the
      # repository but not yet downloaded to this project.
      def each_list_item(&block)
        return unless context_valid?

        # Downloading the required information and gathering it inside an
        # LfsDownloadObject for each oid
        LfsDownloadLinkListService
          .new(project, remote_uri: current_endpoint_uri)
          .each_link(missing_lfs_files, &block)
      rescue LfsDownloadLinkListService::DownloadLinksError => e
        raise LfsObjectDownloadListError, "The LFS objects download list couldn't be imported. Error: #{e.message}"
      end

      private

      # LFS must be enabled, and the configured endpoint must not live on a
      # third-party host (in which case LFS is disabled for the project).
      def context_valid?
        return false unless project&.lfs_enabled?

        if external_lfs_endpoint?
          # If the endpoint host is different from the import_url it means
          # that the repo is using a third party service for storing the LFS files.
          # In this case, we have to disable lfs in the project
          disable_lfs!
          false
        else
          true
        end
      end

      def external_lfs_endpoint?
        lfsconfig_endpoint_uri && lfsconfig_endpoint_uri.host != import_uri.host
      end

      def disable_lfs!
        unless project.update(lfs_enabled: false)
          raise LfsDownloadLinkListService::DownloadLinksError, "Invalid project state"
        end
      end

      # Retrieves all lfs pointers in the repository
      def lfs_pointers_in_repository
        @lfs_pointers_in_repository ||= LfsListService.new(project).execute
      end

      def existing_lfs_objects
        project.lfs_objects
      end

      # Maps the oid of every already-downloaded LFS object to its size.
      def existing_lfs_objects_hash
        existing_lfs_objects.find_each.with_object({}) do |lfs_object, oids|
          oids[lfs_object.oid] = lfs_object.size
        end
      end

      def missing_lfs_files
        lfs_pointers_in_repository.except(*existing_lfs_objects_hash.keys)
      end

      # Endpoint configured in the repository's .lfsconfig, if any; inherits
      # user/password from the import URL when the config omits them.
      def lfsconfig_endpoint_uri
        strong_memoize(:lfsconfig_endpoint_uri) do
          # Retrieveing the blob data from the .lfsconfig file
          data = project.repository.lfsconfig_for(HEAD_REV)
          # Parsing the data to retrieve the url
          endpoint_match = data&.match(LFS_ENDPOINT_PATTERN)

          if endpoint_match
            URI.parse(endpoint_match[1]).tap do |endpoint|
              endpoint.user ||= import_uri.user
              endpoint.password ||= import_uri.password
            end
          end
        end
      rescue URI::InvalidURIError
        raise LfsObjectDownloadListError, 'Invalid URL in .lfsconfig file'
      end

      def import_uri
        @import_uri ||= URI.parse(project.import_url)
      rescue URI::InvalidURIError
        raise LfsObjectDownloadListError, 'Invalid project import URL'
      end

      def current_endpoint_uri
        lfsconfig_endpoint_uri || default_endpoint_uri
      end

      # The import url must end with '.git' here we ensure it is
      def default_endpoint_uri
        @default_endpoint_uri ||= import_uri.dup.tap do |uri|
          path = uri.path.delete_suffix('/')
          path += '.git' unless path.ends_with?('.git')
          uri.path = path + LFS_BATCH_API_ENDPOINT
        end
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Spec for Projects::LfsPointers::LfsObjectDownloadListService. Two LFS
# objects already belong to the project; `oids` are the two still missing,
# so each_list_item should only yield links for the missing ones.
RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService, feature_category: :source_code_management do
let(:import_url) { 'http://www.gitlab.com/demo/repo.git' }
let(:default_endpoint) { "#{import_url}/info/lfs/objects/batch" }
let(:group) { create(:group, lfs_enabled: true) }
let!(:project) { create(:project, namespace: group, import_url: import_url, lfs_enabled: true) }
let!(:lfs_objects_project) { create_list(:lfs_objects_project, 2, project: project) }
let!(:existing_lfs_objects) { LfsObject.pluck(:oid, :size).to_h }
let(:oids) { { 'oid1' => 123, 'oid2' => 125 } }
let(:oid_download_links) do
[
{ 'oid1' => "#{import_url}/gitlab-lfs/objects/oid1" },
{ 'oid2' => "#{import_url}/gitlab-lfs/objects/oid2" }
]
end
let(:all_oids) { existing_lfs_objects.merge(oids) }
let(:remote_uri) { URI.parse(lfs_endpoint) }
subject { described_class.new(project) }
before do
# No .lfsconfig by default; the repository pointer list is stubbed.
allow(project.repository).to receive(:lfsconfig_for).and_return(nil)
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
allow_any_instance_of(Projects::LfsPointers::LfsListService).to receive(:execute).and_return(all_oids)
end
describe '#each_list_item' do
context 'when no lfs pointer is linked' do
before do
allow_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService)
.to receive(:each_link).with(oids)
.and_yield(oid_download_links[0])
.and_yield(oid_download_links[1])
end
it 'retrieves all lfs pointers in the project repository' do
expect(Projects::LfsPointers::LfsDownloadLinkListService)
.to receive(:new).with(project, remote_uri: URI.parse(default_endpoint))
.and_call_original
expect_any_instance_of(Projects::LfsPointers::LfsListService).to receive(:execute)
checksum = 0
subject.each_list_item { |lfs_object| checksum += 1 }
expect(checksum).to eq 2
end
context 'when no LFS objects exist' do
before do
project.lfs_objects.delete_all
end
it 'retrieves all LFS objects' do
expect(Projects::LfsPointers::LfsDownloadLinkListService)
.to receive(:new).with(project, remote_uri: URI.parse(default_endpoint)).and_call_original
expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService)
.to receive(:each_link).with(all_oids)
subject.each_list_item {}
end
end
context 'when some LFS objects already exist' do
it 'retrieves the download links of non-existent objects' do
expect_any_instance_of(Projects::LfsPointers::LfsDownloadLinkListService)
.to receive(:each_link).with(oids)
checksum = 0
subject.each_list_item { |lfs_object| checksum += 1 }
expect(checksum).to eq 2
end
end
end
# .lfsconfig handling: same-host endpoints are honored (inheriting
# credentials from import_url), third-party hosts disable LFS entirely.
context 'when lfsconfig file exists' do
before do
allow(project.repository).to receive(:lfsconfig_for).and_return("[lfs]\n\turl = #{lfs_endpoint}\n")
end
context 'when url points to the same import url host' do
let(:lfs_endpoint) { "#{import_url}/different_endpoint" }
let(:service) { instance_double(Projects::LfsPointers::LfsDownloadLinkListService, each_link: nil) }
it 'downloads lfs object using the new endpoint' do
expect(Projects::LfsPointers::LfsDownloadLinkListService)
.to receive(:new)
.with(project, remote_uri: remote_uri)
.and_return(service)
subject.each_list_item {}
end
context 'when import url has credentials' do
let(:import_url) { 'http://user:[email protected]/demo/repo.git' }
it 'adds the credentials to the new endpoint' do
expect(Projects::LfsPointers::LfsDownloadLinkListService)
.to receive(:new)
.with(
project,
remote_uri: URI.parse("http://user:[email protected]/demo/repo.git/different_endpoint")
)
.and_return(service)
subject.each_list_item {}
end
context 'when url has its own credentials' do
let(:lfs_endpoint) { "http://user1:[email protected]/demo/repo.git/different_endpoint" }
it 'does not add the import url credentials' do
expect(Projects::LfsPointers::LfsDownloadLinkListService)
.to receive(:new).with(project, remote_uri: remote_uri)
.and_return(service)
subject.each_list_item {}
end
end
end
end
context 'when url points to a third party service' do
let(:lfs_endpoint) { 'http://third_party_service.com/info/lfs/objects/' }
it 'disables lfs from the project' do
expect(project.lfs_enabled?).to be_truthy
subject.each_list_item {}
expect(project.lfs_enabled?).to be_falsey
end
it 'does not download anything' do
expect_any_instance_of(Projects::LfsPointers::LfsListService).not_to receive(:execute)
subject.each_list_item {}
end
end
end
end
describe '#default_endpoint_uri' do
let(:import_url) { 'http://www.gitlab.com/demo/repo' }
it 'adds suffix .git if the url does not have it' do
expect(subject.send(:default_endpoint_uri).path).to match(/repo.git/)
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Emails
  # Re-sends the Devise confirmation instructions for an email address.
  class ConfirmService < ::Emails::BaseService
    # email - an Email record; triggers delivery of fresh confirmation
    # instructions (enqueued on the mailers queue by Devise).
    def execute(email)
      email.resend_confirmation_instructions
    end
  end
end

Emails::ConfirmService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
# Spec for Emails::ConfirmService: re-sending confirmation instructions must
# enqueue a mailer job.
RSpec.describe Emails::ConfirmService, feature_category: :user_management do
let_it_be(:user) { create(:user) }
subject(:service) { described_class.new(user) }
describe '#execute' do
it 'enqueues a background job to send confirmation email again' do
email = user.emails.create!(email: '[email protected]')
# Travel forward so Devise's resend throttling does not suppress the job.
travel_to(10.minutes.from_now) do
expect { service.execute(email) }.to have_enqueued_job.on_queue('mailers')
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Emails
  # Removes a secondary email address from a user. The primary email can
  # never be deleted through this service.
  class DestroyService < ::Emails::BaseService
    # Destroys the given Email record and clears any secondary-email
    # settings (e.g. notification/public email) that referenced it.
    # Returns a truthy value on success; raises for the primary email.
    def execute(email)
      raise StandardError, 'Cannot delete primary email' if email.user_primary_email?

      email.destroy && update_secondary_emails!(email.email)
    end

    private

    # Runs Users::UpdateService so that audit/side effects of the user
    # update are preserved; returns true when the update succeeded.
    def update_secondary_emails!(deleted_email)
      update_result = ::Users::UpdateService.new(@current_user, user: @user).execute do |user|
        user.unset_secondary_emails_matching_deleted_email!(deleted_email)
      end

      update_result[:status] == :success
    end
  end
end

Emails::DestroyService.prepend_mod_with('Emails::DestroyService')
``` | # frozen_string_literal: true
require 'spec_helper'
# Spec for Emails::DestroyService: secondary emails are deletable, the
# primary email is protected.
RSpec.describe Emails::DestroyService, feature_category: :user_management do
let!(:user) { create(:user) }
let!(:email) { create(:email, user: user) }
subject(:service) { described_class.new(user, user: user) }
describe '#execute' do
it 'removes an email' do
response = service.execute(email)
expect(user.emails).not_to include(email)
expect(response).to be true
end
context 'when it corresponds to the user primary email' do
let(:email) { user.emails.find_by!(email: user.email) }
it 'does not remove the email and raises an exception' do
expect { service.execute(email) }.to raise_error(StandardError, 'Cannot delete primary email')
expect(user.emails).to include(email)
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Emails
  # Adds an email address to a user and notifies them about it.
  class CreateService < ::Emails::BaseService
    # extra_params - attributes merged on top of the service params.
    # Returns the (possibly unpersisted/invalid) Email record.
    # Admins may pass skip_confirmation: true to auto-confirm the address.
    def execute(extra_params = {})
      skip_confirmation = params.delete(:skip_confirmation)

      user.emails.create(params.merge(extra_params)).tap do |new_email|
        new_email&.confirm if skip_confirmation && current_user.admin?

        # Only notify for persisted secondary addresses; the primary email
        # is created alongside the user itself.
        notification_service.new_email_address_added(user, new_email.email) if new_email.persisted? && !new_email.user_primary_email?
      end
    end
  end
end

Emails::CreateService.prepend_mod_with('Emails::CreateService')
``` | # frozen_string_literal: true
require 'spec_helper'
# Spec for Emails::CreateService: record creation, extra attributes, and
# the notification rules (only persisted, non-primary addresses notify).
RSpec.describe Emails::CreateService, feature_category: :user_management do
let_it_be(:user) { create(:user) }
let(:opts) { { email: '[email protected]', user: user } }
subject(:service) { described_class.new(user, opts) }
describe '#execute' do
it 'creates an email with valid attributes' do
expect { service.execute }.to change { Email.count }.by(1)
expect(Email.where(opts)).not_to be_empty
end
it 'creates an email with additional attributes' do
expect { service.execute(confirmation_token: 'abc') }.to change { Email.count }.by(1)
expect(Email.find_by(opts).confirmation_token).to eq 'abc'
end
it 'has the right user association' do
service.execute
expect(user.emails).to include(Email.find_by(opts))
end
it 'sends a notification to the user' do
expect_next_instance_of(NotificationService) do |notification_service|
expect(notification_service).to receive(:new_email_address_added)
end
service.execute
end
it 'does not send a notification when the email is not persisted' do
allow_next_instance_of(NotificationService) do |notification_service|
expect(notification_service).not_to receive(:new_email_address_added)
end
service.execute(email: 'invalid@@example.com')
end
it 'does not send a notification email when the email is the primary, because we are creating the user' do
allow_next_instance_of(NotificationService) do |notification_service|
expect(notification_service).not_to receive(:new_email_address_added)
end
# This is here to ensure that the service is actually called.
allow_next_instance_of(described_class) do |create_service|
expect(create_service).to receive(:execute).and_call_original
end
create(:user)
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module TwoFactor
  # Disables two-factor authentication for a user, guarded by the
  # :disable_two_factor policy, and notifies the user on success.
  class DestroyService < ::TwoFactor::BaseService
    # Returns a result hash: { status: :success } or
    # { status: :error, message: ... } when unauthorized or 2FA is off.
    def execute
      return error(_('You are not authorized to perform this action')) unless authorized?
      return error(_('Two-factor authentication is not enabled for this user')) unless user.two_factor_enabled?

      disable_result = disable_two_factor
      notify_on_success(user) if disable_result[:status] == :success

      disable_result
    end

    private

    def authorized?
      can?(current_user, :disable_two_factor, user)
    end

    # Goes through Users::UpdateService so the usual update side effects
    # (validation, audit) apply.
    def disable_two_factor
      ::Users::UpdateService.new(current_user, user: user).execute do |user|
        user.disable_two_factor!
      end
    end

    def notify_on_success(user)
      notification_service.disabled_two_factor(user)
    end
  end
end

TwoFactor::DestroyService.prepend_mod_with('TwoFactor::DestroyService')
``` | # frozen_string_literal: true
require 'spec_helper'
# Spec for TwoFactor::DestroyService: authorization, the "2FA not enabled"
# guard, and success notifications.
RSpec.describe TwoFactor::DestroyService, feature_category: :system_access do
let_it_be(:current_user) { create(:user) }
subject { described_class.new(current_user, user: user).execute }
context 'disabling two-factor authentication' do
shared_examples_for 'does not send notification email' do
context 'notification', :mailer do
it 'does not send a notification' do
perform_enqueued_jobs do
subject
end
should_not_email(user)
end
end
end
context 'when the user does not have two-factor authentication enabled' do
let(:user) { current_user }
it 'returns error' do
expect(subject).to eq(
{
status: :error,
message: 'Two-factor authentication is not enabled for this user'
}
)
end
it_behaves_like 'does not send notification email'
end
context 'when the user has two-factor authentication enabled' do
# A regular user may not disable another user's 2FA.
context 'when the executor is not authorized to disable two-factor authentication' do
context 'disabling the two-factor authentication of another user' do
let(:user) { create(:user, :two_factor) }
it 'returns error' do
expect(subject).to eq(
{
status: :error,
message: 'You are not authorized to perform this action'
}
)
end
it 'does not disable two-factor authentication' do
expect { subject }.not_to change { user.reload.two_factor_enabled? }.from(true)
end
it_behaves_like 'does not send notification email'
end
end
context 'when the executor is authorized to disable two-factor authentication' do
shared_examples_for 'disables two-factor authentication' do
it 'returns success' do
expect(subject).to eq({ status: :success })
end
it 'disables the two-factor authentication of the user' do
expect { subject }.to change { user.reload.two_factor_enabled? }.from(true).to(false)
end
context 'notification', :mailer do
it 'sends a notification' do
perform_enqueued_jobs do
subject
end
should_email(user)
end
end
end
context 'disabling their own two-factor authentication' do
let(:current_user) { create(:user, :two_factor) }
let(:user) { current_user }
it_behaves_like 'disables two-factor authentication'
end
context 'admin disables the two-factor authentication of another user', :enable_admin_mode do
let(:current_user) { create(:admin) }
let(:user) { create(:user, :two_factor) }
it_behaves_like 'disables two-factor authentication'
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Wikis
  # Commits an uploaded attachment into the wiki repository under
  # uploads/<random-hex>/<sanitized-file-name>.
  class CreateAttachmentService < Files::CreateService
    ATTACHMENT_PATH = 'uploads'
    MAX_FILENAME_LENGTH = 255

    attr_reader :container

    delegate :wiki, to: :container
    delegate :repository, to: :wiki

    # params - expects :file_name and :file_content; :commit_message,
    # :branch_name and author info are optional and defaulted here.
    def initialize(container:, current_user: nil, params: {})
      super(nil, current_user, params)

      @container = container
      @file_name = clean_file_name(params[:file_name])
      @file_path = File.join(ATTACHMENT_PATH, SecureRandom.hex, @file_name) if @file_name
      @commit_message ||= "Upload attachment #{@file_name}"
      @branch_name ||= wiki.default_branch
    end

    # Ensures the wiki repository exists, then commits the attachment.
    # Returns a hash with file_name/file_path/branch/commit.
    def create_commit!
      wiki.create_wiki_repository

      commit_result(create_transformed_commit(@file_content))
    rescue Wiki::CouldNotCreateWikiError
      raise_error("Error creating the wiki repository")
    end

    private

    def clean_file_name(file_name)
      return if file_name.blank?

      file_name = truncate_file_name(file_name)
      # CommonMark does not allow Urls with whitespaces, so we have to replace them
      # Using the same regex Carrierwave use to replace invalid characters
      file_name.gsub(CarrierWave::SanitizedFile.sanitize_regexp, '_')
    end

    # Truncates the base name so that name + extension fits in
    # MAX_FILENAME_LENGTH characters.
    def truncate_file_name(file_name)
      return file_name if file_name.length <= MAX_FILENAME_LENGTH

      extension = File.extname(file_name)
      truncate_at = MAX_FILENAME_LENGTH - extension.length - 1
      base_name = File.basename(file_name, extension)[0..truncate_at]
      base_name + extension
    end

    def validate!
      validate_file_name!
      validate_permissions!
    end

    def validate_file_name!
      raise_error('The file name cannot be empty') unless @file_name
    end

    def validate_permissions!
      unless can?(current_user, :create_wiki, container)
        raise_error('You are not allowed to push to the wiki')
      end
    end

    def create_transformed_commit(content)
      repository.create_file(
        current_user,
        @file_path,
        content,
        message: @commit_message,
        branch_name: @branch_name,
        author_email: @author_email,
        author_name: @author_name)
    end

    def commit_result(commit_id)
      {
        file_name: @file_name,
        file_path: @file_path,
        branch: @branch_name,
        commit: commit_id
      }
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Spec for Wikis::CreateAttachmentService: constructor defaults (author,
# commit message, branch), file-name sanitization/truncation, and
# permission checks.
RSpec.describe Wikis::CreateAttachmentService, feature_category: :wiki do
let(:container) { create(:project, :wiki_repo) }
let(:user) { create(:user) }
let(:file_name) { 'filename.txt' }
let(:file_path_regex) { %r{#{described_class::ATTACHMENT_PATH}/\h{32}/#{file_name}} }
let(:file_opts) do
{
file_name: file_name,
file_content: 'Content of attachment'
}
end
let(:opts) { file_opts }
subject(:service) { described_class.new(container: container, current_user: user, params: opts) }
before do
container.add_developer(user)
end
describe 'initialization' do
context 'author commit info' do
it 'does not raise error if user is nil' do
service = described_class.new(container: container, current_user: nil, params: opts)
expect(service.instance_variable_get(:@author_email)).to be_nil
expect(service.instance_variable_get(:@author_name)).to be_nil
end
context 'when no author info provided' do
it 'fills author_email and author_name from current_user info' do
expect(service.instance_variable_get(:@author_email)).to eq user.email
expect(service.instance_variable_get(:@author_name)).to eq user.name
end
end
context 'when author info provided' do
let(:author_email) { 'author_email' }
let(:author_name) { 'author_name' }
let(:opts) { file_opts.merge(author_email: author_email, author_name: author_name) }
it 'fills author_email and author_name from params' do
expect(service.instance_variable_get(:@author_email)).to eq author_email
expect(service.instance_variable_get(:@author_name)).to eq author_name
end
end
end
context 'commit message' do
context 'when no commit message provided' do
it 'sets a default commit message' do
expect(service.instance_variable_get(:@commit_message)).to eq "Upload attachment #{opts[:file_name]}"
end
end
context 'when commit message provided' do
let(:commit_message) { 'whatever' }
let(:opts) { file_opts.merge(commit_message: commit_message) }
it 'use the commit message from params' do
expect(service.instance_variable_get(:@commit_message)).to eq commit_message
end
end
end
context 'branch name' do
context 'when no branch provided' do
it 'sets the branch from the wiki default_branch' do
expect(service.instance_variable_get(:@branch_name)).to eq container.wiki.default_branch
end
end
context 'when branch provided' do
let(:branch_name) { 'whatever' }
let(:opts) { file_opts.merge(branch_name: branch_name) }
it 'use the commit message from params' do
expect(service.instance_variable_get(:@branch_name)).to eq branch_name
end
end
end
end
# Sanitization mirrors CarrierWave's sanitize_regexp; truncation caps the
# name at MAX_FILENAME_LENGTH (255) while keeping the extension.
describe '#parse_file_name' do
context 'when file_name' do
context 'has white spaces' do
let(:file_name) { 'file with spaces' }
it "replaces all of them with '_'" do
result = service.execute
expect(result[:status]).to eq :success
expect(result[:result][:file_name]).to eq 'file_with_spaces'
end
end
context 'has other invalid characters' do
let(:file_name) { "file\twith\tinvalid chars" }
it "replaces all of them with '_'" do
result = service.execute
expect(result[:status]).to eq :success
expect(result[:result][:file_name]).to eq 'file_with_invalid_chars'
end
end
context 'is not present' do
let(:file_name) { nil }
it 'returns error' do
result = service.execute
expect(result[:status]).to eq :error
expect(result[:message]).to eq 'The file name cannot be empty'
end
end
context 'length' do
context 'is bigger than 255' do
let(:file_name) { "#{'0' * 256}.jpg" }
it 'truncates file name' do
result = service.execute
expect(result[:status]).to eq :success
expect(result[:result][:file_name].length).to eq 255
expect(result[:result][:file_name]).to match(/0{251}\.jpg/)
end
end
context 'is less or equal to 255 does not return error' do
let(:file_name) { '0' * 255 }
it 'does not return error' do
result = service.execute
expect(result[:status]).to eq :success
end
end
end
end
context 'when user' do
shared_examples 'wiki attachment user validations' do
it 'returns error' do
result = described_class.new(container: container, current_user: user2, params: opts).execute
expect(result[:status]).to eq :error
expect(result[:message]).to eq 'You are not allowed to push to the wiki'
end
end
context 'does not have permission' do
let(:user2) { create(:user) }
it_behaves_like 'wiki attachment user validations'
end
context 'is nil' do
let(:user2) { nil }
it_behaves_like 'wiki attachment user validations'
end
end
end
it_behaves_like 'Wikis::CreateAttachmentService#execute', :project
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Clusters
  # Shared Kubernetes resource names used when GitLab provisions access to a
  # cluster (service account, tokens, roles and well-known namespaces).
  module Kubernetes
    # GitLab's service account and the admin token it uses.
    GITLAB_SERVICE_ACCOUNT_NAME = 'gitlab'
    GITLAB_SERVICE_ACCOUNT_NAMESPACE = 'default'
    GITLAB_ADMIN_TOKEN_NAME = 'gitlab-token'

    # Cluster-wide and per-project role bindings.
    GITLAB_CLUSTER_ROLE_BINDING_NAME = 'gitlab-admin'
    GITLAB_CLUSTER_ROLE_NAME = 'cluster-admin'
    PROJECT_CLUSTER_ROLE_NAME = 'admin'

    # Knative serving and Crossplane database roles/bindings.
    GITLAB_KNATIVE_SERVING_ROLE_NAME = 'gitlab-knative-serving-role'
    GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME = 'gitlab-knative-serving-rolebinding'
    GITLAB_CROSSPLANE_DATABASE_ROLE_NAME = 'gitlab-crossplane-database-role'
    GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME = 'gitlab-crossplane-database-rolebinding'

    # Well-known namespaces for Knative and Istio installations.
    KNATIVE_SERVING_NAMESPACE = 'knative-serving'
    ISTIO_SYSTEM_NAMESPACE = 'istio-system'
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Verifies that every constant declared in the Clusters::Kubernetes namespace
# exists, so accidental renames/removals are caught by the suite.
RSpec.describe Clusters::Kubernetes, feature_category: :deployment_management do
it { is_expected.to be_const_defined(:GITLAB_SERVICE_ACCOUNT_NAME) }
it { is_expected.to be_const_defined(:GITLAB_SERVICE_ACCOUNT_NAMESPACE) }
it { is_expected.to be_const_defined(:GITLAB_ADMIN_TOKEN_NAME) }
it { is_expected.to be_const_defined(:GITLAB_CLUSTER_ROLE_BINDING_NAME) }
it { is_expected.to be_const_defined(:GITLAB_CLUSTER_ROLE_NAME) }
it { is_expected.to be_const_defined(:PROJECT_CLUSTER_ROLE_NAME) }
it { is_expected.to be_const_defined(:GITLAB_KNATIVE_SERVING_ROLE_NAME) }
it { is_expected.to be_const_defined(:GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME) }
it { is_expected.to be_const_defined(:GITLAB_CROSSPLANE_DATABASE_ROLE_NAME) }
it { is_expected.to be_const_defined(:GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME) }
# These two constants are defined by the module but were previously untested.
it { is_expected.to be_const_defined(:KNATIVE_SERVING_NAMESPACE) }
it { is_expected.to be_const_defined(:ISTIO_SYSTEM_NAMESPACE) }
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
class BuildService
def initialize(subject)
@subject = subject
end
def execute
::Clusters::Cluster.new.tap do |cluster|
case @subject
when ::Project
cluster.cluster_type = :project_type
when ::Group
cluster.cluster_type = :group_type
when Instance
cluster.cluster_type = :instance_type
else
raise NotImplementedError
end
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Clusters::BuildService#execute returns an unsaved Clusters::Cluster whose
# cluster_type mirrors the kind of subject passed in (Project/Group/Instance).
RSpec.describe Clusters::BuildService, feature_category: :deployment_management do
describe '#execute' do
subject { described_class.new(cluster_subject).execute }
describe 'when cluster subject is a project' do
let(:cluster_subject) { build(:project) }
it 'sets the cluster_type to project_type' do
is_expected.to be_project_type
end
end
describe 'when cluster subject is a group' do
let(:cluster_subject) { build(:group) }
it 'sets the cluster_type to group_type' do
is_expected.to be_group_type
end
end
describe 'when cluster subject is an instance' do
# Instance is not a persisted model, so a plain instance is used directly.
let(:cluster_subject) { Clusters::Instance.new }
it 'sets the cluster_type to instance_type' do
is_expected.to be_instance_type
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
class BuildKubernetesNamespaceService
attr_reader :cluster, :environment
def initialize(cluster, environment:)
@cluster = cluster
@environment = environment
end
def execute
cluster.kubernetes_namespaces.build(attributes)
end
private
def attributes
attributes = {
project: environment.project,
namespace: namespace,
service_account_name: "#{namespace}-service-account"
}
attributes[:cluster_project] = cluster.cluster_project if cluster.project_type?
attributes[:environment] = environment if cluster.namespace_per_environment?
attributes
end
def namespace
Gitlab::Kubernetes::DefaultNamespace.new(cluster, project: environment.project).from_environment_slug(environment.slug)
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Clusters::BuildKubernetesNamespaceService builds (without saving) a
# kubernetes namespace record for a cluster/environment pair. The default
# namespace generator is stubbed so only attribute wiring is under test.
RSpec.describe Clusters::BuildKubernetesNamespaceService, feature_category: :deployment_management do
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:environment) { create(:environment) }
let(:project) { environment.project }
let(:namespace_generator) { double(from_environment_slug: namespace) }
let(:namespace) { 'namespace' }
subject { described_class.new(cluster, environment: environment).execute }
before do
# Bypass real namespace derivation; the generator's output is fixed above.
allow(Gitlab::Kubernetes::DefaultNamespace).to receive(:new).and_return(namespace_generator)
end
shared_examples 'shared attributes' do
it 'initializes a new namespace and sets default values' do
expect(subject).to be_new_record
expect(subject.cluster).to eq cluster
expect(subject.project).to eq project
expect(subject.namespace).to eq namespace
expect(subject.service_account_name).to eq "#{namespace}-service-account"
end
end
include_examples 'shared attributes'
it 'sets cluster_project and environment' do
expect(subject.cluster_project).to eq cluster.cluster_project
expect(subject.environment).to eq environment
end
# environment is only set when the cluster uses a namespace per environment.
context 'namespace per environment is disabled' do
let(:cluster) { create(:cluster, :project, :provided_by_gcp, :namespace_per_environment_disabled) }
include_examples 'shared attributes'
it 'does not set environment' do
expect(subject.cluster_project).to eq cluster.cluster_project
expect(subject.environment).to be_nil
end
end
# cluster_project is only set for project-type clusters.
context 'group cluster' do
let(:cluster) { create(:cluster, :group, :provided_by_gcp) }
include_examples 'shared attributes'
it 'does not set cluster_project' do
expect(subject.cluster_project).to be_nil
expect(subject.environment).to eq environment
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
class DestroyService
attr_reader :current_user, :params
def initialize(user = nil, params = {})
@current_user = user
@params = params.dup
@response = {}
end
def execute(cluster)
cleanup? ? start_cleanup!(cluster) : destroy_cluster!(cluster)
@response
end
private
def cleanup?
Gitlab::Utils.to_boolean(params[:cleanup])
end
def start_cleanup!(cluster)
cluster.start_cleanup!
@response[:message] = _('Kubernetes cluster integration and resources are being removed.')
end
def destroy_cluster!(cluster)
cluster.destroy!
@response[:message] = _('Kubernetes cluster integration was successfully removed.')
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Clusters::DestroyService either destroys the cluster immediately or hands it
# to the asynchronous cleanup state machine, depending on the `cleanup` param.
RSpec.describe Clusters::DestroyService, feature_category: :deployment_management do
describe '#execute' do
subject { described_class.new(cluster.user, params).execute(cluster) }
let!(:cluster) { create(:cluster, :project, :provided_by_user) }
context 'when correct params' do
shared_examples 'only removes cluster' do
it 'does not start cleanup' do
# The service invokes `start_cleanup!` (bang). The previous expectation
# on `:start_cleanup` (no bang) could never fail, making it vacuous.
expect(cluster).not_to receive(:start_cleanup!)
subject
end
it 'destroys the cluster' do
subject
expect { cluster.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
context 'when params are empty' do
let(:params) { {} }
it_behaves_like 'only removes cluster'
end
context 'when cleanup param is false' do
let(:params) { { cleanup: 'false' } }
it_behaves_like 'only removes cluster'
end
context 'when cleanup param is true' do
let(:params) { { cleanup: 'true' } }
before do
allow(Clusters::Cleanup::ProjectNamespaceWorker).to receive(:perform_async)
end
it 'does not destroy cluster' do
subject
# Positive assertion instead of the original double negative
# (`exists?).not_to be_falsey`).
expect(Clusters::Cluster.where(id: cluster.id).exists?).to be_truthy
end
it 'transition cluster#cleanup_status from cleanup_not_started to cleanup_removing_project_namespaces' do
expect { subject }.to change { cluster.cleanup_status_name }
.from(:cleanup_not_started)
.to(:cleanup_removing_project_namespaces)
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
class UpdateService
attr_reader :current_user, :params
def initialize(user = nil, params = {})
@current_user = user
@params = params.dup
end
def execute(cluster)
if validate_params(cluster)
token = params.dig(:platform_kubernetes_attributes, :token)
if token.blank?
params[:platform_kubernetes_attributes]&.delete(:token)
end
cluster.update(params)
else
false
end
end
private
def validate_params(cluster)
::Clusters::Management::ValidateManagementProjectPermissionsService.new(current_user)
.execute(cluster, params[:management_project_id])
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Clusters::UpdateService spec. Covers enable/disable, namespace updates,
# token handling (blank token must not clobber the stored one), GCP-provided
# cluster immutability, and management-project permission validation.
# Fixed: "adminster"/"manangement" typos in context descriptions.
RSpec.describe Clusters::UpdateService, feature_category: :deployment_management do
include KubernetesHelpers
describe '#execute' do
subject { described_class.new(cluster.user, params).execute(cluster) }
let(:cluster) { create(:cluster, :project, :provided_by_user) }
context 'when correct params' do
context 'when enabled is true' do
let(:params) { { enabled: true } }
it 'enables cluster' do
is_expected.to eq(true)
expect(cluster.enabled).to be_truthy
end
end
context 'when enabled is false' do
let(:params) { { enabled: false } }
it 'disables cluster' do
is_expected.to eq(true)
expect(cluster.enabled).to be_falsy
end
end
context 'when namespace is specified' do
let(:params) do
{
platform_kubernetes_attributes: {
namespace: 'custom-namespace'
}
}
end
before do
stub_kubeclient_get_namespace('https://kubernetes.example.com', namespace: 'my-namespace')
end
it 'updates namespace' do
is_expected.to eq(true)
expect(cluster.platform.namespace).to eq('custom-namespace')
end
end
# A blank token in the params must be dropped, not persisted.
context 'when service token is empty' do
let(:params) do
{
platform_kubernetes_attributes: {
token: ''
}
}
end
it 'does not update the token' do
current_token = cluster.platform.token
is_expected.to eq(true)
cluster.platform.reload
expect(cluster.platform.token).to eq(current_token)
end
end
context 'when service token is not empty' do
let(:params) do
{
platform_kubernetes_attributes: {
token: 'new secret token'
}
}
end
it 'updates the token' do
is_expected.to eq(true)
expect(cluster.platform.token).to eq('new secret token')
end
end
end
context 'when invalid params' do
let(:params) do
{
platform_kubernetes_attributes: {
namespace: '!!!'
}
}
end
it 'returns false' do
is_expected.to eq(false)
expect(cluster.errors[:"platform_kubernetes.namespace"]).to be_present
end
end
context 'when cluster is provided by GCP' do
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:params) do
{
name: 'my-new-name'
}
end
it 'does not change cluster name' do
is_expected.to eq(false)
cluster.reload
expect(cluster.name).to eq('test-cluster')
end
context 'when cluster is being created' do
let(:cluster) { create(:cluster, :providing_by_gcp) }
it 'rejects changes' do
is_expected.to eq(false)
expect(cluster.errors.full_messages).to include('Cannot modify provider during creation')
end
end
end
context 'when params includes :management_project_id' do
context 'management_project is non-existent' do
let(:params) do
{ management_project_id: 0 }
end
it 'does not update management_project_id' do
is_expected.to eq(false)
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
cluster.reload
expect(cluster.management_project_id).to be_nil
end
end
shared_examples 'setting a management project' do
context 'user is authorized to administer management_project' do
before do
management_project.add_maintainer(cluster.user)
end
let(:params) do
{ management_project_id: management_project.id }
end
it 'updates management_project_id' do
is_expected.to eq(true)
expect(cluster.management_project).to eq(management_project)
end
end
context 'user is not authorized to administer management_project' do
let(:params) do
{ management_project_id: management_project.id }
end
it 'does not update management_project_id' do
is_expected.to eq(false)
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
cluster.reload
expect(cluster.management_project_id).to be_nil
end
end
context 'cluster already has a management project set' do
before do
cluster.update!(management_project: create(:project))
end
let(:params) do
{ management_project_id: '' }
end
it 'unsets management_project_id' do
is_expected.to eq(true)
cluster.reload
expect(cluster.management_project_id).to be_nil
end
end
end
context 'project cluster' do
include_examples 'setting a management project' do
let(:management_project) { create(:project, namespace: cluster.first_project.namespace) }
end
context 'management_project is outside of the namespace scope' do
before do
management_project.update!(group: create(:group))
end
let(:params) do
{ management_project_id: management_project.id }
end
it 'does not update management_project_id' do
is_expected.to eq(false)
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
cluster.reload
expect(cluster.management_project_id).to be_nil
end
end
end
context 'group cluster' do
let(:cluster) { create(:cluster, :group) }
include_examples 'setting a management project' do
let(:management_project) { create(:project, group: cluster.first_group) }
end
context 'management_project is outside of the namespace scope' do
before do
management_project.update!(group: create(:group))
end
let(:params) do
{ management_project_id: management_project.id }
end
it 'does not update management_project_id' do
is_expected.to eq(false)
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
cluster.reload
expect(cluster.management_project_id).to be_nil
end
end
end
context 'instance cluster' do
let(:cluster) { create(:cluster, :instance) }
include_examples 'setting a management project' do
let(:management_project) { create(:project) }
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
class CreateService
attr_reader :current_user, :params
def initialize(user = nil, params = {})
@current_user = user
@params = params.dup
end
def execute(access_token: nil)
raise ArgumentError, 'Unknown clusterable provided' unless clusterable
cluster_params = params.merge(global_params).merge(clusterable_params)
cluster_params[:provider_gcp_attributes].try do |provider|
provider[:access_token] = access_token
end
cluster = Clusters::Cluster.new(cluster_params)
validate_management_project_permissions(cluster)
return cluster if cluster.errors.present?
cluster.tap(&:save)
end
private
def clusterable
@clusterable ||= params.delete(:clusterable)
end
def global_params
{ user: current_user }
end
def clusterable_params
case clusterable
when ::Project
{ cluster_type: :project_type, projects: [clusterable] }
when ::Group
{ cluster_type: :group_type, groups: [clusterable] }
when Instance
{ cluster_type: :instance_type }
else
raise NotImplementedError
end
end
def validate_management_project_permissions(cluster)
Clusters::Management::ValidateManagementProjectPermissionsService.new(current_user)
.execute(cluster, params[:management_project_id])
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# Clusters::CreateService spec. Covers GCP cluster creation with valid and
# invalid provider params, multiple clusters per project, and management
# project permission/scope validation.
# Fixed: "adminster"/"manangement" typos in context descriptions.
RSpec.describe Clusters::CreateService, feature_category: :deployment_management do
let(:access_token) { 'xxx' }
let(:project) { create(:project) }
let(:user) { create(:user) }
subject { described_class.new(user, params).execute(access_token: access_token) }
context 'when provider is gcp' do
context 'when project has no clusters' do
context 'when correct params' do
let(:params) do
{
name: 'test-cluster',
provider_type: :gcp,
provider_gcp_attributes: {
gcp_project_id: 'gcp-project',
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a',
legacy_abac: 'true'
},
clusterable: project
}
end
include_examples 'create cluster service success'
end
context 'when invalid params' do
let(:params) do
{
name: 'test-cluster',
provider_type: :gcp,
provider_gcp_attributes: {
gcp_project_id: '!!!!!!!',
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a'
},
clusterable: project
}
end
include_examples 'create cluster service error'
end
end
context 'when project has a cluster' do
include_context 'with valid cluster create params'
let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
it 'creates another cluster' do
expect { subject }.to change { Clusters::Cluster.count }.by(1)
end
end
end
context 'when another cluster exists' do
let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
context 'when correct params' do
let(:params) do
{
name: 'test-cluster',
provider_type: :gcp,
provider_gcp_attributes: {
gcp_project_id: 'gcp-project',
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a',
legacy_abac: 'true'
},
clusterable: project
}
end
include_examples 'create cluster service success'
end
context 'when invalid params' do
let(:params) do
{
name: 'test-cluster',
provider_type: :gcp,
provider_gcp_attributes: {
gcp_project_id: '!!!!!!!',
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a'
},
clusterable: project
}
end
include_examples 'create cluster service error'
end
end
context 'when params includes :management_project_id' do
subject(:cluster) { described_class.new(user, params).execute(access_token: access_token) }
let(:params) do
{
name: 'test-cluster',
provider_type: :gcp,
provider_gcp_attributes: {
gcp_project_id: 'gcp-project',
zone: 'us-central1-a',
num_nodes: 1,
machine_type: 'machine_type-a',
legacy_abac: 'true'
},
clusterable: clusterable,
management_project_id: management_project_id
}
end
let(:clusterable) { project }
let(:management_project_id) { management_project.id }
let(:management_project_namespace) { project.namespace }
let(:management_project) { create(:project, namespace: management_project_namespace) }
shared_examples 'invalid project or cluster permissions' do
it 'does not persist the cluster and adds errors' do
expect(cluster).not_to be_persisted
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
end
end
shared_examples 'setting a management project' do
context 'when user is authorized to administer management_project' do
before do
management_project.add_maintainer(user)
end
it 'persists the cluster' do
expect(cluster).to be_persisted
expect(cluster.management_project).to eq(management_project)
end
end
context 'when user is not authorized to administer management_project' do
include_examples 'invalid project or cluster permissions'
end
end
shared_examples 'setting a management project outside of scope' do
context 'when management_project is outside of the namespace scope' do
let(:management_project_namespace) { create(:group) }
it 'does not persist the cluster' do
expect(cluster).not_to be_persisted
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
end
end
end
context 'management_project is non-existent' do
let(:management_project_id) { 0 }
include_examples 'invalid project or cluster permissions'
end
context 'project cluster' do
include_examples 'setting a management project'
include_examples 'setting a management project outside of scope'
end
context 'group cluster' do
let(:management_project_namespace) { create(:group) }
let(:clusterable) { management_project_namespace }
include_examples 'setting a management project'
include_examples 'setting a management project outside of scope'
end
context 'instance cluster' do
let(:clusterable) { Clusters::Instance.new }
include_examples 'setting a management project'
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
module Management
class ValidateManagementProjectPermissionsService
attr_reader :current_user
def initialize(user = nil)
@current_user = user
end
def execute(cluster, management_project_id)
if management_project_id.present?
management_project = management_project_scope(cluster).find_by_id(management_project_id)
unless management_project && can_admin_pipeline_for_project?(management_project)
cluster.errors.add(:management_project_id, _('Project does not exist or you don\'t have permission to perform this action'))
return false
end
end
true
end
private
def can_admin_pipeline_for_project?(project)
Ability.allowed?(current_user, :admin_pipeline, project)
end
def management_project_scope(cluster)
return ::Project.all if cluster.instance_type?
group =
if cluster.group_type?
cluster.first_group
elsif cluster.project_type?
cluster.first_project&.namespace
end
# Prevent users from selecting nested projects until
# https://gitlab.com/gitlab-org/gitlab/issues/34650 is resolved
include_subgroups = cluster.group_type?
::GroupProjectsFinder.new(
group: group,
current_user: current_user,
options: { exclude_shared: true, include_subgroups: include_subgroups }
).execute
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# ValidateManagementProjectPermissionsService spec: a management project id is
# valid only when the project exists, lies inside the cluster's namespace
# scope, and the user can admin its pipelines.
# Fixed: inconsistent "adminster"/"manangement" spellings in descriptions.
RSpec.describe Clusters::Management::ValidateManagementProjectPermissionsService, feature_category: :deployment_management do
describe '#execute' do
subject { described_class.new(user).execute(cluster, management_project_id) }
let(:cluster) { build(:cluster, :project, projects: [create(:project)]) }
let(:user) { create(:user) }
# A nil id means "no management project" and is always valid.
context 'when management_project_id is nil' do
let(:management_project_id) { nil }
it { is_expected.to be true }
end
context 'when management_project_id is not nil' do
let(:management_project_id) { management_project.id }
let(:management_project_namespace) { create(:group) }
let(:management_project) { create(:project, namespace: management_project_namespace) }
context 'when management_project does not exist' do
let(:management_project_id) { 0 }
it 'adds errors to the cluster and returns false' do
is_expected.to eq false
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
end
end
shared_examples 'management project is in scope' do
context 'when user is authorized to administer management_project' do
before do
management_project.add_maintainer(user)
end
it 'adds no error and returns true' do
is_expected.to eq true
expect(cluster.errors).to be_empty
end
end
context 'when user is not authorized to administer management_project' do
it 'adds an error and returns false' do
is_expected.to eq false
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
end
end
end
shared_examples 'management project is out of scope' do
context 'when management_project is outside of the namespace scope' do
let(:management_project_namespace) { create(:group) }
it 'adds an error and returns false' do
is_expected.to eq false
expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
end
end
end
context 'project cluster' do
let(:cluster) { build(:cluster, :project, projects: [create(:project, namespace: management_project_namespace)]) }
include_examples 'management project is in scope'
include_examples 'management project is out of scope'
end
context 'group cluster' do
let(:cluster) { build(:cluster, :group, groups: [management_project_namespace]) }
include_examples 'management project is in scope'
include_examples 'management project is out of scope'
end
# Instance clusters accept any project, so only the in-scope examples apply.
context 'instance cluster' do
let(:cluster) { build(:cluster, :instance) }
include_examples 'management project is in scope'
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
module Agents
class CreateActivityEventService
def initialize(agent, **params)
@agent = agent
@params = params
end
def execute
agent.activity_events.create!(params)
DeleteExpiredEventsWorker.perform_at(schedule_cleanup_at, agent.id)
ServiceResponse.success
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, agent_id: agent.id)
ServiceResponse.error(message: e.message)
end
private
attr_reader :agent, :params
def schedule_cleanup_at
1.hour.from_now.change(min: agent.id % 60)
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# CreateActivityEventService spec: creating an event also schedules the
# expired-event cleanup worker; failures are tracked, not raised.
RSpec.describe Clusters::Agents::CreateActivityEventService, feature_category: :deployment_management do
let_it_be(:agent) { create(:cluster_agent) }
let_it_be(:token) { create(:cluster_agent_token, agent: agent) }
let_it_be(:user) { create(:user) }
describe '#execute' do
let(:params) do
{
kind: :token_created,
level: :info,
recorded_at: token.created_at,
user: user,
agent_token: token
}
end
subject { described_class.new(agent, **params).execute }
it 'creates an activity event record' do
expect { subject }.to change(agent.activity_events, :count).from(0).to(1)
event = agent.activity_events.last
expect(event).to have_attributes(
kind: 'token_created',
level: 'info',
recorded_at: token.reload.created_at,
user: user,
agent_token_id: token.id
)
end
it 'schedules the cleanup worker' do
# The service staggers cleanup by agent id (minute = agent.id % 60).
expect(Clusters::Agents::DeleteExpiredEventsWorker).to receive(:perform_at)
.with(1.hour.from_now.change(min: agent.id % 60), agent.id)
subject
end
context 'when activity event creation fails' do
let(:params) { {} }
# Errors are swallowed into a ServiceResponse.error and reported to Sentry.
it 'tracks the exception without raising' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
.with(instance_of(ActiveRecord::RecordInvalid), agent_id: agent.id)
subject
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
module Agents
class AuthorizeProxyUserService < ::BaseService
include ::Gitlab::Utils::StrongMemoize
def initialize(current_user, agent)
@current_user = current_user
@agent = agent
end
def execute
return forbidden('`user_access` keyword is not found in agent config file.') unless user_access_config.present?
access_as = user_access_config['access_as']
return forbidden('`access_as` is not found under the `user_access` keyword.') unless access_as.present?
return forbidden('`access_as` must exist only once under the `user_access` keyword.') if access_as.size != 1
handle_access(access_as)
end
private
attr_reader :current_user, :agent
# Override in EE
def handle_access(access_as)
access_as_agent if access_as.key?('agent')
end
def authorizations
@authorizations ||= ::Clusters::Agents::Authorizations::UserAccess::Finder
.new(current_user, agent: agent).execute
end
def response_base
{
agent: {
id: agent.id,
config_project: { id: agent.project_id }
},
user: {
id: current_user.id,
username: current_user.username
}
}
end
def access_as_agent
if authorizations.empty?
return forbidden('You must be a member of `projects` or `groups` under the `user_access` keyword.')
end
success(payload: response_base.merge(access_as: { agent: {} }))
end
def user_access_config
agent.user_access_config
end
strong_memoize_attr :user_access_config
delegate :success, to: ServiceResponse, private: true
def forbidden(message)
ServiceResponse.error(reason: :forbidden, message: message)
end
end
end
end
Clusters::Agents::AuthorizeProxyUserService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
# AuthorizeProxyUserService spec: access through the agent proxy requires a
# `user_access` config entry and at least developer membership in one of the
# authorized projects/groups; everything else is a :forbidden error.
RSpec.describe Clusters::Agents::AuthorizeProxyUserService, feature_category: :deployment_management do
subject(:service_response) { service.execute }
let(:service) { described_class.new(user, agent) }
let(:user) { create(:user) }
let_it_be(:organization) { create(:group) }
let_it_be(:configuration_project) { create(:project, group: organization) }
let_it_be(:agent) { create(:cluster_agent, name: 'the-agent', project: configuration_project) }
let_it_be(:deployment_project) { create(:project, group: organization) }
let_it_be(:deployment_group) { create(:group, parent: organization) }
let(:user_access_config) do
{
'user_access' => {
'access_as' => { 'agent' => {} },
'projects' => [{ 'id' => deployment_project.full_path }],
'groups' => [{ 'id' => deployment_group.full_path }]
}
}
end
before do
# Materialize the config into authorization records the finder reads.
Clusters::Agents::Authorizations::UserAccess::RefreshService.new(agent, config: user_access_config).execute
end
it 'returns forbidden when user has no access to any project', :aggregate_failures do
expect(service_response).to be_error
expect(service_response.reason).to eq :forbidden
expect(service_response.message)
.to eq 'You must be a member of `projects` or `groups` under the `user_access` keyword.'
end
context 'when user is member of an authorized group' do
it 'authorizes developers', :aggregate_failures do
deployment_group.add_member(user, :developer)
expect(service_response).to be_success
expect(service_response.payload[:user]).to include(id: user.id, username: user.username)
expect(service_response.payload[:agent]).to include(id: agent.id, config_project: { id: agent.project.id })
end
it 'does not authorize reporters', :aggregate_failures do
deployment_group.add_member(user, :reporter)
expect(service_response).to be_error
expect(service_response.reason).to eq :forbidden
expect(service_response.message)
.to eq 'You must be a member of `projects` or `groups` under the `user_access` keyword.'
end
end
context 'when user is member of an authorized project' do
it 'authorizes developers', :aggregate_failures do
deployment_project.add_member(user, :developer)
expect(service_response).to be_success
expect(service_response.payload[:user]).to include(id: user.id, username: user.username)
expect(service_response.payload[:agent]).to include(id: agent.id, config_project: { id: agent.project.id })
end
it 'does not authorize reporters', :aggregate_failures do
deployment_project.add_member(user, :reporter)
expect(service_response).to be_error
expect(service_response.reason).to eq :forbidden
expect(service_response.message)
.to eq 'You must be a member of `projects` or `groups` under the `user_access` keyword.'
end
end
context 'when config is empty' do
let(:user_access_config) { {} }
it 'returns an error', :aggregate_failures do
expect(service_response).to be_error
expect(service_response.reason).to eq :forbidden
expect(service_response.message).to eq '`user_access` keyword is not found in agent config file.'
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
module Agents
class DeleteExpiredEventsService
def initialize(agent)
@agent = agent
end
def execute
agent.activity_events
.recorded_before(remove_events_before)
.each_batch { |batch| batch.delete_all }
end
private
attr_reader :agent
def remove_events_before
agent.activity_event_deletion_cutoff
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
# DeleteExpiredEventsService spec: events recorded before the agent's deletion
# cutoff are removed in batches; newer events are kept.
RSpec.describe Clusters::Agents::DeleteExpiredEventsService, feature_category: :deployment_management do
let_it_be(:agent) { create(:cluster_agent) }
describe '#execute' do
let_it_be(:event1) { create(:agent_activity_event, agent: agent, recorded_at: 1.hour.ago) }
let_it_be(:event2) { create(:agent_activity_event, agent: agent, recorded_at: 2.hours.ago) }
let_it_be(:event3) { create(:agent_activity_event, agent: agent, recorded_at: 3.hours.ago) }
let_it_be(:event4) { create(:agent_activity_event, agent: agent, recorded_at: 4.hours.ago) }
let_it_be(:event5) { create(:agent_activity_event, agent: agent, recorded_at: 5.hours.ago) }
let(:deletion_cutoff) { 1.day.ago }
subject { described_class.new(agent).execute }
before do
# The cutoff normally comes from the agent model; stubbed for determinism.
allow(agent).to receive(:activity_event_deletion_cutoff).and_return(deletion_cutoff)
end
it 'does not delete events if the limit has not been reached' do
expect { subject }.not_to change(agent.activity_events, :count)
end
context 'there are more events than the limit' do
# Cutoff at event3's timestamp: recorded_before is exclusive, so
# event4 and event5 (strictly older) are deleted.
let(:deletion_cutoff) { event3.recorded_at }
it 'removes events to remain at the limit, keeping the most recent' do
expect { subject }.to change(agent.activity_events, :count).from(5).to(3)
expect(agent.activity_events).to contain_exactly(event1, event2, event3)
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Clusters
  module Agents
    # Creates a cluster agent record for a project, guarded by the caller's
    # permissions (both admin_pipeline and create_cluster are required).
    class CreateService < BaseService
      # @param name [String] name for the new agent
      # @return [Hash] success payload including :cluster_agent, or an error hash
      def execute(name:)
        return error_no_permissions unless cluster_agent_permissions?

        agent = build_agent(name)

        return error(agent.errors.full_messages) unless agent.save

        success.merge(cluster_agent: agent)
      end

      private

      # The caller needs pipeline administration AND cluster creation rights.
      def cluster_agent_permissions?
        current_user.can?(:admin_pipeline, project) && current_user.can?(:create_cluster, project)
      end

      def build_agent(name)
        ::Clusters::Agent.new(name: name, project: project, created_by_user: current_user)
      end

      def error_no_permissions
        error(s_('ClusterAgent|You have insufficient permissions to create a cluster agent for this project'))
      end
    end
  end
end
``` | # frozen_string_literal: true
# Spec for Clusters::Agents::CreateService: covers the permission guard,
# successful agent creation, and validation error reporting.
require 'spec_helper'
RSpec.describe Clusters::Agents::CreateService, feature_category: :deployment_management do
subject(:service) { described_class.new(project, user) }
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
describe '#execute' do
context 'without user permissions' do
it 'returns errors when user does not have permissions' do
expect(service.execute(name: 'missing-permissions')).to eq({
status: :error,
message: 'You have insufficient permissions to create a cluster agent for this project'
})
end
end
context 'with user permissions' do
before do
project.add_maintainer(user)
end
it 'creates a new clusters_agent' do
expect { service.execute(name: 'with-user') }.to change { ::Clusters::Agent.count }.by(1)
end
it 'returns success status', :aggregate_failures do
result = service.execute(name: 'success')
expect(result[:status]).to eq(:success)
expect(result[:message]).to be_nil
end
it 'returns agent values', :aggregate_failures do
new_agent = service.execute(name: 'new-agent')[:cluster_agent]
expect(new_agent.name).to eq('new-agent')
expect(new_agent.created_by_user).to eq(user)
end
it 'generates an error message when name is invalid' do
expect(service.execute(name: '@bad_agent_name!')).to eq({
status: :error,
message: ["Name can contain only lowercase letters, digits, and '-', but cannot start or end with '-'"]
})
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Clusters
  module Agents
    # Destroys a cluster agent on behalf of a user holding admin_cluster rights.
    class DeleteService < ::BaseContainerService
      # @param cluster_agent [Clusters::Agent] the agent to destroy
      # @return [ServiceResponse] success, or an error with the reason(s)
      def execute(cluster_agent)
        return error_no_permissions unless current_user.can?(:admin_cluster, cluster_agent)

        return ServiceResponse.error(message: cluster_agent.errors.full_messages) unless cluster_agent.destroy

        ServiceResponse.success
      end

      private

      def error_no_permissions
        ServiceResponse.error(message: s_('ClusterAgent|You have insufficient permissions to delete this cluster agent'))
      end
    end
  end
end
``` | # frozen_string_literal: true
# Spec for Clusters::Agents::DeleteService: covers the permission guard and
# successful destruction of a cluster agent.
require 'spec_helper'
RSpec.describe Clusters::Agents::DeleteService, feature_category: :deployment_management do
subject(:service) { described_class.new(container: project, current_user: user) }
let(:cluster_agent) { create(:cluster_agent) }
let(:project) { cluster_agent.project }
let(:user) { create(:user) }
describe '#execute' do
context 'without user permissions' do
it 'fails to delete when the user has no permissions', :aggregate_failures do
response = service.execute(cluster_agent)
expect(response.status).to eq(:error)
expect(response.message).to eq('You have insufficient permissions to delete this cluster agent')
expect { cluster_agent.reload }.not_to raise_error
end
end
context 'with user permissions' do
before do
project.add_maintainer(user)
end
it 'deletes a cluster agent', :aggregate_failures do
expect { service.execute(cluster_agent) }.to change { ::Clusters::Agent.count }.by(-1)
expect { cluster_agent.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
module Agents
module Authorizations
module UserAccess
# Reconciles the agent's user-access authorization rows (projects and
# groups) with the `user_access` section of the agent config file.
class RefreshService
include Gitlab::Utils::StrongMemoize
# At most this many project/group entries from the config are honored.
AUTHORIZED_ENTITY_LIMIT = 100
delegate :project, to: :agent, private: true
delegate :root_ancestor, to: :project, private: true
def initialize(agent, config:)
@agent = agent
@config = config
end
# Refreshes both project and group authorizations; always returns true.
def execute
refresh_projects!
refresh_groups!
true
end
private
attr_reader :agent, :config
# Upserts the configured project authorizations and prunes unlisted rows
# inside a row lock on the agent; an empty config wipes all rows.
def refresh_projects!
if allowed_project_configurations.present?
project_ids = allowed_project_configurations.map { |config| config.fetch(:project_id) }
agent.with_lock do
agent.user_access_project_authorizations.upsert_configs(allowed_project_configurations)
agent.user_access_project_authorizations.delete_unlisted(project_ids)
end
else
# The :delete_all argument selects the association's delete strategy
# (rows removed without destroy callbacks).
agent.user_access_project_authorizations.delete_all(:delete_all)
end
end
# Same reconciliation as refresh_projects!, for group authorizations.
def refresh_groups!
if allowed_group_configurations.present?
group_ids = allowed_group_configurations.map { |config| config.fetch(:group_id) }
agent.with_lock do
agent.user_access_group_authorizations.upsert_configs(allowed_group_configurations)
agent.user_access_group_authorizations.delete_unlisted(group_ids)
end
else
agent.user_access_group_authorizations.delete_all(:delete_all)
end
end
# Maps configured project paths (resolved within the root ancestor) to
# { project_id:, config: } rows; nil when the config lists no projects.
def allowed_project_configurations
project_entries = extract_config_entries(entity: 'projects')
return unless project_entries
allowed_projects.where_full_path_in(project_entries.keys, use_includes: false).map do |project|
{ project_id: project.id, config: user_access_as }
end
end
strong_memoize_attr :allowed_project_configurations
def allowed_group_configurations
group_entries = extract_config_entries(entity: 'groups')
return unless group_entries
allowed_groups.where_full_path_in(group_entries.keys, use_includes: false).map do |group|
{ group_id: group.id, config: user_access_as }
end
end
strong_memoize_attr :allowed_group_configurations
# Takes the first AUTHORIZED_ENTITY_LIMIT entries and indexes them by
# downcased path, enabling case-insensitive matching. NOTE: mutates each
# entry in place by removing its 'id' key.
def extract_config_entries(entity:)
config.dig('user_access', entity)
&.first(AUTHORIZED_ENTITY_LIMIT)
&.index_by { |config| config.delete('id').downcase }
end
def allowed_projects
root_ancestor.all_projects
end
# Group authorizations are only possible when the root ancestor is a
# group (not a user namespace).
def allowed_groups
if group_root_ancestor?
root_ancestor.self_and_descendants
else
::Group.none
end
end
def group_root_ancestor?
root_ancestor.group_namespace?
end
# The per-row stored config is just the 'access_as' slice of the
# user_access section (empty hash when absent).
def user_access_as
@user_access_as ||= config['user_access']&.slice('access_as') || {}
end
end
end
end
end
end
``` | # frozen_string_literal: true
# Spec for Clusters::Agents::Authorizations::UserAccess::RefreshService:
# verifies project/group user-access authorizations are reconciled against the
# agent config — case-insensitive path matching, the access_as payload, the
# AUTHORIZED_ENTITY_LIMIT cap, and the removal scenarios (shared examples).
require 'spec_helper'
RSpec.describe Clusters::Agents::Authorizations::UserAccess::RefreshService, feature_category: :deployment_management do
describe '#execute' do
let_it_be(:root_ancestor) { create(:group) }
let_it_be(:agent_management_project) { create(:project, namespace: root_ancestor) }
let_it_be(:group_1) { create(:group, path: 'group-path-with-UPPERCASE', parent: root_ancestor) }
let_it_be(:group_2) { create(:group, parent: root_ancestor) }
let_it_be(:project_1) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) }
let_it_be(:project_2) { create(:project, namespace: root_ancestor) }
let(:agent) { create(:cluster_agent, project: agent_management_project) }
let(:config) do
{
user_access: {
groups: [
{ id: group_2.full_path }
],
projects: [
{ id: project_2.full_path }
]
}
}.deep_merge(extra_config).deep_stringify_keys
end
let(:extra_config) { {} }
subject { described_class.new(agent, config: config).execute }
# Pre-existing authorizations for group_1/project_1 should be pruned because
# the config above only lists group_2/project_2.
before do
agent.user_access_group_authorizations.create!(group: group_1, config: {})
agent.user_access_project_authorizations.create!(project: project_1, config: {})
end
# Shared by the group and project sections below; callers provide an
# `authorizations` let pointing at the relevant collection.
shared_examples 'removing authorization' do
context 'when config contains no groups or projects' do
let(:config) { {} }
it 'removes all authorizations' do
expect(subject).to be_truthy
expect(authorizations).to be_empty
end
end
context 'when config contains groups or projects outside of the configuration project hierarchy' do
let_it_be(:agent_management_project) { create(:project, namespace: create(:group)) }
it 'removes all authorizations' do
expect(subject).to be_truthy
expect(authorizations).to be_empty
end
end
context 'when configuration project does not belong to a group' do
let_it_be(:agent_management_project) { create(:project) }
it 'removes all authorizations' do
expect(subject).to be_truthy
expect(authorizations).to be_empty
end
end
end
describe 'group authorization' do
it 'refreshes authorizations for the agent' do
expect(subject).to be_truthy
expect(agent.user_access_authorized_groups).to contain_exactly(group_2)
added_authorization = agent.user_access_group_authorizations.find_by(group: group_2)
expect(added_authorization.config).to eq({})
end
context 'when config contains "access_as" keyword' do
let(:extra_config) do
{
user_access: {
access_as: {
agent: {}
}
}
}
end
it 'refreshes authorizations for the agent' do
expect(subject).to be_truthy
expect(agent.user_access_authorized_groups).to contain_exactly(group_2)
added_authorization = agent.user_access_group_authorizations.find_by(group: group_2)
expect(added_authorization.config).to eq({ 'access_as' => { 'agent' => {} } })
end
end
context 'when config contains too many groups' do
before do
stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 0)
end
it 'authorizes groups up to the limit' do
expect(subject).to be_truthy
expect(agent.user_access_authorized_groups).to be_empty
end
end
include_examples 'removing authorization' do
let(:authorizations) { agent.user_access_authorized_groups }
end
end
describe 'project authorization' do
it 'refreshes authorizations for the agent' do
expect(subject).to be_truthy
expect(agent.user_access_authorized_projects).to contain_exactly(project_2)
added_authorization = agent.user_access_project_authorizations.find_by(project: project_2)
expect(added_authorization.config).to eq({})
end
context 'when config contains "access_as" keyword' do
let(:extra_config) do
{
user_access: {
access_as: {
agent: {}
}
}
}
end
it 'refreshes authorizations for the agent' do
expect(subject).to be_truthy
expect(agent.user_access_authorized_projects).to contain_exactly(project_2)
added_authorization = agent.user_access_project_authorizations.find_by(project: project_2)
expect(added_authorization.config).to eq({ 'access_as' => { 'agent' => {} } })
end
end
context 'when project belongs to a user namespace, and is in the same namespace as the agent' do
let_it_be(:root_ancestor) { create(:namespace) }
let_it_be(:agent_management_project) { create(:project, namespace: root_ancestor) }
let_it_be(:project_1) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) }
let_it_be(:project_2) { create(:project, namespace: root_ancestor) }
it 'creates an authorization record for the project' do
expect(subject).to be_truthy
expect(agent.user_access_authorized_projects).to contain_exactly(project_2)
end
end
context 'when project belongs to a user namespace, and is authorizing itself' do
let_it_be(:root_ancestor) { create(:namespace) }
let_it_be(:agent_management_project) { create(:project, namespace: root_ancestor) }
let_it_be(:project_1) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) }
let_it_be(:project_2) { agent_management_project }
it 'creates an authorization record for the project' do
expect(subject).to be_truthy
expect(agent.user_access_authorized_projects).to contain_exactly(project_2)
end
end
context 'when config contains too many projects' do
before do
stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 0)
end
it 'authorizes projects up to the limit' do
expect(subject).to be_truthy
expect(agent.user_access_authorized_projects).to be_empty
end
end
include_examples 'removing authorization' do
let(:authorizations) { agent.user_access_authorized_projects }
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Clusters
  module Agents
    module Authorizations
      module CiAccess
        # Narrows a set of agent authorizations using filter params.
        # Currently supports filtering by deployment environment: an
        # authorization survives when it declares no environments at all,
        # or when any of its environment patterns matches the filter.
        class FilterService
          def initialize(authorizations, filter_params)
            @authorizations = authorizations
            @filter_params = filter_params
            @environments_matcher = {}
          end

          # @return [Array] the authorizations that pass every active filter
          def execute
            filter_by_environment(authorizations)
          end

          private

          attr_reader :authorizations, :filter_params

          def filter_by_environment(auths)
            return auths unless filter_by_environment?

            auths.select { |auth| environment_accessible?(auth) }
          end

          # True when the authorization is unrestricted (no environments
          # configured) or one of its patterns matches the requested one.
          def environment_accessible?(auth)
            patterns = auth.config['environments']

            patterns.blank? || patterns.any? { |environment_pattern| matches_environment?(environment_pattern) }
          end

          def filter_by_environment?
            filter_params.has_key?(:environment)
          end

          def environment_filter
            @environment_filter ||= filter_params[:environment]
          end

          def matches_environment?(environment_pattern)
            return false if environment_filter.nil?

            environments_matcher(environment_pattern).match?(environment_filter)
          end

          # Matchers are memoized per pattern to avoid rebuilding one for
          # every authorization in the list.
          def environments_matcher(environment_pattern)
            @environments_matcher[environment_pattern] ||= ::Gitlab::Ci::EnvironmentMatcher.new(environment_pattern)
          end
        end
      end
    end
  end
end
``` | # frozen_string_literal: true
# Spec for Clusters::Agents::Authorizations::CiAccess::FilterService: verifies
# environment filtering — authorizations without environments always pass,
# patterned environments (wildcards) match, and a nil filter matches only
# unrestricted authorizations.
require 'spec_helper'
RSpec.describe Clusters::Agents::Authorizations::CiAccess::FilterService, feature_category: :continuous_integration do
describe '#execute' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let(:agent_authorizations_without_env) do
[
build(:agent_ci_access_project_authorization, project: project, agent: build(:cluster_agent, project: project)),
build(:agent_ci_access_group_authorization, group: group, agent: build(:cluster_agent, project: project)),
::Clusters::Agents::Authorizations::CiAccess::ImplicitAuthorization.new(agent: build(:cluster_agent, project: project))
]
end
let(:filter_params) { {} }
subject(:execute_filter) { described_class.new(agent_authorizations, filter_params).execute }
context 'when there are no filters' do
let(:agent_authorizations) { agent_authorizations_without_env }
it 'returns the authorizations as is' do
expect(execute_filter).to eq agent_authorizations
end
end
context 'when filtering by environment' do
let(:agent_authorizations_with_env) do
[
build(
:agent_ci_access_project_authorization,
project: project,
agent: build(:cluster_agent, project: project),
environments: ['staging', 'review/*', 'production']
),
build(
:agent_ci_access_group_authorization,
group: group,
agent: build(:cluster_agent, project: project),
environments: ['staging', 'review/*', 'production']
)
]
end
let(:agent_authorizations_with_different_env) do
[
build(
:agent_ci_access_project_authorization,
project: project,
agent: build(:cluster_agent, project: project),
environments: ['staging']
),
build(
:agent_ci_access_group_authorization,
group: group,
agent: build(:cluster_agent, project: project),
environments: ['staging']
)
]
end
let(:agent_authorizations) do
(
agent_authorizations_without_env +
agent_authorizations_with_env +
agent_authorizations_with_different_env
)
end
let(:filter_params) { { environment: 'production' } }
it 'returns the authorizations with the given environment AND authorizations without any environment' do
expected_authorizations = agent_authorizations_with_env + agent_authorizations_without_env
expect(execute_filter).to match_array expected_authorizations
end
context 'when environment filter has a wildcard' do
let(:filter_params) { { environment: 'review/123' } }
it 'returns the authorizations with matching environments AND authorizations without any environment' do
expected_authorizations = agent_authorizations_with_env + agent_authorizations_without_env
expect(execute_filter).to match_array expected_authorizations
end
end
context 'when environment filter is nil' do
let(:filter_params) { { environment: nil } }
it 'returns the authorizations without any environment' do
expect(execute_filter).to match_array agent_authorizations_without_env
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
module Agents
module Authorizations
module CiAccess
# Reconciles the agent's CI-access authorization rows (projects and
# groups) with the `ci_access` section of the agent config file.
class RefreshService
include Gitlab::Utils::StrongMemoize
# At most this many project/group entries from the config are honored.
AUTHORIZED_ENTITY_LIMIT = 100
delegate :project, to: :agent, private: true
delegate :root_ancestor, to: :project, private: true
def initialize(agent, config:)
@agent = agent
@config = config
end
# Refreshes both project and group authorizations; always returns true.
def execute
refresh_projects!
refresh_groups!
true
end
private
attr_reader :agent, :config
# Upserts configured project rows and prunes unlisted ones inside a row
# lock on the agent; an empty config wipes all rows.
def refresh_projects!
if allowed_project_configurations.present?
project_ids = allowed_project_configurations.map { |config| config.fetch(:project_id) }
agent.with_lock do
agent.ci_access_project_authorizations.upsert_all(allowed_project_configurations, unique_by: [:agent_id, :project_id])
agent.ci_access_project_authorizations.where.not(project_id: project_ids).delete_all # rubocop: disable CodeReuse/ActiveRecord
end
else
# The :delete_all argument selects the association's delete strategy
# (rows removed without destroy callbacks).
agent.ci_access_project_authorizations.delete_all(:delete_all)
end
end
# Same reconciliation as refresh_projects!, for group authorizations.
def refresh_groups!
if allowed_group_configurations.present?
group_ids = allowed_group_configurations.map { |config| config.fetch(:group_id) }
agent.with_lock do
agent.ci_access_group_authorizations.upsert_all(allowed_group_configurations, unique_by: [:agent_id, :group_id])
agent.ci_access_group_authorizations.where.not(group_id: group_ids).delete_all # rubocop: disable CodeReuse/ActiveRecord
end
else
agent.ci_access_group_authorizations.delete_all(:delete_all)
end
end
# Maps configured project paths (within the root ancestor) to upsert rows,
# keeping each entry's remaining config keyed by downcased full path.
def allowed_project_configurations
strong_memoize(:allowed_project_configurations) do
project_entries = extract_config_entries(entity: 'projects')
if project_entries
allowed_projects.where_full_path_in(project_entries.keys).map do |project|
{ project_id: project.id, config: project_entries[project.full_path.downcase] }
end
end
end
end
def allowed_group_configurations
strong_memoize(:allowed_group_configurations) do
group_entries = extract_config_entries(entity: 'groups')
if group_entries
allowed_groups.where_full_path_in(group_entries.keys).map do |group|
{ group_id: group.id, config: group_entries[group.full_path.downcase] }
end
end
end
end
# Takes the first AUTHORIZED_ENTITY_LIMIT entries and indexes them by
# downcased path, enabling case-insensitive matching. NOTE: mutates each
# entry in place by removing its 'id' key.
def extract_config_entries(entity:)
config.dig('ci_access', entity)
&.first(AUTHORIZED_ENTITY_LIMIT)
&.index_by { |config| config.delete('id').downcase }
end
def allowed_projects
root_ancestor.all_projects
end
# Group authorizations are only possible when the root ancestor is a
# group (not a user namespace).
def allowed_groups
if group_root_ancestor?
root_ancestor.self_and_descendants
else
::Group.none
end
end
def group_root_ancestor?
root_ancestor.group_namespace?
end
end
end
end
end
end
``` | # frozen_string_literal: true
# Spec for Clusters::Agents::Authorizations::CiAccess::RefreshService: verifies
# that group/project CI-access authorizations are added, updated, and pruned to
# match the config, including case-insensitive paths, the entity limit, and the
# removal scenarios (shared examples).
require 'spec_helper'
RSpec.describe Clusters::Agents::Authorizations::CiAccess::RefreshService, feature_category: :deployment_management do
describe '#execute' do
let_it_be(:root_ancestor) { create(:group) }
let_it_be(:removed_group) { create(:group, parent: root_ancestor) }
let_it_be(:modified_group) { create(:group, parent: root_ancestor) }
let_it_be(:added_group) { create(:group, path: 'group-path-with-UPPERCASE', parent: root_ancestor) }
let_it_be(:removed_project) { create(:project, namespace: root_ancestor) }
let_it_be(:modified_project) { create(:project, namespace: root_ancestor) }
let_it_be(:added_project) { create(:project, path: 'project-path-with-UPPERCASE', namespace: root_ancestor) }
let(:project) { create(:project, namespace: root_ancestor) }
let(:agent) { create(:cluster_agent, project: project) }
let(:config) do
{
ci_access: {
groups: [
{ id: added_group.full_path, default_namespace: 'default' },
# Uppercase path verifies case-insensitive matching.
{ id: modified_group.full_path.upcase, default_namespace: 'new-namespace' }
],
projects: [
{ id: added_project.full_path, default_namespace: 'default' },
# Uppercase path verifies case-insensitive matching.
{ id: modified_project.full_path.upcase, default_namespace: 'new-namespace' }
]
}
}.deep_stringify_keys
end
subject { described_class.new(agent, config: config).execute }
# Seed rows: removed_* should be pruned, modified_* should be updated.
before do
default_config = { default_namespace: 'default' }
agent.ci_access_group_authorizations.create!(group: removed_group, config: default_config)
agent.ci_access_group_authorizations.create!(group: modified_group, config: default_config)
agent.ci_access_project_authorizations.create!(project: removed_project, config: default_config)
agent.ci_access_project_authorizations.create!(project: modified_project, config: default_config)
end
# Shared by the group and project sections below; callers provide an
# `authorizations` let pointing at the relevant collection.
shared_examples 'removing authorization' do
context 'config contains no groups' do
let(:config) { {} }
it 'removes all authorizations' do
expect(subject).to be_truthy
expect(authorizations).to be_empty
end
end
context 'config contains groups outside of the configuration project hierarchy' do
let(:project) { create(:project, namespace: create(:group)) }
it 'removes all authorizations' do
expect(subject).to be_truthy
expect(authorizations).to be_empty
end
end
context 'configuration project does not belong to a group' do
let(:project) { create(:project) }
it 'removes all authorizations' do
expect(subject).to be_truthy
expect(authorizations).to be_empty
end
end
end
describe 'group authorization' do
it 'refreshes authorizations for the agent' do
expect(subject).to be_truthy
expect(agent.ci_access_authorized_groups).to contain_exactly(added_group, modified_group)
added_authorization = agent.ci_access_group_authorizations.find_by(group: added_group)
expect(added_authorization.config).to eq({ 'default_namespace' => 'default' })
modified_authorization = agent.ci_access_group_authorizations.find_by(group: modified_group)
expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' })
end
context 'config contains too many groups' do
before do
stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1)
end
it 'authorizes groups up to the limit' do
expect(subject).to be_truthy
expect(agent.ci_access_authorized_groups).to contain_exactly(added_group)
end
end
include_examples 'removing authorization' do
let(:authorizations) { agent.ci_access_authorized_groups }
end
end
describe 'project authorization' do
it 'refreshes authorizations for the agent' do
expect(subject).to be_truthy
expect(agent.ci_access_authorized_projects).to contain_exactly(added_project, modified_project)
added_authorization = agent.ci_access_project_authorizations.find_by(project: added_project)
expect(added_authorization.config).to eq({ 'default_namespace' => 'default' })
modified_authorization = agent.ci_access_project_authorizations.find_by(project: modified_project)
expect(modified_authorization.config).to eq({ 'default_namespace' => 'new-namespace' })
end
context 'project does not belong to a group, and is in the same namespace as the agent' do
let(:root_ancestor) { create(:namespace) }
let(:added_project) { create(:project, namespace: root_ancestor) }
it 'creates an authorization record for the project' do
expect(subject).to be_truthy
expect(agent.ci_access_authorized_projects).to contain_exactly(added_project)
end
end
context 'project does not belong to a group, and is authorizing itself' do
let(:root_ancestor) { create(:namespace) }
let(:added_project) { project }
it 'creates an authorization record for the project' do
expect(subject).to be_truthy
expect(agent.ci_access_authorized_projects).to contain_exactly(added_project)
end
end
context 'config contains too many projects' do
before do
stub_const("#{described_class}::AUTHORIZED_ENTITY_LIMIT", 1)
end
it 'authorizes projects up to the limit' do
expect(subject).to be_truthy
expect(agent.ci_access_authorized_projects).to contain_exactly(added_project)
end
end
include_examples 'removing authorization' do
let(:authorizations) { agent.ci_access_authorized_projects }
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Clusters
  module Integrations
    # Enables or disables a cluster integration (currently only Prometheus)
    # on behalf of a user holding admin_cluster rights.
    class CreateService < BaseContainerService
      attr_accessor :cluster

      def initialize(container:, cluster:, current_user: nil, params: {})
        @cluster = cluster

        super(container: container, current_user: current_user, params: params)
      end

      # @return [ServiceResponse] success with the integration in the payload,
      #   or an error when the user lacks permission
      # @raise [ArgumentError] when params[:application_type] is not supported
      def execute
        return ServiceResponse.error(message: 'Unauthorized') unless authorized?

        integration.enabled = params[:enabled]
        integration.save!

        message =
          if integration.enabled?
            s_('ClusterIntegration|Integration enabled')
          else
            s_('ClusterIntegration|Integration disabled')
          end

        ServiceResponse.success(message: message, payload: { integration: integration })
      end

      private

      # Finds or builds the integration record for the requested type.
      def integration
        @integration ||=
          case params[:application_type]
          when 'prometheus'
            cluster.find_or_build_integration_prometheus
          else
            raise ArgumentError, "invalid application_type: #{params[:application_type]}"
          end
      end

      def authorized?
        Ability.allowed?(current_user, :admin_cluster, cluster)
      end
    end
  end
end
``` | # frozen_string_literal: true
# Spec for Clusters::Integrations::CreateService#execute: covers enabling and
# disabling an integration, updating an existing record, rejecting unknown
# application types, and the authorization guard.
require 'spec_helper'
RSpec.describe Clusters::Integrations::CreateService, '#execute', feature_category: :deployment_management do
let_it_be(:project) { create(:project) }
let_it_be_with_reload(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
let(:service) do
described_class.new(container: project, cluster: cluster, current_user: project.first_owner, params: params)
end
# Parameterized over the application type so future integrations can reuse it.
shared_examples_for 'a cluster integration' do |application_type|
let(:integration) { cluster.public_send("integration_#{application_type}") }
context 'when enabled param is true' do
let(:params) do
{ application_type: application_type, enabled: true }
end
it 'creates a new enabled integration' do
expect(service.execute).to be_success
expect(integration).to be_present
expect(integration).to be_persisted
expect(integration).to be_enabled
end
end
context 'when enabled param is false' do
let(:params) do
{ application_type: application_type, enabled: false }
end
it 'creates a new disabled integration' do
expect(service.execute).to be_success
expect(integration).to be_present
expect(integration).to be_persisted
expect(integration).not_to be_enabled
end
end
context 'when integration already exists' do
before do
create(:"clusters_integrations_#{application_type}", cluster: cluster, enabled: false)
end
let(:params) do
{ application_type: application_type, enabled: true }
end
it 'updates the integration' do
expect(integration).not_to be_enabled
expect(service.execute).to be_success
expect(integration.reload).to be_enabled
end
end
end
it_behaves_like 'a cluster integration', 'prometheus'
context 'when application_type is invalid' do
let(:params) do
{ application_type: 'something_else', enabled: true }
end
it 'errors' do
expect { service.execute }.to raise_error(ArgumentError)
end
end
context 'when user is unauthorized' do
let(:params) do
{ application_type: 'prometheus', enabled: true }
end
let(:service) do
unauthorized_user = create(:user)
described_class.new(container: project, cluster: cluster, current_user: unauthorized_user, params: params)
end
it 'returns error and does not create a new integration record' do
expect(service.execute).to be_error
expect(cluster.integration_prometheus).to be_nil
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
module Cleanup
# Deletes the Kubernetes namespaces GitLab created for a cluster, one batch
# per run, as part of the cluster cleanup state machine.
class ProjectNamespaceService < ::Clusters::Cleanup::BaseService
KUBERNETES_NAMESPACE_BATCH_SIZE = 100
def execute
delete_project_namespaces_in_batches
# Keep calling the worker until all namespaces are deleted
if cluster.kubernetes_namespaces.exists?
return schedule_next_execution(Clusters::Cleanup::ProjectNamespaceWorker)
end
cluster.continue_cleanup!
end
private
# Deletes up to KUBERNETES_NAMESPACE_BATCH_SIZE namespaces from the cluster
# API and, on success (or tolerated API failure), removes the DB records.
def delete_project_namespaces_in_batches
kubernetes_namespaces_batch = cluster.kubernetes_namespaces.first(KUBERNETES_NAMESPACE_BATCH_SIZE)
kubernetes_namespaces_batch.each do |kubernetes_namespace|
log_event(:deleting_project_namespace, namespace: kubernetes_namespace.namespace)
begin
kubeclient_delete_namespace(kubernetes_namespace)
rescue Kubeclient::HttpError => e
# unauthorized, forbidden: GitLab's access has been revoked
# certificate verify failed: Cluster is probably gone forever
raise unless /unauthorized|forbidden|certificate verify failed/i.match?(e.message)
end
kubernetes_namespace.destroy!
end
end
# Issues the namespace deletion against the cluster; a nil kubeclient is a
# no-op thanks to safe navigation.
def kubeclient_delete_namespace(kubernetes_namespace)
cluster.kubeclient&.delete_namespace(kubernetes_namespace.namespace)
rescue Kubeclient::ResourceNotFoundError
# The resources have already been deleted, possibly on a previous attempt that timed out
rescue Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError
# User gave an invalid cluster from the start, or deleted the endpoint before this job ran
end
end
end
end
``` | # frozen_string_literal: true
# Spec for Clusters::Cleanup::ProjectNamespaceService: verifies namespace
# deletion (API + database), rescheduling via ServiceAccountWorker, logging,
# state transitions, and the tolerated Kubeclient::HttpError messages.
require 'spec_helper'
RSpec.describe Clusters::Cleanup::ProjectNamespaceService, feature_category: :deployment_management do
describe '#execute' do
subject { service.execute }
let!(:service) { described_class.new(cluster) }
let!(:cluster) { create(:cluster, :with_environments, :cleanup_removing_project_namespaces) }
let!(:logger) { service.send(:logger) }
let(:log_meta) do
{
service: described_class.name,
cluster_id: cluster.id,
execution_count: 0
}
end
let(:kubeclient_instance_double) do
instance_double(Gitlab::Kubernetes::KubeClient, delete_namespace: nil, delete_service_account: nil)
end
before do
allow_any_instance_of(Clusters::Cluster).to receive(:kubeclient).and_return(kubeclient_instance_double)
end
context 'when cluster has namespaces to be deleted' do
it 'deletes namespaces from cluster' do
expect(kubeclient_instance_double).to receive(:delete_namespace)
.with cluster.kubernetes_namespaces[0].namespace
expect(kubeclient_instance_double).to receive(:delete_namespace)
.with(cluster.kubernetes_namespaces[1].namespace)
subject
end
it 'deletes namespaces from database' do
expect { subject }.to change { cluster.kubernetes_namespaces.exists? }.from(true).to(false)
end
it 'schedules ::ServiceAccountWorker' do
expect(Clusters::Cleanup::ServiceAccountWorker).to receive(:perform_async).with(cluster.id)
subject
end
it 'logs all events' do
expect(logger).to receive(:info)
.with(
log_meta.merge(
event: :deleting_project_namespace,
namespace: cluster.kubernetes_namespaces[0].namespace))
expect(logger).to receive(:info)
.with(
log_meta.merge(
event: :deleting_project_namespace,
namespace: cluster.kubernetes_namespaces[1].namespace))
subject
end
context 'when cluster.kubeclient is nil' do
let(:kubeclient_instance_double) { nil }
it 'schedules ::ServiceAccountWorker' do
expect(Clusters::Cleanup::ServiceAccountWorker).to receive(:perform_async).with(cluster.id)
subject
end
it 'deletes namespaces from database' do
expect { subject }.to change { cluster.kubernetes_namespaces.exists? }.from(true).to(false)
end
end
end
context 'when cluster has no namespaces' do
let!(:cluster) { create(:cluster, :cleanup_removing_project_namespaces) }
it 'schedules Clusters::Cleanup::ServiceAccountWorker' do
expect(Clusters::Cleanup::ServiceAccountWorker).to receive(:perform_async).with(cluster.id)
subject
end
it 'transitions to cleanup_removing_service_account' do
expect { subject }
.to change { cluster.reload.cleanup_status_name }
.from(:cleanup_removing_project_namespaces)
.to(:cleanup_removing_service_account)
end
it 'does not try to delete namespaces' do
expect(kubeclient_instance_double).not_to receive(:delete_namespace)
subject
end
end
context 'when there is a Kubeclient::HttpError' do
let(:kubeclient_instance_double) do
instance_double(Gitlab::Kubernetes::KubeClient)
end
['Unauthorized', 'forbidden', 'Certificate verify Failed'].each do |message|
it 'schedules ::ServiceAccountWorker with accepted errors' do
allow(kubeclient_instance_double)
.to receive(:delete_namespace)
.and_raise(Kubeclient::HttpError.new(401, message, nil))
expect(Clusters::Cleanup::ServiceAccountWorker).to receive(:perform_async).with(cluster.id)
subject
end
end
it 'raises error with unaccepted errors' do
allow(kubeclient_instance_double)
.to receive(:delete_namespace)
.and_raise(Kubeclient::HttpError.new(401, 'unexpected message', nil))
expect { subject }.to raise_error(Kubeclient::HttpError)
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Clusters
module Cleanup
# Final step of cluster cleanup: removes GitLab's service account from the
# cluster (best effort) and then destroys the cluster record itself.
class ServiceAccountService < ::Clusters::Cleanup::BaseService
def execute
delete_gitlab_service_account
log_event(:destroying_cluster)
cluster.destroy!
end
private
# Best-effort removal of the gitlab service account; the rescued failures
# mean the cluster is unreachable or the account is already gone, so the
# cleanup still proceeds to destroy the record.
def delete_gitlab_service_account
log_event(:deleting_gitlab_service_account)
cluster.kubeclient&.delete_service_account(
::Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAME,
::Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAMESPACE
)
rescue Kubeclient::ResourceNotFoundError
# The resources have already been deleted, possibly on a previous attempt that timed out
rescue Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError
# User gave an invalid cluster from the start, or deleted the endpoint before this job ran
rescue Kubeclient::HttpError => e
# unauthorized, forbidden: GitLab's access has been revoked
# certificate verify failed: Cluster is probably gone forever
raise unless /unauthorized|forbidden|certificate verify failed/i.match?(e.message)
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Clusters::Cleanup::ServiceAccountService: deletes the gitlab
# service account on the cluster, logs events, and destroys the cluster row.
RSpec.describe Clusters::Cleanup::ServiceAccountService, feature_category: :deployment_management do
  describe '#execute' do
    subject { service.execute }

    let!(:service) { described_class.new(cluster) }
    let!(:cluster) { create(:cluster, :cleanup_removing_service_account) }
    let!(:logger) { service.send(:logger) }

    # Structured fields the service is expected to attach to every log line.
    let(:log_meta) do
      {
        service: described_class.name,
        cluster_id: cluster.id,
        execution_count: 0
      }
    end

    let(:kubeclient_instance_double) do
      instance_double(Gitlab::Kubernetes::KubeClient, delete_namespace: nil, delete_service_account: nil)
    end

    before do
      # Every cluster instance shares the stubbed kubeclient.
      allow_any_instance_of(Clusters::Cluster).to receive(:kubeclient).and_return(kubeclient_instance_double)
    end

    it 'deletes gitlab service account' do
      expect(kubeclient_instance_double).to receive(:delete_service_account)
        .with(
          ::Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAME,
          ::Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAMESPACE)
      subject
    end

    it 'logs all events' do
      expect(logger).to receive(:info).with(log_meta.merge(event: :deleting_gitlab_service_account))
      expect(logger).to receive(:info).with(log_meta.merge(event: :destroying_cluster))
      subject
    end

    it 'deletes cluster' do
      expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false)
    end

    context 'when cluster.kubeclient is nil' do
      let(:kubeclient_instance_double) { nil }

      it 'deletes cluster' do
        expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false)
      end
    end

    context 'when there is a Kubeclient::HttpError' do
      # These messages are treated as "access gone forever" by the service.
      ['Unauthorized', 'forbidden', 'Certificate verify Failed'].each do |message|
        context "with error:#{message}" do
          before do
            allow(kubeclient_instance_double)
              .to receive(:delete_service_account)
              .and_raise(Kubeclient::HttpError.new(401, message, nil))
          end

          it 'destroys cluster' do
            expect { subject }.to change { Clusters::Cluster.where(id: cluster.id).exists? }.from(true).to(false)
          end
        end
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Clusters
  module AgentTokens
    # Records when an agent token was last used, throttling writes to the
    # token's database row.
    class TrackUsageService
      # The `UPDATE_USED_COLUMN_EVERY` defines how often the token DB entry
      # can be updated; a random value from the range is used so concurrent
      # requests don't all write at once.
      UPDATE_USED_COLUMN_EVERY = (40.minutes..55.minutes)

      delegate :agent, to: :token

      def initialize(token)
        @token = token
      end

      def execute
        usage = { last_used_at: Time.current.utc }

        # The cached copy is always refreshed — it is cheap.
        token.cache_attributes(usage)

        return unless stale_enough_for_db_update?

        # First contact since the agent disconnected: record the event.
        log_activity_event!(usage[:last_used_at]) unless agent.connected?

        # update_columns skips callbacks/validations so updated_at is untouched.
        token.update_columns(usage)
      end

      private

      attr_reader :token

      # True when the persisted timestamp is absent or older than a random
      # threshold; randomness spreads out updates under high token traffic.
      def stale_enough_for_db_update?
        threshold = Random.rand(UPDATE_USED_COLUMN_EVERY)
        persisted_at = token.read_attribute(:last_used_at)

        persisted_at.nil? || (Time.current - persisted_at) >= threshold
      end

      def log_activity_event!(recorded_at)
        Clusters::Agents::CreateActivityEventService.new(
          agent,
          kind: :agent_connected,
          level: :info,
          recorded_at: recorded_at,
          agent_token: token
        ).execute
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Clusters::AgentTokens::TrackUsageService: cache writes always
# happen, DB writes are throttled, and activity events fire on reconnect.
RSpec.describe Clusters::AgentTokens::TrackUsageService, feature_category: :deployment_management do
  let_it_be(:agent) { create(:cluster_agent) }

  describe '#execute', :clean_gitlab_redis_cache do
    let(:agent_token) { create(:cluster_agent_token, agent: agent) }

    subject { described_class.new(agent_token).execute }

    context 'when last_used_at was updated recently' do
      before do
        # 10 minutes is well below the 40-55 minute update window.
        agent_token.update!(last_used_at: 10.minutes.ago)
      end

      it 'updates cache but not database' do
        expect { subject }.not_to change { agent_token.reload.read_attribute(:last_used_at) }
        expect_redis_update
      end
    end

    context 'when last_used_at was not updated recently' do
      it 'updates cache and database' do
        does_db_update
        expect_redis_update
      end

      context 'with invalid token' do
        before do
          # Oversized description makes the record invalid; the service must
          # still write via update_columns (which skips validations).
          agent_token.description = SecureRandom.hex(2000)
        end

        it 'still updates caches and database' do
          expect(agent_token).to be_invalid

          does_db_update
          expect_redis_update
        end
      end

      context 'agent is not connected' do
        before do
          allow(agent).to receive(:connected?).and_return(false)
        end

        it 'creates an activity event' do
          expect { subject }.to change { agent.activity_events.count }

          event = agent.activity_events.last
          expect(event).to have_attributes(
            kind: 'agent_connected',
            level: 'info',
            recorded_at: agent_token.reload.read_attribute(:last_used_at),
            agent_token: agent_token
          )
        end
      end

      context 'agent is connected' do
        before do
          allow(agent).to receive(:connected?).and_return(true)
        end

        it 'does not create an activity event' do
          expect { subject }.not_to change { agent.activity_events.count }
        end
      end
    end

    # Asserts the cached attribute blob landed in Redis.
    def expect_redis_update
      Gitlab::Redis::Cache.with do |redis|
        redis_key = "cache:#{agent_token.class}:#{agent_token.id}:attributes"
        expect(redis.get(redis_key)).to be_present
      end
    end

    # Asserts the persisted last_used_at column changed.
    def does_db_update
      expect { subject }.to change { agent_token.reload.read_attribute(:last_used_at) }
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Clusters
  module AgentTokens
    # Revokes a cluster agent token on behalf of a user and records the
    # revocation as an agent activity event.
    class RevokeService
      attr_reader :current_project, :current_user, :token

      def initialize(token:, current_user:)
        @token = token
        @current_user = current_user
      end

      # Returns a ServiceResponse; error when the user lacks permission or
      # the token fails to revoke.
      def execute
        return error_no_permissions unless authorized?

        return ServiceResponse.error(message: token.errors.full_messages) unless token.revoke!

        log_activity_event(token)
        ServiceResponse.success
      end

      private

      # Revoking requires the same permission as creating a cluster on the
      # agent's project.
      def authorized?
        current_user.can?(:create_cluster, token.agent.project)
      end

      def error_no_permissions
        ServiceResponse.error(
          message: s_('ClusterAgent|User has insufficient permissions to revoke the token for this project'))
      end

      def log_activity_event(token)
        Clusters::Agents::CreateActivityEventService.new(
          token.agent,
          kind: :token_revoked,
          level: :info,
          recorded_at: token.updated_at,
          user: current_user,
          agent_token: token
        ).execute
      end
    end
  end
end

Clusters::AgentTokens::RevokeService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Clusters::AgentTokens::RevokeService: permission checks,
# validation failures, and activity-event creation.
RSpec.describe Clusters::AgentTokens::RevokeService, feature_category: :deployment_management do
  describe '#execute' do
    subject { described_class.new(token: agent_token, current_user: user).execute }

    let(:agent) { create(:cluster_agent) }
    let(:agent_token) { create(:cluster_agent_token, agent: agent) }
    let(:project) { agent.project }
    let(:user) { agent.created_by_user }

    # NOTE(review): this outer before is repeated inside the 'when user is
    # authorized' context below, and also runs for the unauthorized context
    # (where `user` is overridden) before add_guest — presumably the later
    # add_guest sets the final access level; verify if restructuring.
    before do
      project.add_maintainer(user)
    end

    context 'when user is authorized' do
      before do
        project.add_maintainer(user)
      end

      context 'when user revokes agent token' do
        it 'succeeds' do
          subject

          expect(agent_token.revoked?).to be true
        end

        it 'creates an activity event' do
          expect { subject }.to change { ::Clusters::Agents::ActivityEvent.count }.by(1)

          event = agent.activity_events.last
          expect(event).to have_attributes(
            kind: 'token_revoked',
            level: 'info',
            recorded_at: agent_token.reload.updated_at,
            user: user,
            agent_token: agent_token
          )
        end
      end

      context 'when there is a validation failure' do
        before do
          agent_token.name = '' # make the record invalid, as we require a name to be present
        end

        it 'fails without raising an error', :aggregate_failures do
          expect(subject[:status]).to eq(:error)
          expect(subject[:message]).to eq(["Name can't be blank"])
        end

        it 'does not create an activity event' do
          expect { subject }.not_to change { ::Clusters::Agents::ActivityEvent.count }
        end
      end
    end

    context 'when user is not authorized' do
      let(:user) { create(:user) }

      before do
        project.add_guest(user)
      end

      context 'when user attempts to revoke agent token' do
        it 'fails' do
          subject

          expect(agent_token.revoked?).to be false
        end
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Clusters
  module AgentTokens
    # Creates a new token for a cluster agent, enforcing the two-active-tokens
    # limit and recording the creation as an activity event.
    class CreateService
      ALLOWED_PARAMS = %i[agent_id description name].freeze
      ACTIVE_TOKENS_LIMIT = 2

      attr_reader :agent, :current_user, :params

      def initialize(agent:, current_user:, params:)
        @agent = agent
        @current_user = current_user
        @params = params
      end

      # Returns a ServiceResponse whose payload carries the plaintext secret
      # and the persisted token on success.
      def execute
        return error_no_permissions unless current_user.can?(:create_cluster, agent.project)
        return error_active_tokens_limit_reached if active_tokens_limit_reached?

        attributes = filtered_params.merge(agent_id: agent.id, created_by_user: current_user)
        token = ::Clusters::AgentToken.new(attributes)

        return ServiceResponse.error(message: token.errors.full_messages) unless token.save

        log_activity_event(token)
        ServiceResponse.success(payload: { secret: token.token, token: token })
      end

      private

      def error_no_permissions
        ServiceResponse.error(message: s_('ClusterAgent|User has insufficient permissions to create a token for this project'))
      end

      def error_active_tokens_limit_reached
        ServiceResponse.error(message: s_('ClusterAgent|An agent can have only two active tokens at a time'))
      end

      def active_tokens_limit_reached?
        active_tokens_count >= ACTIVE_TOKENS_LIMIT
      end

      # Number of still-active tokens on this agent, as visible to the user.
      def active_tokens_count
        ::Clusters::AgentTokensFinder.new(agent, current_user, status: :active).execute.count
      end

      # Only whitelisted attributes may come from user-supplied params.
      def filtered_params
        params.slice(*ALLOWED_PARAMS)
      end

      def log_activity_event(token)
        Clusters::Agents::CreateActivityEventService.new(
          token.agent,
          kind: :token_created,
          level: :info,
          recorded_at: token.created_at,
          user: current_user,
          agent_token: token
        ).execute
      end
    end
  end
end

Clusters::AgentTokens::CreateService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Clusters::AgentTokens::CreateService: permissions, payload shape,
# activity events, validation errors, and the active-tokens limit.
RSpec.describe Clusters::AgentTokens::CreateService, feature_category: :deployment_management do
  subject(:service) { described_class.new(agent: cluster_agent, current_user: user, params: params) }

  let_it_be(:user) { create(:user) }

  let(:cluster_agent) { create(:cluster_agent) }
  let(:project) { cluster_agent.project }
  let(:params) { { description: 'token description', name: 'token name' } }

  describe '#execute' do
    subject { service.execute }

    # Default user has no role on the project.
    it 'does not create a new token due to user permissions' do
      expect { subject }.not_to change(::Clusters::AgentToken, :count)
    end

    it 'returns permission errors', :aggregate_failures do
      expect(subject.status).to eq(:error)
      expect(subject.message).to eq('User has insufficient permissions to create a token for this project')
    end

    context 'with user permissions' do
      before do
        project.add_maintainer(user)
      end

      it 'creates a new token' do
        expect { subject }.to change { ::Clusters::AgentToken.count }.by(1)
      end

      it 'returns success status', :aggregate_failures do
        expect(subject.status).to eq(:success)
        expect(subject.message).to be_nil
      end

      it 'returns token information', :aggregate_failures do
        token = subject.payload[:token]

        expect(subject.payload[:secret]).not_to be_nil
        expect(token.created_by_user).to eq(user)
        expect(token.description).to eq(params[:description])
        expect(token.name).to eq(params[:name])
      end

      it 'creates an activity event' do
        expect { subject }.to change { ::Clusters::Agents::ActivityEvent.count }.by(1)

        token = subject.payload[:token].reload
        event = cluster_agent.activity_events.last

        expect(event).to have_attributes(
          kind: 'token_created',
          level: 'info',
          recorded_at: token.created_at,
          user: token.created_by_user,
          agent_token: token
        )
      end

      context 'when params are invalid' do
        # agent_id is whitelisted but name is missing, so the save fails.
        let(:params) { { agent_id: 'bad_id' } }

        it 'does not create a new token' do
          expect { subject }.not_to change(::Clusters::AgentToken, :count)
        end

        it 'does not create an activity event' do
          expect { subject }.not_to change { ::Clusters::Agents::ActivityEvent.count }
        end

        it 'returns validation errors', :aggregate_failures do
          expect(subject.status).to eq(:error)
          expect(subject.message).to eq(["Name can't be blank"])
        end
      end

      context 'when the active agent tokens limit is reached' do
        before do
          # ACTIVE_TOKENS_LIMIT is 2, so two existing tokens hit the cap.
          create(:cluster_agent_token, agent: cluster_agent)
          create(:cluster_agent_token, agent: cluster_agent)
        end

        it 'returns an error' do
          expect(subject.status).to eq(:error)
          expect(subject.message).to eq('An agent can have only two active tokens at a time')
        end
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Clusters
  module Kubernetes
    # Reads the service-account token out of its Kubernetes Secret.
    # Kubernetes creates the Secret and fills in the token asynchronously,
    # so the lookup is retried a bounded number of times:
    # https://kubernetes.io/docs/reference/access-authn-authz/service-accounts-admin/#token-controller
    class FetchKubernetesTokenService
      DEFAULT_TOKEN_RETRY_DELAY = 5.seconds
      TOKEN_RETRY_LIMIT = 5

      attr_reader :kubeclient, :service_account_token_name, :namespace

      def initialize(kubeclient, service_account_token_name, namespace, token_retry_delay: DEFAULT_TOKEN_RETRY_DELAY)
        @kubeclient = kubeclient
        @service_account_token_name = service_account_token_name
        @namespace = namespace
        @token_retry_delay = token_retry_delay
      end

      # Returns the decoded token string, or nil when the token never became
      # available within TOKEN_RETRY_LIMIT attempts.
      def execute
        TOKEN_RETRY_LIMIT.times do
          encoded_token = fetch_encoded_token
          return Base64.decode64(encoded_token) if encoded_token

          sleep @token_retry_delay
        end

        nil
      end

      private

      # Base64 token from the secret payload, or nil when the secret is
      # missing or does not carry a token yet.
      def fetch_encoded_token
        get_secret&.dig('data', 'token')
      end

      def get_secret
        kubeclient.get_secret(service_account_token_name, namespace).as_json
      rescue Kubeclient::ResourceNotFoundError
        nil
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Clusters::Kubernetes::FetchKubernetesTokenService: retries until
# the secret's token appears, decodes it, and returns nil when it never does.
RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService, feature_category: :deployment_management do
  include KubernetesHelpers

  describe '#execute' do
    let(:api_url) { 'http://111.111.111.111' }
    let(:namespace) { 'my-namespace' }
    let(:service_account_token_name) { 'gitlab-token' }

    let(:kubeclient) do
      Gitlab::Kubernetes::KubeClient.new(
        api_url,
        auth_options: { username: 'admin', password: 'xxx' }
      )
    end

    # token_retry_delay: 0 keeps the retry loop fast in tests.
    subject { described_class.new(kubeclient, service_account_token_name, namespace, token_retry_delay: 0).execute }

    before do
      stub_kubeclient_discover(api_url)
    end

    context 'when params correct' do
      let(:decoded_token) { 'xxx.token.xxx' }
      let(:token) { Base64.encode64(decoded_token) }

      context 'when the secret exists' do
        before do
          stub_kubeclient_get_secret(
            api_url,
            metadata_name: service_account_token_name,
            namespace: namespace,
            token: token
          )
        end

        it { is_expected.to eq(decoded_token) }
      end

      context 'when there is a 500 error' do
        before do
          stub_kubeclient_get_secret_error(api_url, service_account_token_name, namespace: namespace, status: 500)
        end

        # Server errors are not swallowed — only ResourceNotFoundError is.
        it { expect { subject }.to raise_error(Kubeclient::HttpError) }
      end

      context 'when the secret does not exist on the first try' do
        before do
          stub_kubeclient_get_secret_not_found_then_found(
            api_url,
            metadata_name: service_account_token_name,
            namespace: namespace,
            token: token
          )
        end

        it 'retries and finds the token' do
          expect(subject).to eq(decoded_token)
        end
      end

      context 'when the secret permanently does not exist' do
        before do
          stub_kubeclient_get_secret_error(api_url, service_account_token_name, namespace: namespace, status: 404)
        end

        it { is_expected.to be_nil }
      end

      context 'when the secret is missing a token on the first try' do
        before do
          stub_kubeclient_get_secret_missing_token_then_with_token(
            api_url,
            metadata_name: service_account_token_name,
            namespace: namespace,
            token: token
          )
        end

        it 'retries and finds the token' do
          expect(subject).to eq(decoded_token)
        end
      end

      context 'when the secret is permanently missing a token' do
        before do
          stub_kubeclient_get_secret(
            api_url,
            metadata_name: service_account_token_name,
            namespace: namespace,
            token: nil
          )
        end

        it { is_expected.to be_nil }
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Clusters
  module Kubernetes
    # Creates or refreshes a service account (plus its token secret and the
    # RBAC roles/bindings it needs) on a Kubernetes cluster. Two entry
    # points: .gitlab_creator for the cluster-wide gitlab account and
    # .namespace_creator for a per-project namespace account.
    class CreateOrUpdateServiceAccountService
      def initialize(kubeclient, service_account_name:, service_account_namespace:, token_name:, rbac:, service_account_namespace_labels: nil, namespace_creator: false, role_binding_name: nil)
        @kubeclient = kubeclient
        @service_account_name = service_account_name
        @service_account_namespace = service_account_namespace
        @service_account_namespace_labels = service_account_namespace_labels
        @token_name = token_name
        @rbac = rbac
        @namespace_creator = namespace_creator
        @role_binding_name = role_binding_name
      end

      # Cluster-wide gitlab service account, bound (under RBAC) to
      # cluster-admin via a ClusterRoleBinding.
      def self.gitlab_creator(kubeclient, rbac:)
        self.new(
          kubeclient,
          service_account_name: Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAME,
          service_account_namespace: Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAMESPACE,
          token_name: Clusters::Kubernetes::GITLAB_ADMIN_TOKEN_NAME,
          rbac: rbac
        )
      end

      # Per-project namespaced service account; also ensures the namespace
      # exists and scopes RBAC to it.
      def self.namespace_creator(kubeclient, service_account_name:, service_account_namespace:, service_account_namespace_labels:, rbac:)
        self.new(
          kubeclient,
          service_account_name: service_account_name,
          service_account_namespace: service_account_namespace,
          service_account_namespace_labels: service_account_namespace_labels,
          token_name: "#{service_account_namespace}-token",
          rbac: rbac,
          namespace_creator: true,
          role_binding_name: "gitlab-#{service_account_namespace}"
        )
      end

      # Order matters: namespace first, then the account and its token
      # secret, then (RBAC only) the role bindings that grant access.
      def execute
        ensure_project_namespace_exists if namespace_creator

        kubeclient.create_or_update_service_account(service_account_resource)
        kubeclient.create_or_update_secret(service_account_token_resource)

        return unless rbac

        create_role_or_cluster_role_binding

        return unless namespace_creator

        create_or_update_knative_serving_role
        create_or_update_knative_serving_role_binding
        create_or_update_crossplane_database_role
        create_or_update_crossplane_database_role_binding
      end

      private

      attr_reader :kubeclient, :service_account_name, :service_account_namespace, :service_account_namespace_labels, :token_name, :rbac, :namespace_creator, :role_binding_name

      def ensure_project_namespace_exists
        Gitlab::Kubernetes::Namespace.new(
          service_account_namespace,
          kubeclient,
          labels: service_account_namespace_labels
        ).ensure_exists!
      end

      # Namespaced accounts get a RoleBinding (recreated from scratch so
      # stale subjects are dropped); the cluster-wide account gets a
      # ClusterRoleBinding.
      def create_role_or_cluster_role_binding
        if namespace_creator
          begin
            kubeclient.delete_role_binding(role_binding_name, service_account_namespace)
          rescue Kubeclient::ResourceNotFoundError
            # Do nothing as we will create new role binding below
          end

          kubeclient.update_role_binding(role_binding_resource)
        else
          kubeclient.create_or_update_cluster_role_binding(cluster_role_binding_resource)
        end
      end

      def create_or_update_knative_serving_role
        kubeclient.update_role(knative_serving_role_resource)
      end

      def create_or_update_knative_serving_role_binding
        kubeclient.update_role_binding(knative_serving_role_binding_resource)
      end

      def create_or_update_crossplane_database_role
        kubeclient.update_role(crossplane_database_role_resource)
      end

      def create_or_update_crossplane_database_role_binding
        kubeclient.update_role_binding(crossplane_database_role_binding_resource)
      end

      def service_account_resource
        Gitlab::Kubernetes::ServiceAccount.new(
          service_account_name,
          service_account_namespace
        ).generate
      end

      def service_account_token_resource
        Gitlab::Kubernetes::ServiceAccountToken.new(
          token_name,
          service_account_name,
          service_account_namespace
        ).generate
      end

      def cluster_role_binding_resource
        subjects = [{ kind: 'ServiceAccount', name: service_account_name, namespace: service_account_namespace }]

        Gitlab::Kubernetes::ClusterRoleBinding.new(
          Clusters::Kubernetes::GITLAB_CLUSTER_ROLE_BINDING_NAME,
          Clusters::Kubernetes::GITLAB_CLUSTER_ROLE_NAME,
          subjects
        ).generate
      end

      def role_binding_resource
        Gitlab::Kubernetes::RoleBinding.new(
          name: role_binding_name,
          role_name: Clusters::Kubernetes::PROJECT_CLUSTER_ROLE_NAME,
          role_kind: :ClusterRole,
          namespace: service_account_namespace,
          service_account_name: service_account_name
        ).generate
      end

      # Role granting full control over Knative Serving resources within the
      # project namespace.
      def knative_serving_role_resource
        Gitlab::Kubernetes::Role.new(
          name: Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME,
          namespace: service_account_namespace,
          rules: [{
            apiGroups: %w[serving.knative.dev],
            resources: %w[configurations configurationgenerations routes revisions revisionuids autoscalers services],
            verbs: %w[get list create update delete patch watch]
          }]
        ).generate
      end

      def knative_serving_role_binding_resource
        Gitlab::Kubernetes::RoleBinding.new(
          name: Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME,
          role_name: Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME,
          role_kind: :Role,
          namespace: service_account_namespace,
          service_account_name: service_account_name
        ).generate
      end

      # Role granting read/create access to Crossplane PostgreSQL instances
      # within the project namespace.
      def crossplane_database_role_resource
        Gitlab::Kubernetes::Role.new(
          name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME,
          namespace: service_account_namespace,
          rules: [{
            apiGroups: %w[database.crossplane.io],
            resources: %w[postgresqlinstances],
            verbs: %w[get list create watch]
          }]
        ).generate
      end

      def crossplane_database_role_binding_resource
        Gitlab::Kubernetes::RoleBinding.new(
          name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME,
          role_name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME,
          role_kind: :Role,
          namespace: service_account_namespace,
          service_account_name: service_account_name
        ).generate
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Clusters::Kubernetes::CreateOrUpdateServiceAccountService. Uses
# WebMock request expectations (via KubernetesHelpers stubs) to verify the
# exact Kubernetes API calls made by both factory entry points.
RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService, feature_category: :deployment_management do
  include KubernetesHelpers

  let(:api_url) { 'http://111.111.111.111' }
  let(:platform_kubernetes) { cluster.platform_kubernetes }
  let(:cluster_project) { cluster.cluster_project }
  let(:project) { cluster_project.project }

  let(:cluster) do
    create(
      :cluster,
      :project,
      :provided_by_gcp,
      platform_kubernetes: create(:cluster_platform_kubernetes, :configured)
    )
  end

  let(:kubeclient) do
    Gitlab::Kubernetes::KubeClient.new(
      api_url,
      auth_options: { username: 'admin', password: 'xxx' }
    )
  end

  # Shared across both entry points: account + token secret creation.
  shared_examples 'creates service account and token' do
    it 'creates a kubernetes service account' do
      subject

      expect(WebMock).to have_requested(:post, api_url + "/api/v1/namespaces/#{namespace}/serviceaccounts").with(
        body: hash_including(
          kind: 'ServiceAccount',
          metadata: { name: service_account_name, namespace: namespace }
        )
      )
    end

    it 'creates a kubernetes secret' do
      subject

      expect(WebMock).to have_requested(:post, api_url + "/api/v1/namespaces/#{namespace}/secrets").with(
        body: hash_including(
          kind: 'Secret',
          metadata: {
            name: token_name,
            namespace: namespace,
            annotations: {
              'kubernetes.io/service-account.name': service_account_name
            }
          },
          type: 'kubernetes.io/service-account-token'
        )
      )
    end
  end

  before do
    # Baseline stubs: namespace exists, account/secret do not yet exist.
    stub_kubeclient_discover(api_url)
    stub_kubeclient_get_namespace(api_url, namespace: namespace)
    stub_kubeclient_get_service_account_error(api_url, service_account_name, namespace: namespace)
    stub_kubeclient_create_service_account(api_url, namespace: namespace)
    stub_kubeclient_get_secret_error(api_url, token_name, namespace: namespace)
    stub_kubeclient_create_secret(api_url, namespace: namespace)
  end

  describe '.gitlab_creator' do
    let(:namespace) { 'default' }
    let(:service_account_name) { 'gitlab' }
    let(:token_name) { 'gitlab-token' }

    subject { described_class.gitlab_creator(kubeclient, rbac: rbac).execute }

    context 'with ABAC cluster' do
      let(:rbac) { false }

      it_behaves_like 'creates service account and token'
    end

    context 'with RBAC cluster' do
      let(:rbac) { true }
      let(:cluster_role_binding_name) { 'gitlab-admin' }

      before do
        cluster.platform_kubernetes.rbac!
        stub_kubeclient_put_cluster_role_binding(api_url, cluster_role_binding_name)
      end

      it_behaves_like 'creates service account and token'

      it 'creates a cluster role binding with cluster-admin access' do
        subject

        expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/gitlab-admin").with(
          body: hash_including(
            metadata: { name: 'gitlab-admin' },
            roleRef: {
              apiGroup: 'rbac.authorization.k8s.io',
              kind: 'ClusterRole',
              name: 'cluster-admin'
            },
            subjects: [
              {
                kind: 'ServiceAccount',
                name: service_account_name,
                namespace: namespace
              }
            ]
          )
        )
      end
    end
  end

  describe '.namespace_creator' do
    let(:namespace) { "#{project.path}-#{project.id}" }
    let(:namespace_labels) { { app: project.full_path_slug, env: "staging" } }
    let(:service_account_name) { "#{namespace}-service-account" }
    let(:token_name) { "#{namespace}-token" }

    subject do
      described_class.namespace_creator(
        kubeclient,
        service_account_name: service_account_name,
        service_account_namespace: namespace,
        service_account_namespace_labels: namespace_labels,
        rbac: rbac
      ).execute
    end

    context 'with ABAC cluster' do
      let(:rbac) { false }

      it_behaves_like 'creates service account and token'
    end

    context 'With RBAC enabled cluster' do
      let(:rbac) { true }
      let(:role_binding_name) { "gitlab-#{namespace}" }

      before do
        cluster.platform_kubernetes.rbac!
        # Role-binding is deleted then recreated; knative/crossplane roles
        # and bindings are idempotent PUTs.
        stub_kubeclient_delete_role_binding(api_url, role_binding_name, namespace: namespace)
        stub_kubeclient_put_role_binding(api_url, role_binding_name, namespace: namespace)
        stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME, namespace: namespace)
        stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
        stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace)
        stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace)
      end

      it 'creates a namespace object' do
        kubernetes_namespace = double(Gitlab::Kubernetes::Namespace)
        expect(Gitlab::Kubernetes::Namespace).to(
          receive(:new).with(namespace, kubeclient, labels: namespace_labels).and_return(kubernetes_namespace)
        )
        expect(kubernetes_namespace).to receive(:ensure_exists!)

        subject
      end

      it_behaves_like 'creates service account and token'

      it 'creates a namespaced role binding with admin access' do
        subject

        expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{role_binding_name}").with(
          body: hash_including(
            metadata: { name: "gitlab-#{namespace}", namespace: namespace.to_s },
            roleRef: {
              apiGroup: 'rbac.authorization.k8s.io',
              kind: 'ClusterRole',
              name: 'admin'
            },
            subjects: [
              {
                kind: 'ServiceAccount',
                name: service_account_name,
                namespace: namespace
              }
            ]
          )
        )
      end

      it 'creates a role binding granting crossplane database permissions to the service account' do
        subject

        expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME}").with(
          body: hash_including(
            metadata: {
              name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME,
              namespace: namespace
            },
            roleRef: {
              apiGroup: 'rbac.authorization.k8s.io',
              kind: 'Role',
              name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME
            },
            subjects: [
              {
                kind: 'ServiceAccount',
                name: service_account_name,
                namespace: namespace
              }
            ]
          )
        )
      end

      it 'creates a role and role binding granting knative serving permissions to the service account' do
        subject

        expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/roles/#{Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME}").with(
          body: hash_including(
            metadata: {
              name: Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME,
              namespace: namespace
            },
            rules: [{
              apiGroups: %w[serving.knative.dev],
              resources: %w[configurations configurationgenerations routes revisions revisionuids autoscalers services],
              verbs: %w[get list create update delete patch watch]
            }]
          )
        )
      end

      it 'creates a role and role binding granting crossplane database permissions to the service account' do
        subject

        expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/roles/#{Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME}").with(
          body: hash_including(
            metadata: {
              name: Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME,
              namespace: namespace
            },
            rules: [{
              apiGroups: %w[database.crossplane.io],
              resources: %w[postgresqlinstances],
              verbs: %w[get list create watch]
            }]
          )
        )
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Clusters
  module Kubernetes
    # Provisions the Kubernetes namespace backing a project environment:
    # creates the namespaced service account, captures its token, and
    # persists the namespace record.
    class CreateOrUpdateNamespaceService
      def initialize(cluster:, kubernetes_namespace:)
        @cluster = cluster
        @kubernetes_namespace = kubernetes_namespace
        @platform = cluster.platform
      end

      def execute
        create_project_service_account
        configure_kubernetes_token

        kubernetes_namespace.save!
      end

      private

      attr_reader :cluster, :kubernetes_namespace, :platform

      def create_project_service_account
        Clusters::Kubernetes::CreateOrUpdateServiceAccountService.namespace_creator(
          platform.kubeclient,
          service_account_name: kubernetes_namespace.service_account_name,
          service_account_namespace: kubernetes_namespace.namespace,
          service_account_namespace_labels: namespace_labels,
          rbac: platform.rbac?
        ).execute
      end

      # Labels tying the namespace back to the GitLab project, plus the
      # environment when one is known.
      def namespace_labels
        labels = { 'app.gitlab.com/app' => kubernetes_namespace.project.full_path_slug }

        environment_slug = kubernetes_namespace.environment&.slug
        labels['app.gitlab.com/env'] = environment_slug if environment_slug

        labels
      end

      def configure_kubernetes_token
        kubernetes_namespace.service_account_token = fetch_service_account_token
      end

      def fetch_service_account_token
        Clusters::Kubernetes::FetchKubernetesTokenService.new(
          platform.kubeclient,
          kubernetes_namespace.token_name,
          kubernetes_namespace.namespace
        ).execute
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute', feature_category: :deployment_management do
include KubernetesHelpers
let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
let(:platform) { cluster.platform }
let(:api_url) { 'https://kubernetes.example.com' }
let(:project) { cluster.project }
let(:environment) { create(:environment, project: project) }
let(:cluster_project) { cluster.cluster_project }
let(:namespace) { "#{project.path}-#{project.id}-#{environment.slug}" }
subject do
described_class.new(
cluster: cluster,
kubernetes_namespace: kubernetes_namespace
).execute
end
before do
stub_kubeclient_discover(api_url)
stub_kubeclient_get_namespace(api_url)
stub_kubeclient_get_service_account_error(api_url, 'gitlab')
stub_kubeclient_create_service_account(api_url)
stub_kubeclient_get_secret_error(api_url, 'gitlab-token')
stub_kubeclient_create_secret(api_url)
stub_kubeclient_delete_role_binding(api_url, "gitlab-#{namespace}", namespace: namespace)
stub_kubeclient_put_role_binding(api_url, "gitlab-#{namespace}", namespace: namespace)
stub_kubeclient_get_namespace(api_url, namespace: namespace)
stub_kubeclient_get_service_account_error(api_url, "#{namespace}-service-account", namespace: namespace)
stub_kubeclient_create_service_account(api_url, namespace: namespace)
stub_kubeclient_create_secret(api_url, namespace: namespace)
stub_kubeclient_put_secret(api_url, "#{namespace}-token", namespace: namespace)
stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_NAME, namespace: namespace)
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME, namespace: namespace)
stub_kubeclient_put_role(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_NAME, namespace: namespace)
stub_kubeclient_put_role_binding(api_url, Clusters::Kubernetes::GITLAB_CROSSPLANE_DATABASE_ROLE_BINDING_NAME, namespace: namespace)
stub_kubeclient_get_secret(
api_url,
metadata_name: "#{namespace}-token",
token: Base64.encode64('sample-token'),
namespace: namespace
)
end
shared_examples 'successful creation of kubernetes namespace' do
it 'creates a Clusters::KubernetesNamespace' do
expect do
subject
end.to change(Clusters::KubernetesNamespace, :count).by(1)
end
it 'creates project service account and namespace' do
account_service = double(Clusters::Kubernetes::CreateOrUpdateServiceAccountService)
expect(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to(
receive(:namespace_creator).with(
cluster.platform.kubeclient,
service_account_name: "#{namespace}-service-account",
service_account_namespace: namespace,
service_account_namespace_labels: {
'app.gitlab.com/app' => project.full_path_slug,
'app.gitlab.com/env' => environment.slug
},
rbac: true
).and_return(account_service)
)
expect(account_service).to receive(:execute).once
subject
end
it 'configures kubernetes token' do
subject
kubernetes_namespace.reload
expect(kubernetes_namespace.namespace).to eq(namespace)
expect(kubernetes_namespace.service_account_name).to eq("#{namespace}-service-account")
expect(kubernetes_namespace.encrypted_service_account_token).to be_present
end
context 'without environment' do
before do
kubernetes_namespace.environment = nil
end
it 'creates project service account and namespace' do
account_service = double(Clusters::Kubernetes::CreateOrUpdateServiceAccountService)
expect(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to(
receive(:namespace_creator).with(
cluster.platform.kubeclient,
service_account_name: "#{namespace}-service-account",
service_account_namespace: namespace,
service_account_namespace_labels: {
'app.gitlab.com/app' => project.full_path_slug
},
rbac: true
).and_return(account_service)
)
expect(account_service).to receive(:execute).once
subject
end
end
end
context 'group clusters' do
let(:cluster) { create(:cluster, :group, :provided_by_gcp) }
let(:group) { cluster.group }
let(:project) { create(:project, group: group) }
context 'when kubernetes namespace is not persisted' do
let(:kubernetes_namespace) do
build(
:cluster_kubernetes_namespace,
cluster: cluster,
project: project,
environment: environment
)
end
it_behaves_like 'successful creation of kubernetes namespace'
end
end
context 'project clusters' do
context 'when kubernetes namespace is not persisted' do
let(:kubernetes_namespace) do
build(
:cluster_kubernetes_namespace,
cluster: cluster,
project: cluster_project.project,
cluster_project: cluster_project,
environment: environment
)
end
it_behaves_like 'successful creation of kubernetes namespace'
end
context 'when there is a Kubernetes Namespace associated' do
let(:namespace) { "new-namespace-#{environment.slug}" }
let(:kubernetes_namespace) do
create(
:cluster_kubernetes_namespace,
cluster: cluster,
project: cluster_project.project,
cluster_project: cluster_project,
environment: environment
)
end
before do
platform.update_column(:namespace, 'new-namespace')
end
it 'does not create any Clusters::KubernetesNamespace' do
subject
expect(cluster.kubernetes_namespaces).to eq([kubernetes_namespace])
end
it 'creates project service account' do
expect_next_instance_of(Clusters::Kubernetes::CreateOrUpdateServiceAccountService) do |instance|
expect(instance).to receive(:execute).once
end
subject
end
it 'updates Clusters::KubernetesNamespace' do
subject
kubernetes_namespace.reload
expect(kubernetes_namespace.namespace).to eq(namespace)
expect(kubernetes_namespace.service_account_name).to eq("#{namespace}-service-account")
expect(kubernetes_namespace.encrypted_service_account_token).to be_present
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module CustomerRelations
module Organizations
class UpdateService < BaseService
def execute(organization)
return error_no_permissions unless allowed?
handle_active_param
return error_updating(organization) unless organization.update(params)
ServiceResponse.success(payload: organization)
end
private
def handle_active_param
return if params[:active].nil?
active = params.delete(:active)
params[:state] = active ? 'active' : 'inactive'
end
def error_no_permissions
error('You have insufficient permissions to update an organization for this group')
end
def error_updating(organization)
error(organization&.errors&.full_messages || 'Failed to update organization')
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe CustomerRelations::Organizations::UpdateService, feature_category: :service_desk do
let_it_be(:user) { create(:user) }
let(:crm_organization) { create(:crm_organization, name: 'Test', group: group, state: 'active') }
subject(:update) { described_class.new(group: group, current_user: user, params: params).execute(crm_organization) }
describe '#execute' do
context 'when the user has no permission' do
let_it_be(:group) { create(:group, :crm_enabled) }
let(:params) { { name: 'GitLab' } }
it 'returns an error' do
response = update
expect(response).to be_error
expect(response.message).to eq(['You have insufficient permissions to update an organization for this group'])
end
end
context 'when user has permission' do
let_it_be(:group) { create(:group, :crm_enabled) }
before_all do
group.add_developer(user)
end
context 'when name is changed' do
let(:params) { { name: 'GitLab' } }
it 'updates the crm_organization' do
response = update
expect(response).to be_success
expect(response.payload.name).to eq('GitLab')
end
end
context 'when activating' do
let(:crm_organization) { create(:crm_organization, state: 'inactive') }
let(:params) { { active: true } }
it 'updates the contact' do
response = update
expect(response).to be_success
expect(response.payload.active?).to be_truthy
end
end
context 'when deactivating' do
let(:params) { { active: false } }
it 'updates the crm_organization' do
response = update
expect(response).to be_success
expect(response.payload.active?).to be_falsy
end
end
context 'when the crm_organization is invalid' do
let(:params) { { name: nil } }
it 'returns an error' do
response = update
expect(response).to be_error
expect(response.message).to eq(["Name can't be blank"])
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module CustomerRelations
module Organizations
class CreateService < BaseService
# returns the created organization
def execute
return error_no_permissions unless allowed?
organization = Organization.create(params.merge(group_id: group.id))
return error_creating(organization) unless organization.persisted?
ServiceResponse.success(payload: organization)
end
private
def error_no_permissions
error('You have insufficient permissions to create an organization for this group')
end
def error_creating(organization)
error(organization&.errors&.full_messages || 'Failed to create organization')
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe CustomerRelations::Organizations::CreateService, feature_category: :service_desk do
describe '#execute' do
let_it_be(:user) { create(:user) }
let(:group) { create(:group, :crm_enabled) }
let(:params) { attributes_for(:crm_organization, group: group) }
subject(:response) { described_class.new(group: group, current_user: user, params: params).execute }
it 'creates a crm_organization' do
group.add_developer(user)
expect(response).to be_success
end
it 'returns an error when user does not have permission' do
group.add_reporter(user)
expect(response).to be_error
expect(response.message).to match_array(['You have insufficient permissions to create an organization for this group'])
end
it 'returns an error when the crm_organization is not persisted' do
group.add_developer(user)
params[:name] = nil
expect(response).to be_error
expect(response.message).to match_array(["Name can't be blank"])
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module CustomerRelations
module Contacts
class UpdateService < BaseService
def execute(contact)
return error_no_permissions unless allowed?
handle_active_param
return error_organization_invalid unless organization_valid?
return error_updating(contact) unless contact.update(params)
ServiceResponse.success(payload: contact)
end
private
def handle_active_param
return if params[:active].nil?
active = params.delete(:active)
params[:state] = active ? 'active' : 'inactive'
end
def error_updating(contact)
error(contact&.errors&.full_messages || 'Failed to update contact')
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe CustomerRelations::Contacts::UpdateService, feature_category: :service_desk do
let_it_be(:user) { create(:user) }
let(:contact) { create(:contact, first_name: 'Mark', group: group, state: 'active') }
subject(:update) { described_class.new(group: group, current_user: user, params: params).execute(contact) }
describe '#execute' do
context 'when the user has no permission' do
let_it_be(:group) { create(:group, :crm_enabled) }
let(:params) { { first_name: 'Gary' } }
it 'returns an error' do
response = update
expect(response).to be_error
expect(response.message).to match_array(['You have insufficient permissions to manage contacts for this group'])
end
end
context 'when user has permission' do
let_it_be(:group) { create(:group, :crm_enabled) }
before_all do
group.add_developer(user)
end
context 'when first_name is changed' do
let(:params) { { first_name: 'Gary' } }
it 'updates the contact' do
response = update
expect(response).to be_success
expect(response.payload.first_name).to eq('Gary')
end
end
context 'when activating' do
let(:contact) { create(:contact, state: 'inactive') }
let(:params) { { active: true } }
it 'updates the contact' do
response = update
expect(response).to be_success
expect(response.payload.active?).to be_truthy
end
end
context 'when deactivating' do
let(:params) { { active: false } }
it 'updates the contact' do
response = update
expect(response).to be_success
expect(response.payload.active?).to be_falsy
end
end
context 'when the contact is invalid' do
let(:params) { { first_name: nil } }
it 'returns an error' do
response = update
expect(response).to be_error
expect(response.message).to match_array(["First name can't be blank"])
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module CustomerRelations
module Contacts
class CreateService < BaseService
def execute
return error_no_permissions unless allowed?
return error_organization_invalid unless organization_valid?
contact = Contact.create(params.merge(group_id: group.id))
return error_creating(contact) unless contact.persisted?
ServiceResponse.success(payload: contact)
end
private
def error_creating(contact)
error(contact&.errors&.full_messages || 'Failed to create contact')
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe CustomerRelations::Contacts::CreateService, feature_category: :service_desk do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:not_found_or_does_not_belong) { 'The specified organization was not found or does not belong to this group' }
let(:params) { attributes_for(:contact, group: group) }
subject(:response) { described_class.new(group: group, current_user: user, params: params).execute }
context 'when user does not have permission' do
let_it_be(:group) { create(:group, :crm_enabled) }
before_all do
group.add_reporter(user)
end
it 'returns an error' do
expect(response).to be_error
expect(response.message).to match_array(['You have insufficient permissions to manage contacts for this group'])
end
end
context 'when user has permission' do
let_it_be(:group) { create(:group, :crm_enabled) }
before_all do
group.add_developer(user)
end
it 'creates a contact' do
expect(response).to be_success
end
it 'returns an error when the contact is not persisted' do
params[:last_name] = nil
expect(response).to be_error
expect(response.message).to match_array(["Last name can't be blank"])
end
it 'returns an error when the organization_id is invalid' do
params[:organization_id] = non_existing_record_id
expect(response).to be_error
expect(response.message).to match_array([not_found_or_does_not_belong])
end
it 'returns an error when the organization belongs to a different group' do
crm_organization = create(:crm_organization)
params[:organization_id] = crm_organization.id
expect(response).to be_error
expect(response.message).to match_array([not_found_or_does_not_belong])
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Admin
class SetFeatureFlagService
UnknownOperationError = Class.new(StandardError)
def initialize(feature_flag_name:, params:)
@name = feature_flag_name
@target = Feature::Target.new(params)
@params = params
@force = params[:force]
end
def execute
unless force
error = validate_feature_flag_name
return ServiceResponse.error(message: error, reason: :invalid_feature_flag) if error
end
if target.gate_specified?
update_targets
else
update_global
end
feature_flag = Feature.get(name) # rubocop:disable Gitlab/AvoidFeatureGet
ServiceResponse.success(payload: { feature_flag: feature_flag })
rescue Feature::InvalidOperation => e
ServiceResponse.error(message: e.message, reason: :illegal_operation)
rescue UnknownOperationError => e
ServiceResponse.error(message: e.message, reason: :illegal_operation)
rescue Feature::Target::UnknownTargetError => e
ServiceResponse.error(message: e.message, reason: :actor_not_found)
end
private
attr_reader :name, :params, :target, :force
# Note: the if expressions in `update_targets` and `update_global` are order dependant.
def update_targets
target.targets.each do |target|
if enable?
enable(target)
elsif disable?
Feature.disable(name, target)
elsif opt_out?
Feature.opt_out(name, target)
elsif remove_opt_out?
remove_opt_out(target)
else
raise UnknownOperationError, "Cannot set '#{name}' to #{value.inspect} for #{target}"
end
end
end
def update_global
if enable?
Feature.enable(name)
elsif disable?
Feature.disable(name)
elsif percentage_of_actors?
Feature.enable_percentage_of_actors(name, percentage)
# Deprecated in favor of Feature.enabled?(name, :instance) + Feature.enable_percentage_of_actors(name, percentage)
elsif percentage_of_time?
Feature.enable_percentage_of_time(name, percentage)
else
msg = if key.present?
"Cannot set '#{name}' (#{key.inspect}) to #{value.inspect}"
else
"Cannot set '#{name}' to #{value.inspect}"
end
raise UnknownOperationError, msg
end
end
def remove_opt_out(target)
raise Feature::InvalidOperation, "No opt-out exists for #{target}" unless Feature.opted_out?(name, target)
Feature.remove_opt_out(name, target)
end
def enable(target)
if Feature.opted_out?(name, target)
target_name = target.respond_to?(:to_reference) ? target.to_reference : target.to_s
raise Feature::InvalidOperation, "Opt-out exists for #{target_name} - remove opt-out before enabling"
end
Feature.enable(name, target)
end
def value
params[:value]
end
def key
params[:key]
end
def numeric_value?
params[:value].match?(/^\d+(\.\d+)?$/)
end
def percentage
raise UnknownOperationError, "Not a percentage" unless numeric_value?
value.to_f
end
def percentage_of_actors?
key == 'percentage_of_actors'
end
def percentage_of_time?
return true if key == 'percentage_of_time'
return numeric_value? if key.nil?
false
end
# Note: `key` is NOT considered - setting to a percentage to 0 is the same as disabling.
def disable?
value.in?(%w[0 0.0 false])
end
# Note: `key` is NOT considered - setting to a percentage to 100 is the same
def enable?
value.in?(%w[100 100.0 true])
end
def opt_out?
value == 'opt_out'
end
def remove_opt_out?
value == 'remove_opt_out'
end
def validate_feature_flag_name
## Overridden in EE
end
end
end
Admin::SetFeatureFlagService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Admin::SetFeatureFlagService, feature_category: :feature_flags do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:group) { create(:group) }
let(:feature_name) { known_feature_flag.name }
let(:service) { described_class.new(feature_flag_name: feature_name, params: params) }
# Find any `development` feature flag name
let(:known_feature_flag) do
Feature::Definition.definitions
.values.find { |defn| defn.development? && !defn.default_enabled }
end
describe 'sequences of executions' do
subject(:flag) do
Feature.get(feature_name) # rubocop: disable Gitlab/AvoidFeatureGet
end
context 'if we enable_percentage_of_actors and then disable' do
before do
described_class
.new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '50.0' })
.execute
described_class
.new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '0.0' })
.execute
end
it 'leaves the flag off' do
expect(flag.state).to eq(:off)
end
end
context 'if we enable and then enable_percentage_of_actors' do
before do
described_class
.new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '100.0' })
.execute
end
it 'reports an error' do
result = described_class
.new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '50.0' })
.execute
expect(flag.state).to eq(:on)
expect(result).to be_error
end
context 'if we disable the flag first' do
before do
described_class
.new(feature_flag_name: feature_name, params: { value: 'false' })
.execute
end
it 'sets the percentage of actors' do
result = described_class
.new(feature_flag_name: feature_name, params: { key: 'percentage_of_actors', value: '50.0' })
.execute
expect(flag.state).to eq(:conditional)
expect(result).not_to be_error
end
end
end
end
describe '#execute' do
before do
unstub_all_feature_flags
Feature.reset
Flipper.unregister_groups
Flipper.register(:perf_team) do |actor|
actor.respond_to?(:admin) && actor.admin?
end
end
subject(:result) { service.execute }
context 'when we cannot interpret the operation' do
let(:params) { { value: 'wibble', key: 'unknown' } }
it { is_expected.to be_error }
it { is_expected.to have_attributes(reason: :illegal_operation) }
it { is_expected.to have_attributes(message: %(Cannot set '#{feature_name}' ("unknown") to "wibble")) }
context 'when the key is absent' do
let(:params) { { value: 'wibble' } }
it { is_expected.to be_error }
it { is_expected.to have_attributes(reason: :illegal_operation) }
it { is_expected.to have_attributes(message: %(Cannot set '#{feature_name}' to "wibble")) }
end
end
context 'when the value to set cannot be parsed' do
let(:params) { { value: 'wibble', key: 'percentage_of_actors' } }
it { is_expected.to be_error }
it { is_expected.to have_attributes(reason: :illegal_operation) }
it { is_expected.to have_attributes(message: 'Not a percentage') }
end
context 'when value is "remove_opt_out"' do
before do
Feature.enable(feature_name)
end
context 'without a target' do
let(:params) { { value: 'remove_opt_out' } }
it 'returns an error' do
expect(result).to be_error
expect(result.reason).to eq(:illegal_operation)
end
end
context 'with a target' do
let(:params) { { value: 'remove_opt_out', user: user.username } }
context 'when there is currently no opt-out' do
it 'returns an error' do
expect(result).to be_error
expect(result.reason).to eq(:illegal_operation)
expect(Feature).to be_enabled(feature_name, user)
end
end
context 'when there is currently an opt-out' do
before do
Feature.opt_out(feature_name, user)
end
it 'removes the opt out' do
expect(result).to be_success
expect(Feature).to be_enabled(feature_name, user)
end
end
end
end
context 'when value is "opt_out"' do
let(:params) { { value: 'opt_out', namespace: group.full_path, user: user.username } }
it 'opts the user and group out' do
expect(Feature).to receive(:opt_out).with(feature_name, user)
expect(Feature).to receive(:opt_out).with(feature_name, group)
expect(result).to be_success
end
context 'without a target' do
let(:params) { { value: 'opt_out' } }
it { is_expected.to be_error }
it { is_expected.to have_attributes(reason: :illegal_operation) }
end
end
context 'when enabling the feature flag' do
let(:params) { { value: 'true' } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name)
expect(subject).to be_success
feature_flag = subject.payload[:feature_flag]
expect(feature_flag.name).to eq(feature_name)
end
context 'when the flag is default_enabled' do
let(:known_feature_flag) do
Feature::Definition.definitions
.values.find { |defn| defn.development? && defn.default_enabled }
end
it 'leaves the flag enabled' do
expect(subject).to be_success
expect(Feature).to be_enabled(feature_name)
end
end
it 'logs the event' do
expect(Feature.logger).to receive(:info).once
subject
end
context 'when enabling for a user actor' do
let(:params) { { value: 'true', user: user.username } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name, user)
expect(subject).to be_success
end
context 'when the flag has been opted out for user' do
before do
Feature.enable(feature_name)
Feature.opt_out(feature_name, user)
end
it 'records an error' do
expect(subject).to be_error
expect(subject.reason).to eq(:illegal_operation)
expect(Feature).not_to be_enabled(feature_name, user)
end
end
context 'when the flag is default_enabled' do
let(:known_feature_flag) do
Feature::Definition.definitions
.values.find { |defn| defn.development? && defn.default_enabled }
end
it 'leaves the feature enabled' do
expect(subject).to be_success
expect(Feature).to be_enabled(feature_name, user)
end
end
context 'when user does not exist' do
let(:params) { { value: 'true', user: 'unknown-user' } }
it 'does nothing' do
expect(Feature).not_to receive(:enable)
expect(subject).to be_error
expect(subject.reason).to eq(:actor_not_found)
end
end
end
context 'when enabling for a feature group' do
let(:params) { { value: 'true', feature_group: 'perf_team' } }
let(:feature_group) { Feature.group('perf_team') }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name, feature_group)
expect(subject).to be_success
end
end
context 'when enabling for a project' do
let(:params) { { value: 'true', project: project.full_path } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name, project)
expect(subject).to be_success
end
end
context 'when enabling for a group' do
let(:params) { { value: 'true', group: group.full_path } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name, group)
expect(subject).to be_success
end
context 'when group does not exist' do
let(:params) { { value: 'true', group: 'unknown-group' } }
it 'returns an error' do
expect(Feature).not_to receive(:disable)
expect(subject).to be_error
expect(subject.reason).to eq(:actor_not_found)
end
end
end
context 'when enabling for a user namespace' do
let(:namespace) { user.namespace }
let(:params) { { value: 'true', namespace: namespace.full_path } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name, namespace)
expect(subject).to be_success
end
context 'when namespace does not exist' do
let(:params) { { value: 'true', namespace: 'unknown-namespace' } }
it 'returns an error' do
expect(Feature).not_to receive(:disable)
expect(subject).to be_error
expect(subject.reason).to eq(:actor_not_found)
end
end
end
context 'when enabling for a group namespace' do
let(:params) { { value: 'true', namespace: group.full_path } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name, group)
expect(subject).to be_success
end
end
context 'when enabling for a repository' do
let(:params) { { value: 'true', repository: project.repository.full_path } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name, project.repository)
expect(subject).to be_success
end
end
context 'when enabling for a user actor and a feature group' do
let(:params) { { value: 'true', user: user.username, feature_group: 'perf_team' } }
let(:feature_group) { Feature.group('perf_team') }
it 'enables the feature flag' do
expect(Feature).to receive(:enable).with(feature_name, user)
expect(Feature).to receive(:enable).with(feature_name, feature_group)
expect(subject).to be_success
end
end
context 'when enabling given a percentage of time' do
let(:params) { { value: '50' } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable_percentage_of_time).with(feature_name, 50)
expect(subject).to be_success
end
context 'when value is a float' do
let(:params) { { value: '0.01' } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable_percentage_of_time).with(feature_name, 0.01)
expect(subject).to be_success
end
end
context 'with a target' do
before do
params[:user] = user.username
end
it { is_expected.to be_error }
it { is_expected.to have_attributes(reason: :illegal_operation) }
end
end
context 'when enabling given a percentage of actors' do
let(:params) { { value: '50', key: 'percentage_of_actors' } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable_percentage_of_actors).with(feature_name, 50)
expect(subject).to be_success
end
context 'when value is a float' do
let(:params) { { value: '0.01', key: 'percentage_of_actors' } }
it 'enables the feature flag' do
expect(Feature).to receive(:enable_percentage_of_actors).with(feature_name, 0.01)
expect(subject).to be_success
end
end
context 'with a target' do
before do
params[:user] = user.username
end
it { is_expected.to be_error }
it { is_expected.to have_attributes(reason: :illegal_operation) }
end
end
end
context 'when disabling the feature flag' do
before do
Feature.enable(feature_name)
end
let(:params) { { value: 'false' } }
it 'disables the feature flag' do
expect(Feature).to receive(:disable).with(feature_name)
expect(subject).to be_success
feature_flag = subject.payload[:feature_flag]
expect(feature_flag.name).to eq(feature_name)
end
it 'logs the event' do
expect(Feature.logger).to receive(:info).once
subject
end
context 'when disabling for a user actor' do
let(:params) { { value: 'false', user: user.username } }
it 'disables the feature flag' do
expect(Feature).to receive(:disable).with(feature_name, user)
expect(subject).to be_success
end
context 'when user does not exist' do
let(:params) { { value: 'false', user: 'unknown-user' } }
it 'returns an error' do
expect(Feature).not_to receive(:disable)
expect(subject).to be_error
expect(subject.reason).to eq(:actor_not_found)
end
end
end
context 'when disabling for a feature group' do
let(:params) { { value: 'false', feature_group: 'perf_team' } }
let(:feature_group) { Feature.group('perf_team') }
it 'disables the feature flag' do
expect(Feature).to receive(:disable).with(feature_name, feature_group)
expect(subject).to be_success
end
end
context 'when disabling for a project' do
let(:params) { { value: 'false', project: project.full_path } }
it 'disables the feature flag' do
expect(Feature).to receive(:disable).with(feature_name, project)
expect(subject).to be_success
end
end
context 'when disabling for a group' do
let(:params) { { value: 'false', group: group.full_path } }
it 'disables the feature flag' do
expect(Feature).to receive(:disable).with(feature_name, group)
expect(subject).to be_success
end
context 'when group does not exist' do
let(:params) { { value: 'false', group: 'unknown-group' } }
it 'returns an error' do
expect(Feature).not_to receive(:disable)
expect(subject).to be_error
expect(subject.reason).to eq(:actor_not_found)
end
end
end
context 'when disabling for a user namespace' do
let(:namespace) { user.namespace }
let(:params) { { value: 'false', namespace: namespace.full_path } }
it 'disables the feature flag' do
expect(Feature).to receive(:disable).with(feature_name, namespace)
expect(subject).to be_success
end
context 'when namespace does not exist' do
let(:params) { { value: 'false', namespace: 'unknown-namespace' } }
it 'returns an error' do
expect(Feature).not_to receive(:disable)
expect(subject).to be_error
expect(subject.reason).to eq(:actor_not_found)
end
end
end
context 'when disabling for a group namespace' do
let(:params) { { value: 'false', namespace: group.full_path } }
it 'disables the feature flag' do
expect(Feature).to receive(:disable).with(feature_name, group)
expect(subject).to be_success
end
end
context 'when disabling for a user actor and a feature group' do
let(:params) { { value: 'false', user: user.username, feature_group: 'perf_team' } }
let(:feature_group) { Feature.group('perf_team') }
it 'disables the feature flag' do
expect(Feature).to receive(:disable).with(feature_name, user)
expect(Feature).to receive(:disable).with(feature_name, feature_group)
expect(subject).to be_success
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Admin
module AbuseReportLabels
class CreateService < Labels::BaseService
def initialize(params = {})
@params = params
end
def execute
params[:color] = convert_color_name_to_hex if params[:color].present?
::Admin::AbuseReportLabel.create(params)
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Admin::AbuseReportLabels::CreateService, feature_category: :insider_threat do
describe '#execute' do
let(:color) { 'red' }
let(:color_in_hex) { ::Gitlab::Color.of(color) }
let(:params) { { title: 'FancyLabel', color: color } }
subject(:execute) { described_class.new(params).execute }
shared_examples 'creates a label with the correct values' do
it 'creates a label with the correct values', :aggregate_failures do
expect { execute }.to change { Admin::AbuseReportLabel.count }.from(0).to(1)
label = Admin::AbuseReportLabel.last
expect(label.title).to eq params[:title]
expect(label.color).to eq color_in_hex
end
it 'returns the persisted label' do
result = execute
expect(result).to be_an_instance_of(Admin::AbuseReportLabel)
expect(result.persisted?).to eq true
end
end
it_behaves_like 'creates a label with the correct values'
context 'without color param' do
let(:params) { { title: 'FancyLabel' } }
let(:color_in_hex) { ::Gitlab::Color.of(Label::DEFAULT_COLOR) }
it_behaves_like 'creates a label with the correct values'
end
context 'with errors' do
let!(:existing_label) { create(:abuse_report_label, title: params[:title]) }
it 'does not create the label' do
expect { execute }.not_to change { Admin::AbuseReportLabel.count }
end
it 'returns the label with errors' do
label = execute
expect(label.errors.messages).to include({ title: ["has already been taken"] })
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Admin
module AbuseReports
class ModerateUserService < BaseService
attr_reader :abuse_report, :params, :current_user, :action
def initialize(abuse_report, current_user, params)
@abuse_report = abuse_report
@current_user = current_user
@params = params
@action = determine_action
end
def execute
return ServiceResponse.error(message: 'Admin is required') unless current_user&.can_admin_all_resources?
return ServiceResponse.error(message: 'Action is required') unless action.present?
result = perform_action
if result[:status] == :success
event = close_report_and_record_event
ServiceResponse.success(message: event.success_message)
else
ServiceResponse.error(message: result[:message])
end
end
private
def determine_action
action = params[:user_action]
if action.in?(ResourceEvents::AbuseReportEvent.actions.keys)
action.to_sym
elsif close_report?
:close_report
end
end
def perform_action
case action
when :ban_user then ban_user
when :block_user then block_user
when :delete_user then delete_user
when :close_report then close_report
when :trust_user then trust_user
end
end
def ban_user
Users::BanService.new(current_user).execute(abuse_report.user)
end
def block_user
Users::BlockService.new(current_user).execute(abuse_report.user)
end
def delete_user
abuse_report.user.delete_async(deleted_by: current_user)
success
end
def close_report
return error('Report already closed') if abuse_report.closed?
close_similar_open_reports
abuse_report.closed!
success
end
def trust_user
Users::TrustService.new(current_user).execute(abuse_report.user)
end
def close_similar_open_reports
# admins see the abuse report and other open reports for the same user in one page
# hence, if the request is to close the report, close other open reports for the same user too
abuse_report.similar_open_reports_for_user.update_all(status: 'closed')
end
def close_report_and_record_event
event = action
if close_report? && action != :close_report
close_report
event = "#{action}_and_close_report"
end
record_event(event)
end
def close_report?
params[:close].to_s == 'true'
end
def record_event(action)
reason = params[:reason]
unless reason.in?(ResourceEvents::AbuseReportEvent.reasons.keys)
reason = ResourceEvents::AbuseReportEvent.reasons[:other]
end
abuse_report.events.create(action: action, user: current_user, reason: reason, comment: params[:comment])
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Admin::AbuseReports::ModerateUserService, feature_category: :instance_resiliency do
let_it_be_with_reload(:abuse_report) { create(:abuse_report) }
let_it_be_with_reload(:similar_abuse_report) do
create(:abuse_report, user: abuse_report.user, category: abuse_report.category)
end
let(:action) { 'ban_user' }
let(:close) { true }
let(:reason) { 'spam' }
let(:params) { { user_action: action, close: close, reason: reason, comment: 'obvious spam' } }
let_it_be(:admin) { create(:admin) }
let(:service) { described_class.new(abuse_report, admin, params) }
describe '#execute', :enable_admin_mode do
subject { service.execute }
shared_examples 'returns an error response' do |error|
it 'returns an error response' do
expect(subject.status).to eq :error
expect(subject.message).to eq error
end
end
shared_examples 'closes the report' do
it 'closes the report' do
expect { subject }.to change { abuse_report.closed? }.from(false).to(true)
end
context 'when similar open reports for the user exist' do
it 'closes the similar report' do
expect { subject }.to change { similar_abuse_report.reload.closed? }.from(false).to(true)
end
end
end
shared_examples 'does not close the report' do
it 'does not close the report' do
subject
expect(abuse_report.closed?).to be(false)
end
context 'when similar open reports for the user exist' do
it 'does not close the similar report' do
subject
expect(similar_abuse_report.reload.closed?).to be(false)
end
end
end
shared_examples 'does not record an event' do
it 'does not record an event' do
expect { subject }.not_to change { abuse_report.events.count }
end
end
shared_examples 'records an event' do |action:|
it 'records the event', :aggregate_failures do
expect { subject }.to change { abuse_report.events.count }.by(1)
expect(abuse_report.events.last).to have_attributes(
action: action,
user: admin,
reason: reason,
comment: params[:comment]
)
end
it 'returns the event success message' do
expect(subject.message).to eq(abuse_report.events.last.success_message)
end
end
context 'when invalid parameters are given' do
describe 'invalid user' do
describe 'when no user is given' do
let_it_be(:admin) { nil }
it_behaves_like 'returns an error response', 'Admin is required'
end
describe 'when given user is no admin' do
let_it_be(:admin) { create(:user) }
it_behaves_like 'returns an error response', 'Admin is required'
end
end
describe 'invalid action' do
describe 'when no action is given' do
let(:action) { '' }
let(:close) { 'false' }
it_behaves_like 'returns an error response', 'Action is required'
end
describe 'when unknown action is given' do
let(:action) { 'unknown' }
let(:close) { 'false' }
it_behaves_like 'returns an error response', 'Action is required'
end
end
describe 'invalid reason' do
let(:reason) { '' }
it 'sets the reason to `other`' do
subject
expect(abuse_report.events.last).to have_attributes(reason: 'other')
end
end
end
describe 'when banning the user' do
it 'calls the Users::BanService' do
expect_next_instance_of(Users::BanService, admin) do |service|
expect(service).to receive(:execute).with(abuse_report.user).and_return(status: :success)
end
subject
end
context 'when closing the report' do
it_behaves_like 'closes the report'
it_behaves_like 'records an event', action: 'ban_user_and_close_report'
end
context 'when not closing the report' do
let(:close) { 'false' }
it_behaves_like 'does not close the report'
it_behaves_like 'records an event', action: 'ban_user'
end
context 'when banning the user fails' do
before do
allow_next_instance_of(Users::BanService, admin) do |service|
allow(service).to receive(:execute).with(abuse_report.user)
.and_return(status: :error, message: 'Banning the user failed')
end
end
it_behaves_like 'returns an error response', 'Banning the user failed'
it_behaves_like 'does not close the report'
it_behaves_like 'does not record an event'
end
end
describe 'when blocking the user' do
let(:action) { 'block_user' }
it 'calls the Users::BlockService' do
expect_next_instance_of(Users::BlockService, admin) do |service|
expect(service).to receive(:execute).with(abuse_report.user).and_return(status: :success)
end
subject
end
context 'when closing the report' do
it_behaves_like 'closes the report'
it_behaves_like 'records an event', action: 'block_user_and_close_report'
end
context 'when not closing the report' do
let(:close) { 'false' }
it_behaves_like 'does not close the report'
it_behaves_like 'records an event', action: 'block_user'
end
context 'when blocking the user fails' do
before do
allow_next_instance_of(Users::BlockService, admin) do |service|
allow(service).to receive(:execute).with(abuse_report.user)
.and_return(status: :error, message: 'Blocking the user failed')
end
end
it_behaves_like 'returns an error response', 'Blocking the user failed'
it_behaves_like 'does not close the report'
it_behaves_like 'does not record an event'
end
end
describe 'when deleting the user' do
let(:action) { 'delete_user' }
it 'calls the delete_async method' do
expect(abuse_report.user).to receive(:delete_async).with(deleted_by: admin)
subject
end
context 'when closing the report' do
it_behaves_like 'closes the report'
it_behaves_like 'records an event', action: 'delete_user_and_close_report'
end
context 'when not closing the report' do
let(:close) { 'false' }
it_behaves_like 'does not close the report'
it_behaves_like 'records an event', action: 'delete_user'
end
end
describe 'when trusting the user' do
let(:action) { 'trust_user' }
it 'calls the Users::TrustService method' do
expect_next_instance_of(Users::TrustService, admin) do |service|
expect(service).to receive(:execute).with(abuse_report.user).and_return(status: :success)
end
subject
end
context 'when not closing the report' do
let(:close) { false }
it_behaves_like 'does not close the report'
it_behaves_like 'records an event', action: 'trust_user'
end
context 'when closing the report' do
it_behaves_like 'closes the report'
it_behaves_like 'records an event', action: 'trust_user_and_close_report'
end
context 'when trusting the user fails' do
before do
allow_next_instance_of(Users::TrustService) do |service|
allow(service).to receive(:execute).with(abuse_report.user)
.and_return(status: :error, message: 'Trusting the user failed')
end
end
it_behaves_like 'returns an error response', 'Trusting the user failed'
it_behaves_like 'does not close the report'
it_behaves_like 'does not record an event'
end
end
describe 'when only closing the report' do
let(:action) { '' }
it_behaves_like 'closes the report'
it_behaves_like 'records an event', action: 'close_report'
context 'when report is already closed' do
before do
abuse_report.closed!
end
it_behaves_like 'returns an error response', 'Report already closed'
it_behaves_like 'does not record an event'
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Admin
module AbuseReports
class UpdateService < BaseService
attr_reader :abuse_report, :params, :current_user
def initialize(abuse_report, current_user, params)
@abuse_report = abuse_report
@current_user = current_user
@params = params
end
def execute
return ServiceResponse.error(message: 'Admin is required') unless current_user&.can_admin_all_resources?
abuse_report.label_ids = label_ids
ServiceResponse.success
end
private
def label_ids
params[:label_ids].filter_map do |id|
GitlabSchema.parse_gid(id, expected_type: ::Admin::AbuseReportLabel).model_id
rescue Gitlab::Graphql::Errors::ArgumentError
end
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Admin::AbuseReports::UpdateService, feature_category: :instance_resiliency do
let_it_be(:current_user) { create(:admin) }
let_it_be(:abuse_report) { create(:abuse_report) }
let_it_be(:label) { create(:abuse_report_label) }
let(:params) { {} }
let(:service) { described_class.new(abuse_report, current_user, params) }
describe '#execute', :enable_admin_mode do
subject { service.execute }
shared_examples 'returns an error response' do |error|
it 'returns an error response' do
expect(subject).to be_error
expect(subject.message).to eq error
end
end
context 'with invalid parameters' do
describe 'invalid user' do
describe 'when no user is given' do
let_it_be(:current_user) { nil }
it_behaves_like 'returns an error response', 'Admin is required'
end
describe 'when given user is not an admin' do
let_it_be(:current_user) { create(:user) }
it_behaves_like 'returns an error response', 'Admin is required'
end
end
describe 'invalid label_ids' do
let(:params) { { label_ids: ['invalid_global_id', non_existing_record_id] } }
it 'does not update the abuse report' do
expect { subject }.not_to change { abuse_report.labels }
end
it { is_expected.to be_success }
end
end
describe 'with valid parameters' do
context 'when label_ids is empty' do
let(:params) { { label_ids: [] } }
context 'when abuse report has existing labels' do
before do
abuse_report.labels = [label]
end
it 'clears the abuse report labels' do
expect { subject }.to change { abuse_report.labels.count }.from(1).to(0)
end
it { is_expected.to be_success }
end
context 'when abuse report has no existing labels' do
it 'does not update the abuse report' do
expect { subject }.not_to change { abuse_report.labels }
end
it { is_expected.to be_success }
end
end
context 'when label_ids is not empty' do
let(:params) { { label_ids: [Gitlab::GlobalId.build(label, id: label.id).to_s] } }
it 'updates the abuse report' do
expect { subject }.to change { abuse_report.label_ids }.from([]).to([label.id])
end
it { is_expected.to be_success }
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Admin
module PlanLimits
class UpdateService < ::BaseService
def initialize(params = {}, current_user:, plan:)
@current_user = current_user
@params = params
@plan = plan
@plan_limits = plan.actual_limits
end
def execute
return error(_('Access denied'), :forbidden) unless can_update?
add_history_to_params!
plan_limits.assign_attributes(parsed_params)
validate_storage_limits
return error(plan_limits.errors.full_messages, :bad_request) if plan_limits.errors.any?
if plan_limits.update(parsed_params)
success
else
error(plan_limits.errors.full_messages, :bad_request)
end
end
private
attr_accessor :current_user, :params, :plan, :plan_limits
delegate :notification_limit, :storage_size_limit, :enforcement_limit, to: :plan_limits
def can_update?
current_user.can_admin_all_resources?
end
def add_history_to_params!
formatted_limits_history = plan_limits.format_limits_history(current_user, parsed_params)
parsed_params.merge!(limits_history: formatted_limits_history) unless formatted_limits_history.empty?
end
def validate_storage_limits
validate_notification_limit
validate_enforcement_limit
validate_storage_size_limit
end
def validate_notification_limit
return unless parsed_params.include?(:notification_limit)
return if unlimited_value?(:notification_limit)
if storage_size_limit > 0 && notification_limit < storage_size_limit
plan_limits.errors.add(
:notification_limit, "must be greater than or equal to the dashboard limit (#{storage_size_limit})"
)
end
return unless enforcement_limit > 0 && notification_limit > enforcement_limit
plan_limits.errors.add(
:notification_limit, "must be less than or equal to the enforcement limit (#{enforcement_limit})"
)
end
def validate_enforcement_limit
return unless parsed_params.include?(:enforcement_limit)
return if unlimited_value?(:enforcement_limit)
if storage_size_limit > 0 && enforcement_limit < storage_size_limit
plan_limits.errors.add(
:enforcement_limit, "must be greater than or equal to the dashboard limit (#{storage_size_limit})"
)
end
return unless notification_limit > 0 && enforcement_limit < notification_limit
plan_limits.errors.add(
:enforcement_limit, "must be greater than or equal to the notification limit (#{notification_limit})"
)
end
def validate_storage_size_limit
return unless parsed_params.include?(:storage_size_limit)
return if unlimited_value?(:storage_size_limit)
if enforcement_limit > 0 && storage_size_limit > enforcement_limit
plan_limits.errors.add(
:dashboard_limit, "must be less than or equal to the enforcement limit (#{enforcement_limit})"
)
end
return unless notification_limit > 0 && storage_size_limit > notification_limit
plan_limits.errors.add(
:dashboard_limit, "must be less than or equal to the notification limit (#{notification_limit})"
)
end
# Overridden in EE
def parsed_params
params
end
def unlimited_value?(limit)
parsed_params[limit] == 0
end
end
end
end
Admin::PlanLimits::UpdateService.prepend_mod_with('Admin::PlanLimits::UpdateService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Admin::PlanLimits::UpdateService, feature_category: :shared do
let_it_be(:user) { create(:admin) }
let_it_be(:plan) { create(:plan, name: 'free') }
let_it_be(:limits) { plan.actual_limits }
let_it_be(:params) do
{
ci_pipeline_size: 101,
ci_active_jobs: 102,
ci_project_subscriptions: 104,
ci_pipeline_schedules: 105,
ci_needs_size_limit: 106,
ci_registered_group_runners: 107,
ci_registered_project_runners: 108,
conan_max_file_size: 10,
enforcement_limit: 100,
generic_packages_max_file_size: 20,
helm_max_file_size: 25,
notification_limit: 95,
maven_max_file_size: 40,
npm_max_file_size: 60,
nuget_max_file_size: 60,
pypi_max_file_size: 70,
terraform_module_max_file_size: 80,
storage_size_limit: 90,
pipeline_hierarchy_size: 250
}
end
subject(:update_plan_limits) { described_class.new(params, current_user: user, plan: plan).execute }
context 'when current_user is an admin', :enable_admin_mode do
context 'when the update is successful', :freeze_time do
let(:current_timestamp) { Time.current.utc.to_i }
it 'updates all attributes' do
update_plan_limits
params.each do |key, value|
expect(limits.send(key)).to eq value
end
end
it 'logs the allowed attributes only' do
update_plan_limits
expect(limits.limits_history).to eq(
{ "enforcement_limit" =>
[{ "user_id" => user.id, "username" => user.username,
"timestamp" => current_timestamp, "value" => 100 }],
"notification_limit" =>
[{ "user_id" => user.id, "username" => user.username,
"timestamp" => current_timestamp, "value" => 95 }],
"storage_size_limit" =>
[{ "user_id" => user.id, "username" => user.username,
"timestamp" => current_timestamp, "value" => 90 }] }
)
end
it 'returns success' do
response = update_plan_limits
expect(response[:status]).to eq :success
end
end
context 'when the update is unsuccessful' do
context 'when notification_limit is less than storage_size_limit' do
let(:params) { { notification_limit: 2 } }
before do
limits.update!(
storage_size_limit: 5,
enforcement_limit: 10
)
end
it 'returns an error' do
response = update_plan_limits
expect(response[:status]).to eq :error
expect(response[:message]).to eq [
"Notification limit must be greater than or equal to the dashboard limit (5)"
]
end
end
context 'when notification_limit is greater than enforcement_limit' do
let(:params) { { notification_limit: 11 } }
before do
limits.update!(
storage_size_limit: 5,
enforcement_limit: 10
)
end
it 'returns an error' do
response = update_plan_limits
expect(response[:status]).to eq :error
expect(response[:message]).to eq [
"Notification limit must be less than or equal to the enforcement limit (10)"
]
end
end
context 'when enforcement_limit is less than storage_size_limit' do
let(:params) { { enforcement_limit: 9 } }
before do
limits.update!(
storage_size_limit: 10,
notification_limit: 9
)
end
it 'returns an error' do
response = update_plan_limits
expect(response[:status]).to eq :error
expect(response[:message]).to eq [
"Enforcement limit must be greater than or equal to the dashboard limit (10)"
]
end
end
context 'when enforcement_limit is less than notification_limit' do
let(:params) { { enforcement_limit: 9 } }
before do
limits.update!(
storage_size_limit: 9,
notification_limit: 10
)
end
it 'returns an error' do
response = update_plan_limits
expect(response[:status]).to eq :error
expect(response[:message]).to eq [
"Enforcement limit must be greater than or equal to the notification limit (10)"
]
end
end
context 'when storage_size_limit is greater than notification_limit' do
let(:params) { { storage_size_limit: 11 } }
before do
limits.update!(
enforcement_limit: 12,
notification_limit: 10
)
end
it 'returns an error' do
response = update_plan_limits
expect(response[:status]).to eq :error
expect(response[:message]).to eq [
"Dashboard limit must be less than or equal to the notification limit (10)"
]
end
end
context 'when storage_size_limit is greater than enforcement_limit' do
let(:params) { { storage_size_limit: 11 } }
before do
limits.update!(
enforcement_limit: 10,
notification_limit: 11
)
end
it 'returns an error' do
response = update_plan_limits
expect(response[:status]).to eq :error
expect(response[:message]).to eq [
"Dashboard limit must be less than or equal to the enforcement limit (10)"
]
end
context 'when enforcement_limit is 0' do
before do
limits.update!(
enforcement_limit: 0
)
end
it 'does not return an error' do
response = update_plan_limits
expect(response[:status]).to eq :success
end
end
end
end
context 'when setting limit to unlimited' do
before do
limits.update!(
notification_limit: 10,
storage_size_limit: 10,
enforcement_limit: 10
)
end
[:notification_limit, :enforcement_limit, :storage_size_limit].each do |limit|
context "for #{limit}" do
let(:params) { { limit => 0 } }
it 'is successful' do
response = update_plan_limits
expect(response[:status]).to eq :success
end
end
end
end
end
context 'when the user is not an admin' do
let(:user) { create(:user) }
it 'returns an error' do
response = update_plan_limits
expect(response[:status]).to eq :error
expect(response[:message]).to eq 'Access denied'
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module TimelineEventTags
class CreateService < TimelineEventTags::BaseService
attr_reader :project, :user, :params
def initialize(project, user, params)
@project = project
@user = user
@params = params
end
def execute
return error_no_permissions unless allowed?
timeline_event_tag_params = {
project: project,
name: params[:name]
}
timeline_event_tag = IncidentManagement::TimelineEventTag.new(timeline_event_tag_params)
if timeline_event_tag.save
success(timeline_event_tag)
else
error_in_save(timeline_event_tag)
end
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::TimelineEventTags::CreateService, feature_category: :incident_management do
let_it_be(:user_with_permissions) { create(:user) }
let_it_be(:user_without_permissions) { create(:user) }
let_it_be_with_reload(:project) { create(:project) }
let(:current_user) { user_with_permissions }
let(:args) { { 'name': 'Test tag 1', 'project_path': project.full_path } }
let(:service) { described_class.new(project, current_user, args) }
before do
project.add_maintainer(user_with_permissions)
project.add_developer(user_without_permissions)
end
describe '#execute' do
shared_examples 'error response' do |message|
it 'has an informative message' do
expect(execute).to be_error
expect(execute.message).to eq(message)
end
end
shared_examples 'success response' do
it 'has timeline event tag' do
expect(execute).to be_success
result = execute.payload[:timeline_event_tag]
expect(result).to be_a(::IncidentManagement::TimelineEventTag)
expect(result.name).to eq(args[:name])
expect(result.project).to eq(project)
end
end
subject(:execute) { service.execute }
context 'when current user is nil' do
let(:current_user) { nil }
it_behaves_like 'error response',
'You have insufficient permissions to manage timeline event tags for this project'
end
context 'when user does not have permissions to create tags' do
let(:current_user) { user_without_permissions }
it_behaves_like 'error response',
'You have insufficient permissions to manage timeline event tags for this project'
end
context 'when error occurs during creation' do
let(:args) { {} }
it_behaves_like 'error response', "Name can't be blank and Name is invalid"
end
context 'when user has permissions' do
it_behaves_like 'success response'
it 'creates database record' do
expect { execute }.to change {
::IncidentManagement::TimelineEventTag.where(project_id: project.id).count
}.by(1)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module LinkAlerts
class DestroyService < BaseService
# @param incident [Issue] an incident to unlink alert from
# @param current_user [User]
# @param alert [AlertManagement::Alert] an alert to unlink from the incident
def initialize(incident, current_user, alert)
@incident = incident
@current_user = current_user
@alert = alert
super(project: incident.project, current_user: current_user)
end
def execute
return error_no_permissions unless allowed?
incident.alert_management_alerts.delete(alert)
success
end
private
attr_reader :alert
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::LinkAlerts::DestroyService, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
let_it_be(:another_project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:incident) { create(:incident, project: project) }
let_it_be(:another_incident) { create(:incident, project: project) }
let_it_be(:internal_alert) { create(:alert_management_alert, project: project, issue: incident) }
let_it_be(:external_alert) { create(:alert_management_alert, project: another_project, issue: incident) }
let_it_be(:unrelated_alert) { create(:alert_management_alert, project: project, issue: another_incident) }
before_all do
project.add_guest(guest)
project.add_developer(developer)
end
describe '#execute' do
subject(:execute) { described_class.new(incident, current_user, alert).execute }
let(:alert) { internal_alert }
context 'when current user is a guest' do
let(:current_user) { guest }
it 'responds with error', :aggregate_failures do
response = execute
expect(response).to be_error
expect(response.message).to eq('You have insufficient permissions to manage alerts for this project')
end
it 'does not unlink alert from the incident' do
expect { execute }.not_to change { incident.reload.alert_management_alerts.to_a }
end
end
context 'when current user is a developer' do
let(:current_user) { developer }
it 'responds with success', :aggregate_failures do
response = execute
expect(response).to be_success
expect(response.payload[:incident]).to eq(incident)
end
context 'when unlinking internal alert' do
let(:alert) { internal_alert }
it 'unlinks the alert' do
expect { execute }
.to change { incident.reload.alert_management_alerts.to_a }
.to match_array([external_alert])
end
end
context 'when unlinking external alert' do
let(:alert) { external_alert }
it 'unlinks the alert' do
expect { execute }
.to change { incident.reload.alert_management_alerts.to_a }
.to match_array([internal_alert])
end
end
context 'when unlinking an alert not related to the incident' do
let(:alert) { unrelated_alert }
it "does not change the incident's alerts" do
expect { execute }.not_to change { incident.reload.alert_management_alerts.to_a }
end
it "does not change another incident's alerts" do
expect { execute }.not_to change { another_incident.reload.alert_management_alerts.to_a }
end
it "does not change the alert's incident" do
expect { execute }.not_to change { unrelated_alert.reload.issue }
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module LinkAlerts
class CreateService < BaseService
# @param incident [Issue] an incident to link alerts
# @param current_user [User]
# @param alert_references [[String]] a list of alert references. Can be either a short reference or URL
# Examples:
# "^alert#IID"
# "https://gitlab.com/company/project/-/alert_management/IID/details"
def initialize(incident, current_user, alert_references)
@incident = incident
@current_user = current_user
@alert_references = alert_references
super(project: incident.project, current_user: current_user)
end
def execute
return error_no_permissions unless allowed?
references = extract_alerts_from_references
incident.alert_management_alerts << references if references.present?
success
end
private
attr_reader :alert_references
def extract_alerts_from_references
text = alert_references.join(' ')
extractor = Gitlab::ReferenceExtractor.new(project, current_user)
extractor.analyze(text, {})
extractor.alerts
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::LinkAlerts::CreateService, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
let_it_be(:another_project) { create(:project) }
let_it_be(:linked_alert) { create(:alert_management_alert, project: project) }
let_it_be(:alert1) { create(:alert_management_alert, project: project) }
let_it_be(:alert2) { create(:alert_management_alert, project: project) }
let_it_be(:external_alert) { create(:alert_management_alert, project: another_project) }
let_it_be(:incident) { create(:incident, project: project, alert_management_alerts: [linked_alert]) }
let_it_be(:guest) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:another_developer) { create(:user) }
before_all do
project.add_guest(guest)
project.add_developer(developer)
project.add_developer(another_developer)
another_project.add_guest(guest)
another_project.add_developer(developer)
end
describe '#execute' do
subject(:execute) { described_class.new(incident, current_user, alert_references).execute }
let(:alert_references) { [alert1.to_reference, alert2.details_url] }
context 'when current user is a guest' do
let(:current_user) { guest }
it 'responds with error', :aggregate_failures do
response = execute
expect(response).to be_error
expect(response.message).to eq('You have insufficient permissions to manage alerts for this project')
end
it 'does not link alerts to the incident' do
expect { execute }.not_to change { incident.reload.alert_management_alerts.to_a }
end
end
context 'when current user is a developer' do
let(:current_user) { developer }
it 'responds with success', :aggregate_failures do
response = execute
expect(response).to be_success
expect(response.payload[:incident]).to eq(incident)
end
it 'links alerts to the incident' do
expect { execute }
.to change { incident.reload.alert_management_alerts.to_a }
.from([linked_alert])
.to match_array([linked_alert, alert1, alert2])
end
context 'when linking an already linked alert' do
let(:alert_references) { [linked_alert.details_url] }
it 'does not change incident alerts list' do
expect { execute }.not_to change { incident.reload.alert_management_alerts.to_a }
end
end
context 'when linking an alert from another project' do
let(:alert_references) { [external_alert.details_url] }
it 'links an external alert to the incident' do
expect { execute }
.to change { incident.reload.alert_management_alerts.to_a }
.from([linked_alert])
.to match_array([linked_alert, external_alert])
end
end
end
context 'when current user does not have permission to read alerts on external project' do
let(:current_user) { another_developer }
context 'when linking alerts from current and external projects' do
let(:alert_references) { [alert1.details_url, external_alert.details_url] }
it 'links only alerts the current user can read' do
expect { execute }
.to change { incident.reload.alert_management_alerts.to_a }
.from([linked_alert])
.to match_array([linked_alert, alert1])
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module IncidentManagement
  module TimelineEvents
    # Removes a timeline event from an incident, leaves a system note on the
    # incident, and tracks the deletion for product analytics.
    class DestroyService < TimelineEvents::BaseService
      # @param timeline_event [IncidentManagement::TimelineEvent] event to remove
      # @param user [User] user performing the removal
      def initialize(timeline_event, user)
        @timeline_event = timeline_event
        @user = user
        @incident = timeline_event.incident
        @project = @incident.project
      end

      # Destroys the timeline event when the user is permitted to.
      #
      # @return [ServiceResponse] success payload containing the destroyed
      #   event, or an error response (missing permissions / failed destroy)
      def execute
        return error_no_permissions unless allowed?
        return error_in_save(timeline_event) unless timeline_event.destroy

        add_system_note(incident, user)
        track_timeline_event('incident_management_timeline_event_deleted', project)
        success(timeline_event)
      end

      private

      attr_reader :project, :timeline_event, :user, :incident

      # Records a system note on the incident documenting the deletion.
      def add_system_note(incident, user)
        SystemNoteService.delete_timeline_event(incident, user)
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for IncidentManagement::TimelineEvents::DestroyService: covers
# permission checks, destroy failures, and the success path side effects
# (system note + usage-ping tracking via shared examples).
RSpec.describe IncidentManagement::TimelineEvents::DestroyService, feature_category: :incident_management do
  let_it_be(:user_with_permissions) { create(:user) }
  let_it_be(:user_without_permissions) { create(:user) }
  let_it_be(:project) { create(:project) }
  let_it_be_with_refind(:incident) { create(:incident, project: project) }

  # let! so the event exists before each example runs (destroy is the action under test).
  let!(:timeline_event) { create(:incident_management_timeline_event, incident: incident, project: project) }
  let(:current_user) { user_with_permissions }
  let(:params) { {} }
  let(:service) { described_class.new(timeline_event, current_user) }

  before_all do
    # Developer role can manage timeline events; reporter cannot.
    project.add_developer(user_with_permissions)
    project.add_reporter(user_without_permissions)
  end

  describe '#execute' do
    shared_examples 'error response' do |message|
      it 'has an informative message' do
        expect(execute).to be_error
        expect(execute.message).to eq(message)
      end

      it_behaves_like 'does not track incident management event', :incident_management_timeline_event_deleted
    end

    subject(:execute) { service.execute }

    context 'when current user is anonymous' do
      let(:current_user) { nil }

      it_behaves_like 'error response', 'You have insufficient permissions to manage timeline events for this incident'
    end

    context 'when user does not have permissions to remove timeline events' do
      let(:current_user) { user_without_permissions }

      it_behaves_like 'error response', 'You have insufficient permissions to manage timeline events for this incident'
    end

    context 'when an error occurs during removal' do
      before do
        # Force destroy to fail and stage a validation error so the service
        # surfaces it through error_in_save.
        allow(timeline_event).to receive(:destroy).and_return(false)
        timeline_event.errors.add(:note, 'cannot be removed')
      end

      it_behaves_like 'error response', 'Timeline text cannot be removed'
    end

    context 'with success response' do
      it 'successfully returns the timeline event', :aggregate_failures do
        expect(execute).to be_success

        result = execute.payload[:timeline_event]
        expect(result).to be_a(::IncidentManagement::TimelineEvent)
        expect(result.id).to eq(timeline_event.id)
      end

      it 'creates a system note' do
        expect { execute }.to change { incident.notes.reload.count }.by(1)
      end

      it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_deleted

      it_behaves_like 'Snowplow event tracking with RedisHLL context' do
        let(:namespace) { project.namespace.reload }
        let(:category) { described_class.to_s }
        let(:user) { current_user }
        let(:action) { 'incident_management_timeline_event_deleted' }
        let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module IncidentManagement
  module TimelineEvents
    # Updates a timeline event's note, occurred_at timestamp, and/or its
    # assigned tags, then records a system note describing what changed.
    #
    # @param timeline_event [IncidentManagement::TimelineEvent]
    # @param user [User]
    # @param params [Hash]
    # @option params [string] note
    # @option params [datetime] occurred_at
    class UpdateService < TimelineEvents::BaseService
      # Save context so validations scoped to user input are applied.
      VALIDATION_CONTEXT = :user_input

      def initialize(timeline_event, user, params)
        @timeline_event = timeline_event
        @incident = timeline_event.incident
        @project = incident.project
        @user = user
        @note = params[:note]
        @occurred_at = params[:occurred_at]
        @validation_context = VALIDATION_CONTEXT
        @timeline_event_tags = params[:timeline_event_tag_names]
      end

      # @return [ServiceResponse] success with the updated event, or an error
      #   (permissions, unknown tags, or validation failure)
      def execute
        return error_no_permissions unless allowed?

        # A nil tag list means "leave tags untouched"; an empty list removes
        # all currently assigned tags (see update_timeline_event_and_event_tags).
        unless timeline_event_tags.nil?
          auto_create_predefined_tags(timeline_event_tags)

          # Refetches the tag objects to consider predefined tags as well
          new_tags = timeline_event
            .project
            .incident_management_timeline_event_tags
            .by_names(timeline_event_tags)

          non_existing_tags = validate_tags(new_tags)

          return error("#{_("Following tags don't exist")}: #{non_existing_tags}") if non_existing_tags.any?
        end

        begin
          timeline_event_saved = update_timeline_event_and_event_tags(new_tags)
        rescue ActiveRecord::RecordInvalid
          # Fail fast. Previously this rescue built an error response but
          # discarded it, relying on `timeline_event_saved` being nil so the
          # `else` branch below rebuilt the identical response a second time.
          return error_in_save(timeline_event)
        end

        if timeline_event_saved
          add_system_note(timeline_event)
          track_timeline_event('incident_management_timeline_event_edited', timeline_event.project)
          success(timeline_event)
        else
          error_in_save(timeline_event)
        end
      end

      private

      attr_reader :timeline_event, :incident, :project, :user,
        :note, :occurred_at, :validation_context, :timeline_event_tags

      # Persists attribute and tag changes atomically; raises
      # ActiveRecord::RecordInvalid on validation failure, rolling back any
      # tag reassignment made inside the transaction.
      def update_timeline_event_and_event_tags(new_tags)
        ApplicationRecord.transaction do
          timeline_event.timeline_event_tags = new_tags unless timeline_event_tags.nil?

          timeline_event.assign_attributes(update_params)
          timeline_event.save!(context: validation_context)
        end
      end

      # Only attributes the caller actually supplied are assigned.
      def update_params
        { updated_by_user: user, note: note, occurred_at: occurred_at }.compact
      end

      # Adds an "edited" system note unless nothing user-visible changed.
      def add_system_note(timeline_event)
        changes = was_changed(timeline_event)
        return if changes == :none

        SystemNoteService.edit_timeline_event(timeline_event, user, was_changed: changes)
      end

      # Inspects previous_changes from the last save.
      # @return [Symbol] :occurred_at_and_note, :occurred_at, :note, or :none
      def was_changed(timeline_event)
        changes = timeline_event.previous_changes
        occurred_at_changed = changes.key?('occurred_at')
        note_changed = changes.key?('note')

        return :occurred_at_and_note if occurred_at_changed && note_changed
        return :occurred_at if occurred_at_changed
        return :note if note_changed

        :none
      end

      # Case-insensitive diff of requested tag names against found tags.
      # @return [Array<String>] requested names that do not exist on the project
      def validate_tags(new_tags)
        timeline_event_tags.map(&:downcase) - new_tags.map(&:name).map(&:downcase)
      end

      def allowed?
        user&.can?(:edit_incident_management_timeline_event, timeline_event)
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::TimelineEvents::UpdateService, feature_category: :incident_management do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:incident) { create(:incident, project: project) }
let_it_be(:tag1) { create(:incident_management_timeline_event_tag, project: project, name: 'Tag 1') }
let_it_be(:tag2) { create(:incident_management_timeline_event_tag, project: project, name: 'Tag 2') }
let_it_be(:tag3) { create(:incident_management_timeline_event_tag, project: project, name: 'Tag 3') }
let!(:tag_link1) do
create(:incident_management_timeline_event_tag_link,
timeline_event: timeline_event,
timeline_event_tag: tag3
)
end
let!(:timeline_event) { create(:incident_management_timeline_event, project: project, incident: incident) }
let(:occurred_at) { 1.minute.ago }
let(:params) { { note: 'Updated note', occurred_at: occurred_at } }
let(:current_user) { user }
describe '#execute' do
shared_examples 'successful tag response' do
it_behaves_like 'successful response'
it 'adds the new tag' do
expect { execute }.to change { timeline_event.timeline_event_tags.count }.by(1)
end
it 'adds the new tag link' do
expect { execute }.to change { IncidentManagement::TimelineEventTagLink.count }.by(1)
end
it 'returns the new tag in response' do
timeline_event = execute.payload[:timeline_event]
expect(timeline_event.timeline_event_tags.pluck_names).to contain_exactly(tag1.name, tag3.name)
end
end
shared_examples 'successful response' do
it 'responds with success', :aggregate_failures do
expect(execute).to be_success
expect(execute.payload).to eq(timeline_event: timeline_event.reload)
end
it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_edited
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:namespace) { project.namespace.reload }
let(:category) { described_class.to_s }
let(:action) { 'incident_management_timeline_event_edited' }
let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
end
end
shared_examples 'error response' do |message|
it 'has an informative message' do
expect(execute).to be_error
expect(execute.message).to eq(message)
end
it 'does not update the note' do
expect { execute }.not_to change { timeline_event.reload.note }
end
it_behaves_like 'does not track incident management event', :incident_management_timeline_event_edited
end
shared_examples 'passing the correct was_changed value' do |was_changed|
it 'passes the correct was_changed value into SysteNoteService.edit_timeline_event' do
expect(SystemNoteService)
.to receive(:edit_timeline_event)
.with(timeline_event, user, was_changed: was_changed)
.and_call_original
execute
end
end
subject(:execute) { described_class.new(timeline_event, user, params).execute }
context 'when user has permissions' do
before do
project.add_developer(user)
end
it_behaves_like 'successful response'
it 'updates attributes' do
expect { execute }.to change { timeline_event.note }.to(params[:note])
.and change { timeline_event.occurred_at }.to(params[:occurred_at])
end
it 'creates a system note' do
expect { execute }.to change { incident.notes.reload.count }.by(1)
end
it_behaves_like 'passing the correct was_changed value', :occurred_at_and_note
context 'when note is nil' do
let(:params) { { occurred_at: occurred_at, timeline_event_tag_names: [tag3.name, tag2.name] } }
it_behaves_like 'successful response'
it_behaves_like 'passing the correct was_changed value', :occurred_at
it 'does not update the note' do
expect { execute }.not_to change { timeline_event.reload.note }
end
it 'updates occurred_at' do
expect { execute }.to change { timeline_event.occurred_at }.to(params[:occurred_at])
end
it 'updates the tags' do
expect { execute }.to change { timeline_event.timeline_event_tags.count }.by(1)
end
end
context 'when note is blank' do
let(:params) { { note: '', occurred_at: occurred_at, timeline_event_tag_names: [tag3.name, tag2.name] } }
it_behaves_like 'error response', "Timeline text can't be blank"
it 'does not add the tags as it rollsback the transaction' do
expect { execute }.not_to change { timeline_event.timeline_event_tags.count }
end
end
context 'when note is more than 280 characters long' do
let(:params) { { note: 'n' * 281, occurred_at: occurred_at, timeline_event_tag_names: [tag3.name, tag2.name] } }
it_behaves_like 'error response', 'Timeline text is too long (maximum is 280 characters)'
it 'does not add the tags as it rollsback the transaction' do
expect { execute }.not_to change { timeline_event.timeline_event_tags.count }
end
end
context 'when occurred_at is nil' do
let(:params) { { note: 'Updated note' } }
it_behaves_like 'successful response'
it_behaves_like 'passing the correct was_changed value', :note
it 'updates the note' do
expect { execute }.to change { timeline_event.note }.to(params[:note])
end
it 'does not update occurred_at' do
expect { execute }.not_to change { timeline_event.reload.occurred_at }
end
end
context 'when occurred_at is blank' do
let(:params) { { note: 'Updated note', occurred_at: '', timeline_event_tag_names: [tag3.name, tag2.name] } }
it_behaves_like 'error response', "Occurred at can't be blank"
it 'does not add the tags as it rollsback the transaction' do
expect { execute }.not_to change { timeline_event.timeline_event_tags.count }
end
end
context 'when both occurred_at and note is nil' do
let(:params) { {} }
it_behaves_like 'successful response'
it 'does not update the note' do
expect { execute }.not_to change { timeline_event.note }
end
it 'does not update occurred_at' do
expect { execute }.not_to change { timeline_event.reload.occurred_at }
end
it 'does not call SysteNoteService.edit_timeline_event' do
expect(SystemNoteService).not_to receive(:edit_timeline_event)
execute
end
end
context 'when timeline event is non-editable' do
let!(:timeline_event) do
create(:incident_management_timeline_event, :non_editable, project: project, incident: incident)
end
it_behaves_like 'error response',
'You have insufficient permissions to manage timeline events for this incident'
end
context 'when timeline event tags are passed' do
context 'when predefined tags are passed' do
let(:params) do
{
note: 'Updated note',
occurred_at: occurred_at,
timeline_event_tag_names: ['start time', 'end time', 'response initiated']
}
end
it 'returns the new tag in response' do
timeline_event = execute.payload[:timeline_event]
expect(timeline_event.timeline_event_tags.pluck_names).to contain_exactly(
'Start time', 'End time', 'Response initiated')
end
it 'creates the predefined tags on the project' do
execute
expect(project.incident_management_timeline_event_tags.pluck_names).to include(
'Start time', 'End time', 'Response initiated')
end
end
context 'when they exist' do
let(:params) do
{
note: 'Updated note',
occurred_at: occurred_at,
timeline_event_tag_names: [tag3.name, tag1.name]
}
end
it_behaves_like 'successful tag response'
context 'when tag name is of random case' do
let(:params) do
{
note: 'Updated note',
occurred_at: occurred_at,
timeline_event_tag_names: ['tAg 3', 'TaG 1']
}
end
it_behaves_like 'successful tag response'
end
context 'when tag is removed' do
let(:params) { { note: 'Updated note', occurred_at: occurred_at, timeline_event_tag_names: [tag2.name] } }
it_behaves_like 'successful response'
it 'adds the new tag and removes the old tag' do
# Since it adds a tag (+1) and removes old tag (-1) so next change in count in 0
expect { execute }.to change { timeline_event.timeline_event_tags.count }.by(0)
end
it 'adds the new tag link and removes the old tag link' do
# Since it adds a tag link (+1) and removes old tag link (-1) so next change in count in 0
expect { execute }.to change { IncidentManagement::TimelineEventTagLink.count }.by(0)
end
it 'returns the new tag and does not contain the old tag in response' do
timeline_event = execute.payload[:timeline_event]
expect(timeline_event.timeline_event_tags.pluck_names).to contain_exactly(tag2.name)
end
end
context 'when all assigned tags are removed' do
let(:params) { { note: 'Updated note', occurred_at: occurred_at, timeline_event_tag_names: [] } }
it_behaves_like 'successful response'
it 'removes all the assigned tags' do
expect { execute }.to change { timeline_event.timeline_event_tags.count }.by(-1)
end
it 'removes all the assigned tag links' do
expect { execute }.to change { IncidentManagement::TimelineEventTagLink.count }.by(-1)
end
it 'does not contain any tags in response' do
timeline_event = execute.payload[:timeline_event]
expect(timeline_event.timeline_event_tags.pluck_names).to be_empty
end
end
end
context 'when they do not exist' do
let(:params) do
{
note: 'Updated note 2',
occurred_at: occurred_at,
timeline_event_tag_names: ['non existing tag']
}
end
it_behaves_like 'error response', "Following tags don't exist: [\"non existing tag\"]"
it 'does not update the note' do
expect { execute }.not_to change { timeline_event.reload.note }
end
end
end
end
context 'when user does not have permissions' do
before do
project.add_reporter(user)
end
it_behaves_like 'error response',
'You have insufficient permissions to manage timeline events for this incident'
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module IncidentManagement
  module TimelineEvents
    # Defaults applied when the caller omits the corresponding param.
    DEFAULT_ACTION = 'comment'
    DEFAULT_EDITABLE = false
    DEFAULT_AUTO_CREATED = false

    # Creates a timeline event on an incident. Also exposes class-method
    # factories that build auto-created events (status/severity/label changes
    # etc.); auto-created events bypass the permission check and skip the
    # system note (see #allowed? and #add_system_note).
    class CreateService < TimelineEvents::BaseService
      def initialize(incident, user, params)
        @project = incident.project
        @incident = incident
        @user = user
        @params = params
        # Double-bang normalizes any truthy/falsy param value to true/false.
        @auto_created = !!params.fetch(:auto_created, DEFAULT_AUTO_CREATED)
      end

      class << self
        # Auto-created event noting who opened the incident.
        def create_incident(incident, user)
          note = "@#{user.username} created the incident"
          occurred_at = incident.created_at
          action = 'issues'

          new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
        end

        # Auto-created event noting the incident was reopened.
        def reopen_incident(incident, user)
          note = "@#{user.username} reopened the incident"
          occurred_at = incident.updated_at
          action = 'issues'

          new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
        end

        # Auto-created event noting the incident was resolved.
        def resolve_incident(incident, user)
          note = "@#{user.username} resolved the incident"
          occurred_at = incident.updated_at
          action = 'status'

          new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
        end

        # Auto-created event for an escalation-status transition.
        # @param escalation_status [#status_name] current escalation status
        def change_incident_status(incident, user, escalation_status)
          status = escalation_status.status_name.to_s.titleize
          note = "@#{user.username} changed the incident status to **#{status}**"
          occurred_at = incident.updated_at
          action = 'status'

          new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
        end

        # Auto-created event for a severity change; maps the severity enum to
        # its human-readable label.
        def change_severity(incident, user)
          severity_label = IssuableSeverity::SEVERITY_LABELS[incident.severity.to_sym]
          note = "@#{user.username} changed the incident severity to **#{severity_label}**"
          occurred_at = incident.updated_at
          action = 'severity'

          new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
        end

        # Auto-created event describing label additions/removals. Gated behind
        # the :incident_timeline_events_from_labels feature flag; returns an
        # error response when neither list has entries.
        def change_labels(incident, user, added_labels: [], removed_labels: [])
          return if Feature.disabled?(:incident_timeline_events_from_labels, incident.project)

          if added_labels.blank? && removed_labels.blank?
            return ServiceResponse.error(message: _('There are no changed labels'))
          end

          # Builds e.g. "added ~foo ~bar labels" / "removed ~baz label";
          # nil when the label list is empty so compact can drop it below.
          labels_note = -> (verb, labels) {
            "#{verb} #{labels.map(&:to_reference).join(' ')} #{'label'.pluralize(labels.count)}" if labels.present?
          }

          added_note = labels_note.call('added', added_labels)
          removed_note = labels_note.call('removed', removed_labels)
          note = "@#{user.username} #{[added_note, removed_note].compact.join(' and ')}"
          occurred_at = incident.updated_at
          action = 'label'

          new(incident, user, note: note, occurred_at: occurred_at, action: action, auto_created: true).execute
        end
      end

      # @return [ServiceResponse] success with the created event, or an error
      #   (permissions, unknown tags, or validation failure)
      def execute
        return error_no_permissions unless allowed?

        timeline_event_params = {
          project: project,
          incident: incident,
          author: user,
          note: params[:note],
          action: params.fetch(:action, DEFAULT_ACTION),
          note_html: params[:note_html].presence || params[:note],
          occurred_at: params[:occurred_at],
          promoted_from_note: params[:promoted_from_note],
          editable: params.fetch(:editable, DEFAULT_EDITABLE)
        }

        non_existing_tags = validate_tags(project, params[:timeline_event_tag_names])

        return error("#{_("Following tags don't exist")}: #{non_existing_tags}") unless non_existing_tags.empty?

        timeline_event = IncidentManagement::TimelineEvent.new(timeline_event_params)

        if timeline_event.save(context: validation_context)
          add_system_note(timeline_event)
          create_timeline_event_tag_links(timeline_event, params[:timeline_event_tag_names])

          track_timeline_event("incident_management_timeline_event_created", project)
          success(timeline_event)
        else
          error_in_save(timeline_event)
        end
      end

      private

      attr_reader :project, :user, :incident, :params, :auto_created

      # Auto-created events skip the permission check entirely.
      def allowed?
        return true if auto_created

        super
      end

      # Auto-created events do not get a system note.
      def add_system_note(timeline_event)
        return if auto_created

        SystemNoteService.add_timeline_event(timeline_event)
      end

      # User-typed events (not auto-created, not promoted from a note) are
      # validated under the :user_input context; otherwise no extra context.
      def validation_context
        :user_input if !auto_created && params[:promoted_from_note].blank?
      end

      # Bulk-inserts join records linking the event to its tags; predefined
      # tags are created on the project first if missing.
      def create_timeline_event_tag_links(timeline_event, tag_names)
        return unless tag_names&.any?

        auto_create_predefined_tags(tag_names)

        # Refetches the tag objects to consider predefined tags as well
        tags = project.incident_management_timeline_event_tags.by_names(tag_names)

        tag_links = tags.select(:id).map do |tag|
          {
            timeline_event_id: timeline_event.id,
            timeline_event_tag_id: tag.id,
            created_at: DateTime.current
          }
        end

        IncidentManagement::TimelineEventTagLink.insert_all(tag_links) if tag_links.any?
      end

      # Case-insensitive check of requested tag names against the project's
      # tags; predefined tags never count as missing (they are auto-created).
      # @return [Array<String>] downcased names that cannot be resolved
      def validate_tags(project, tag_names)
        return [] unless tag_names&.any?

        predefined_tags = TimelineEventTag::PREDEFINED_TAGS.map(&:downcase)
        tag_names_downcased = tag_names.map(&:downcase)

        tags = project.incident_management_timeline_event_tags.by_names(tag_names).pluck_names.map(&:downcase)

        # remove tags from given tag_names and also remove predefined tags which can be auto created
        tag_names_downcased - tags - predefined_tags
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::TimelineEvents::CreateService, feature_category: :incident_management do
let_it_be(:user_with_permissions) { create(:user) }
let_it_be(:user_without_permissions) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be_with_refind(:incident) { create(:incident, project: project) }
let_it_be(:comment) { create(:note, project: project, noteable: incident) }
let_it_be(:timeline_event_tag) do
create(:incident_management_timeline_event_tag, name: 'Test tag 1', project: project)
end
let(:args) do
{
note: 'note',
occurred_at: Time.current,
action: 'new comment',
promoted_from_note: comment
}
end
let(:editable) { false }
let(:current_user) { user_with_permissions }
let(:service) { described_class.new(incident, current_user, args) }
before_all do
project.add_developer(user_with_permissions)
project.add_reporter(user_without_permissions)
end
describe '#execute' do
shared_examples 'error response' do |message|
it 'has an informative message' do
expect(execute).to be_error
expect(execute.message).to eq(message)
end
it_behaves_like 'does not track incident management event', :incident_management_timeline_event_created
end
shared_examples 'success response' do
it 'has timeline event', :aggregate_failures do
expect(execute).to be_success
result = execute.payload[:timeline_event]
expect(result).to be_a(::IncidentManagement::TimelineEvent)
expect(result.author).to eq(current_user)
expect(result.incident).to eq(incident)
expect(result.project).to eq(project)
expect(result.note).to eq(args[:note])
expect(result.promoted_from_note).to eq(comment)
expect(result.editable).to eq(editable)
end
it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_created
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:namespace) { project.namespace.reload }
let(:category) { described_class.to_s }
let(:user) { current_user }
let(:action) { 'incident_management_timeline_event_created' }
let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
end
end
subject(:execute) { service.execute }
context 'when current user is blank' do
let(:current_user) { nil }
it_behaves_like 'error response', 'You have insufficient permissions to manage timeline events for this incident'
end
context 'when user does not have permissions to create timeline events' do
let(:current_user) { user_without_permissions }
it_behaves_like 'error response', 'You have insufficient permissions to manage timeline events for this incident'
end
context 'when error occurs during creation' do
let(:args) { {} }
it_behaves_like 'error response', "Occurred at can't be blank and Timeline text can't be blank"
end
context 'with default action' do
let(:args) { { note: 'note', occurred_at: Time.current, promoted_from_note: comment } }
it_behaves_like 'success response'
it 'matches the default action', :aggregate_failures do
result = execute.payload[:timeline_event]
expect(result.action).to eq(IncidentManagement::TimelineEvents::DEFAULT_ACTION)
end
it 'creates a system note' do
expect { execute }.to change { incident.notes.reload.count }.by(1)
end
context 'with auto_created param' do
let(:args) do
{
note: 'note',
occurred_at: Time.current,
action: 'new comment',
promoted_from_note: comment,
auto_created: auto_created
}
end
context 'when auto_created is true' do
let(:auto_created) { true }
it 'does not create a system note' do
expect { execute }.not_to change { incident.notes.reload.count }
end
context 'when user does not have permissions' do
let(:current_user) { user_without_permissions }
it_behaves_like 'success response'
end
end
context 'when auto_created is false' do
let(:auto_created) { false }
it 'creates a system note' do
expect { execute }.to change { incident.notes.reload.count }.by(1)
end
end
end
end
context 'with non_default action' do
it_behaves_like 'success response'
it 'matches the action from arguments', :aggregate_failures do
result = execute.payload[:timeline_event]
expect(result.action).to eq(args[:action])
end
end
context 'when timeline event tag names are passed' do
let(:args) do
{
note: 'note',
occurred_at: Time.current,
action: 'new comment',
promoted_from_note: comment,
timeline_event_tag_names: ['Test tag 1']
}
end
it_behaves_like 'success response'
it 'matches the tag name' do
result = execute.payload[:timeline_event]
expect(result.timeline_event_tags.first).to eq(timeline_event_tag)
end
context 'when predefined tags are passed' do
let(:args) do
{
note: 'note',
occurred_at: Time.current,
action: 'new comment',
promoted_from_note: comment,
timeline_event_tag_names: ['start time', 'end time', 'Impact mitigated']
}
end
it_behaves_like 'success response'
it 'matches the two tags on the event and creates on project' do
result = execute.payload[:timeline_event]
expect(result.timeline_event_tags.count).to eq(3)
expect(result.timeline_event_tags.by_names(['Start time', 'End time', 'Impact mitigated']).pluck_names)
.to match_array(['Start time', 'End time', 'Impact mitigated'])
expect(project.incident_management_timeline_event_tags.pluck_names)
.to include('Start time', 'End time', 'Impact mitigated')
end
end
context 'when invalid tag names are passed' do
let(:args) do
{
note: 'note',
occurred_at: Time.current,
action: 'new comment',
promoted_from_note: comment,
timeline_event_tag_names: ['some other time']
}
end
it_behaves_like 'error response', "Following tags don't exist: [\"some other time\"]"
it 'does not create timeline event' do
expect { execute }.not_to change(IncidentManagement::TimelineEvent, :count)
end
end
end
context 'with editable param' do
let(:args) do
{
note: 'note',
occurred_at: Time.current,
action: 'new comment',
promoted_from_note: comment,
editable: editable
}
end
context 'when editable is true' do
let(:editable) { true }
it_behaves_like 'success response'
end
context 'when editable is false' do
let(:editable) { false }
it_behaves_like 'success response'
end
end
it 'successfully creates a database record', :aggregate_failures do
expect { execute }.to change { ::IncidentManagement::TimelineEvent.count }.by(1)
end
context 'when note is more than 280 characters long' do
let(:args) do
{
note: 'a' * 281,
occurred_at: Time.current,
action: 'new comment',
promoted_from_note: comment,
auto_created: auto_created
}
end
let(:auto_created) { false }
context 'when was not promoted from note' do
let(:comment) { nil }
context 'when auto_created is true' do
let(:auto_created) { true }
it_behaves_like 'success response'
end
context 'when auto_created is false' do
it_behaves_like 'error response', 'Timeline text is too long (maximum is 280 characters)'
end
end
context 'when promoted from note' do
it_behaves_like 'success response'
end
end
end
describe 'automatically created timeline events' do
shared_examples 'successfully created timeline event' do
it 'creates a timeline event', :aggregate_failures do
expect(execute).to be_success
result = execute.payload[:timeline_event]
expect(result).to be_a(::IncidentManagement::TimelineEvent)
expect(result.author).to eq(current_user)
expect(result.incident).to eq(incident)
expect(result.project).to eq(project)
expect(result.note).to eq(expected_note)
expect(result.editable).to eq(false)
expect(result.action).to eq(expected_action)
end
it_behaves_like 'an incident management tracked event', :incident_management_timeline_event_created
it_behaves_like 'Snowplow event tracking with RedisHLL context' do
let(:namespace) { project.namespace.reload }
let(:category) { described_class.to_s }
let(:user) { current_user }
let(:action) { 'incident_management_timeline_event_created' }
let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' }
end
it 'successfully creates a database record', :aggregate_failures do
expect { execute }.to change { ::IncidentManagement::TimelineEvent.count }.by(1)
end
it 'does not create a system note' do
expect { execute }.not_to change { incident.notes.reload.count }
end
end
describe '.create_incident' do
subject(:execute) { described_class.create_incident(incident, current_user) }
let(:expected_note) { "@#{current_user.username} created the incident" }
let(:expected_action) { 'issues' }
it_behaves_like 'successfully created timeline event'
end
describe '.reopen_incident' do
subject(:execute) { described_class.reopen_incident(incident, current_user) }
let(:expected_note) { "@#{current_user.username} reopened the incident" }
let(:expected_action) { 'issues' }
it_behaves_like 'successfully created timeline event'
end
describe '.resolve_incident' do
subject(:execute) { described_class.resolve_incident(incident, current_user) }
let(:expected_note) { "@#{current_user.username} resolved the incident" }
let(:expected_action) { 'status' }
it_behaves_like 'successfully created timeline event'
end
describe '.change_incident_status' do
subject(:execute) { described_class.change_incident_status(incident, current_user, escalation_status) }
let(:escalation_status) do
instance_double('IncidentManagement::IssuableEscalationStatus', status_name: 'acknowledged')
end
let(:expected_note) { "@#{current_user.username} changed the incident status to **Acknowledged**" }
let(:expected_action) { 'status' }
it_behaves_like 'successfully created timeline event'
end
describe '.change_severity' do
subject(:execute) { described_class.change_severity(incident, current_user) }
let_it_be(:severity) { create(:issuable_severity, severity: :critical, issue: incident) }
let(:expected_note) { "@#{current_user.username} changed the incident severity to **Critical - S1**" }
let(:expected_action) { 'severity' }
it_behaves_like 'successfully created timeline event'
end
describe '.change_labels' do
subject(:execute) do
described_class.change_labels(incident, current_user, added_labels: added, removed_labels: removed)
end
let_it_be(:labels) { create_list(:label, 4, project: project) }
let(:expected_action) { 'label' }
context 'when there are neither added nor removed labels' do
let(:added) { [] }
let(:removed) { [] }
it 'responds with error', :aggregate_failures do
expect(execute).to be_error
expect(execute.message).to eq(_('There are no changed labels'))
end
it 'does not create timeline event' do
expect { execute }.not_to change { incident.incident_management_timeline_events.count }
end
end
context 'when there are only added labels' do
let(:added) { [labels[0], labels[1]] }
let(:removed) { [] }
let(:expected_note) { "@#{current_user.username} added #{added.map(&:to_reference).join(' ')} labels" }
it_behaves_like 'successfully created timeline event'
end
context 'when there are only removed labels' do
let(:added) { [] }
let(:removed) { [labels[2], labels[3]] }
let(:expected_note) { "@#{current_user.username} removed #{removed.map(&:to_reference).join(' ')} labels" }
it_behaves_like 'successfully created timeline event'
end
context 'when there are both added and removed labels' do
let(:added) { [labels[0], labels[1]] }
let(:removed) { [labels[2], labels[3]] }
let(:expected_note) do
added_note = "added #{added.map(&:to_reference).join(' ')} labels"
removed_note = "removed #{removed.map(&:to_reference).join(' ')} labels"
"@#{current_user.username} #{added_note} and #{removed_note}"
end
it_behaves_like 'successfully created timeline event'
end
context 'when there is a single added and single removed labels' do
let(:added) { [labels[0]] }
let(:removed) { [labels[3]] }
let(:expected_note) do
added_note = "added #{added.first.to_reference} label"
removed_note = "removed #{removed.first.to_reference} label"
"@#{current_user.username} #{added_note} and #{removed_note}"
end
it_behaves_like 'successfully created timeline event'
end
context 'when feature flag is disabled' do
let(:added) { [labels[0], labels[1]] }
let(:removed) { [labels[2], labels[3]] }
before do
stub_feature_flags(incident_timeline_events_from_labels: false)
end
it 'does not create timeline event' do
expect { execute }.not_to change { incident.incident_management_timeline_events.count }
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module IncidentManagement
  module IssuableEscalationStatuses
    # Runs side effects after an issuable's escalation status has been
    # updated: a status-change system note and a status-change timeline event.
    # NOTE(review): EE extends this class via `prepend_mod` (bottom of file),
    # so the private method structure is part of the extension surface.
    class AfterUpdateService < ::BaseProjectService
      # @param issuable [Object] issuable whose escalation status was updated;
      #   must respond to #escalation_status and #project
      # @param current_user [User]
      # @param params [Hash] may include :status_change_reason for the note
      def initialize(issuable, current_user, **params)
        @issuable = issuable
        @escalation_status = issuable.escalation_status

        super(project: issuable.project, current_user: current_user, params: params)
      end

      # @return [ServiceResponse] always success; payload carries the
      #   (already persisted) escalation status.
      def execute
        after_update

        ServiceResponse.success(payload: { escalation_status: escalation_status })
      end

      private

      attr_reader :issuable, :escalation_status

      def after_update
        add_status_system_note
        add_timeline_event
      end

      # No-op unless the status actually changed in the preceding update.
      def add_status_system_note
        return unless escalation_status.status_previously_changed?

        SystemNoteService.change_incident_status(issuable, current_user, params[:status_change_reason])
      end

      # No-op unless the status actually changed in the preceding update.
      def add_timeline_event
        return unless escalation_status.status_previously_changed?

        IncidentManagement::TimelineEvents::CreateService
          .change_incident_status(issuable, current_user, escalation_status)
      end
    end
  end
end

::IncidentManagement::IssuableEscalationStatuses::AfterUpdateService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::IssuableEscalationStatuses::AfterUpdateService,
feature_category: :incident_management do
let_it_be(:current_user) { create(:user) }
let_it_be(:escalation_status, reload: true) { create(:incident_management_issuable_escalation_status, :triggered) }
let_it_be(:issue, reload: true) { escalation_status.issue }
let_it_be(:project) { issue.project }
let(:service) { described_class.new(issue, current_user) }
subject(:result) do
issue.update!(incident_management_issuable_escalation_status_attributes: update_params)
service.execute
end
before do
issue.project.add_developer(current_user)
end
context 'with status attributes' do
let(:status_event) { :acknowledge }
let(:update_params) { { status_event: status_event } }
it 'adds a status change system note' do
expect { result }.to change { issue.reload.notes.count }.by(1)
end
it 'adds a status change timeline event' do
expect(IncidentManagement::TimelineEvents::CreateService)
.to receive(:change_incident_status)
.with(issue, current_user, escalation_status)
.and_call_original
expect { result }.to change { issue.reload.incident_management_timeline_events.count }.by(1)
end
end
context 'with non-status attributes' do
let(:update_params) { { updated_at: Time.current } }
it 'does not add a status change system note or timeline event' do
expect { result }
.to not_change { issue.reload.notes.count }
.and not_change { issue.reload.incident_management_timeline_events.count }
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module IssuableEscalationStatuses
class BuildService < ::BaseProjectService
def initialize(issue)
@issue = issue
super(project: issue.project)
end
def execute
issue.escalation_status || issue.build_incident_management_issuable_escalation_status
end
private
attr_reader :issue
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::IssuableEscalationStatuses::BuildService, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
let_it_be(:incident, reload: true) { create(:incident, project: project) }
let(:service) { described_class.new(incident) }
subject(:execute) { service.execute }
it_behaves_like 'initializes new escalation status with expected attributes'
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module IssuableEscalationStatuses
class PrepareUpdateService < ::BaseProjectService
include Gitlab::Utils::StrongMemoize
SUPPORTED_PARAMS = %i[status].freeze
def initialize(issuable, current_user, params)
@issuable = issuable
@param_errors = []
super(project: issuable.project, current_user: current_user, params: params)
end
def execute
return availability_error unless available?
filter_unsupported_params
filter_attributes
filter_redundant_params
return invalid_param_error if param_errors.any?
ServiceResponse.success(payload: { escalation_status: params })
end
private
attr_reader :issuable, :param_errors
def available?
issuable.supports_escalation? && user_has_permissions?
end
def user_has_permissions?
current_user&.can?(:update_escalation_status, issuable)
end
def escalation_status
strong_memoize(:escalation_status) do
issuable.escalation_status || BuildService.new(issuable).execute
end
end
def filter_unsupported_params
params.slice!(*supported_params)
end
def supported_params
SUPPORTED_PARAMS
end
def filter_attributes
filter_status
end
def filter_status
status = params.delete(:status)
return unless status
status_event = escalation_status.status_event_for(status)
add_param_error(:status) && return unless status_event
params[:status_event] = status_event
end
def filter_redundant_params
params.delete_if do |key, value|
current_params.key?(key) && current_params[key] == value
end
end
def current_params
strong_memoize(:current_params) do
{
status_event: escalation_status.status_event_for(escalation_status.status_name)
}
end
end
def add_param_error(param)
param_errors << param
end
def availability_error
ServiceResponse.error(message: 'Escalation status updates are not available for this issue, user, or project.')
end
def invalid_param_error
ServiceResponse.error(message: "Invalid value was provided for parameters: #{param_errors.join(', ')}")
end
end
end
end
::IncidentManagement::IssuableEscalationStatuses::PrepareUpdateService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::IssuableEscalationStatuses::PrepareUpdateService, factory_default: :keep,
feature_category: :incident_management do
let_it_be(:project) { create_default(:project) }
let_it_be(:escalation_status) { create(:incident_management_issuable_escalation_status, :triggered) }
let_it_be(:user_with_permissions) { create(:user) }
let(:current_user) { user_with_permissions }
let(:issue) { escalation_status.issue }
let(:status) { :acknowledged }
let(:params) { { status: status } }
let(:service) { described_class.new(issue, current_user, params) }
subject(:result) { service.execute }
before do
issue.project.add_developer(user_with_permissions)
end
shared_examples 'successful response' do |payload|
it 'returns valid parameters which can be used to update the issue' do
expect(result).to be_success
expect(result.payload).to eq(escalation_status: payload)
end
end
shared_examples 'error response' do |message|
specify do
expect(result).to be_error
expect(result.message).to eq(message)
end
end
shared_examples 'availability error response' do
include_examples 'error response', 'Escalation status updates are not available for this issue, user, or project.'
end
shared_examples 'invalid params error response' do
include_examples 'error response', 'Invalid value was provided for parameters: status'
end
it_behaves_like 'successful response', { status_event: :acknowledge }
context 'when user is anonymous' do
let(:current_user) { nil }
it_behaves_like 'availability error response'
end
context 'when user does not have permissions' do
let(:current_user) { create(:user) }
it_behaves_like 'availability error response'
end
context 'when called with an unsupported issue type' do
let(:issue) { create(:issue) }
it_behaves_like 'availability error response'
end
context 'when an IssuableEscalationStatus record for the issue does not exist' do
let(:issue) { create(:incident) }
it_behaves_like 'successful response', { status_event: :acknowledge }
it 'initializes an issuable escalation status record' do
expect { result }.not_to change(::IncidentManagement::IssuableEscalationStatus, :count)
expect(issue.escalation_status).to be_present
end
end
context 'when called nil params' do
let(:params) { nil }
it 'raises an exception' do
expect { result }.to raise_error NoMethodError
end
end
context 'when called without params' do
let(:params) { {} }
it_behaves_like 'successful response', {}
end
context 'when called with unsupported params' do
let(:params) { { escalations_started_at: Time.current } }
it_behaves_like 'successful response', {}
end
context 'with status param' do
context 'when status matches the current status' do
let(:params) { { status: :triggered } }
it_behaves_like 'successful response', {}
end
context 'when status is unsupported' do
let(:params) { { status: :mitigated } }
it_behaves_like 'invalid params error response'
end
context 'when status is a String' do
let(:params) { { status: 'acknowledged' } }
it_behaves_like 'successful response', { status_event: :acknowledge }
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module IssuableEscalationStatuses
class CreateService < ::BaseProjectService
def initialize(issue)
@issue = issue
super(project: issue.project)
end
def execute
escalation_status = BuildService.new(issue).execute
if escalation_status.save
ServiceResponse.success(payload: { escalation_status: escalation_status })
else
ServiceResponse.error(message: escalation_status.errors&.full_messages)
end
end
private
attr_reader :issue
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::IssuableEscalationStatuses::CreateService, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
let(:incident) { create(:incident, project: project) }
let(:service) { described_class.new(incident) }
subject(:execute) { service.execute }
it 'creates an escalation status for the incident with no policy set' do
expect { execute }.to change { incident.reload.escalation_status }.from(nil)
status = incident.escalation_status
expect(status.policy_id).to eq(nil)
expect(status.escalations_started_at).to eq(nil)
expect(status.status_name).to eq(:triggered)
end
context 'existing escalation status' do
let!(:existing_status) { create(:incident_management_issuable_escalation_status, issue: incident) }
it 'exits without changing anything' do
expect { execute }.not_to change { incident.reload.escalation_status }
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module PagerDuty
class ProcessWebhookService < ::BaseProjectService
include Gitlab::Utils::StrongMemoize
include IncidentManagement::Settings
# https://developer.pagerduty.com/docs/webhooks/webhook-behavior/#size-limit
PAGER_DUTY_PAYLOAD_SIZE_LIMIT = 55.kilobytes
# https://developer.pagerduty.com/docs/db0fa8c8984fc-overview#event-types
PAGER_DUTY_PROCESSABLE_EVENT_TYPES = %w[incident.triggered].freeze
def initialize(project, payload)
super(project: project)
@payload = payload
end
def execute(token)
return forbidden unless webhook_setting_active?
return unauthorized unless valid_token?(token)
return bad_request unless valid_payload_size?
process_incidents
accepted
end
private
attr_reader :payload
def process_incidents
event = pager_duty_processable_event
return unless event
::IncidentManagement::PagerDuty::ProcessIncidentWorker
.perform_async(project.id, event['incident'])
end
def pager_duty_processable_event
strong_memoize(:pager_duty_processable_event) do
event = ::PagerDuty::WebhookPayloadParser.call(payload.to_h)
event if event['event'].to_s.in?(PAGER_DUTY_PROCESSABLE_EVENT_TYPES)
end
end
def webhook_setting_active?
incident_management_setting.pagerduty_active?
end
def valid_token?(token)
token && incident_management_setting.pagerduty_token == token
end
def valid_payload_size?
Gitlab::Utils::DeepSize.new(payload, max_size: PAGER_DUTY_PAYLOAD_SIZE_LIMIT).valid?
end
def accepted
ServiceResponse.success(http_status: :accepted)
end
def forbidden
ServiceResponse.error(message: 'Forbidden', http_status: :forbidden)
end
def unauthorized
ServiceResponse.error(message: 'Unauthorized', http_status: :unauthorized)
end
def bad_request
ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::PagerDuty::ProcessWebhookService, feature_category: :incident_management do
let_it_be(:project, reload: true) { create(:project) }
describe '#execute' do
shared_examples 'does not process incidents' do
it 'does not process incidents' do
expect(::IncidentManagement::PagerDuty::ProcessIncidentWorker).not_to receive(:perform_async)
execute
end
end
let(:webhook_payload) { Gitlab::Json.parse(fixture_file('pager_duty/webhook_incident_trigger.json')) }
let(:token) { nil }
subject(:execute) { described_class.new(project, webhook_payload).execute(token) }
context 'when PagerDuty webhook setting is active' do
let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: true) }
context 'when token is valid' do
let(:token) { incident_management_setting.pagerduty_token }
context 'when webhook payload has acceptable size' do
it 'responds with Accepted' do
result = execute
expect(result).to be_success
expect(result.http_status).to eq(:accepted)
end
it 'processes issues' do
incident_payload = ::PagerDuty::WebhookPayloadParser.call(webhook_payload)['incident']
expect(::IncidentManagement::PagerDuty::ProcessIncidentWorker)
.to receive(:perform_async)
.with(project.id, incident_payload)
.once
execute
end
end
context 'when webhook payload is too big' do
let(:deep_size) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
before do
allow(Gitlab::Utils::DeepSize)
.to receive(:new)
.with(webhook_payload, max_size: described_class::PAGER_DUTY_PAYLOAD_SIZE_LIMIT)
.and_return(deep_size)
end
it 'responds with Bad Request' do
result = execute
expect(result).to be_error
expect(result.http_status).to eq(:bad_request)
end
it_behaves_like 'does not process incidents'
end
context 'when webhook payload is blank' do
let(:webhook_payload) { nil }
it 'responds with Accepted' do
result = execute
expect(result).to be_success
expect(result.http_status).to eq(:accepted)
end
it_behaves_like 'does not process incidents'
end
end
context 'when token is invalid' do
let(:token) { 'invalid-token' }
it 'responds with Unauthorized' do
result = execute
expect(result).to be_error
expect(result.http_status).to eq(:unauthorized)
end
it_behaves_like 'does not process incidents'
end
end
context 'when both tokens are nil' do
let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
let(:token) { nil }
before do
incident_management_setting.update_column(:pagerduty_active, true)
end
it 'responds with Unauthorized' do
result = execute
expect(result).to be_error
expect(result.http_status).to eq(:unauthorized)
end
it_behaves_like 'does not process incidents'
end
context 'when PagerDuty webhook setting is not active' do
let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
it 'responds with Forbidden' do
result = execute
expect(result).to be_error
expect(result.http_status).to eq(:forbidden)
end
it_behaves_like 'does not process incidents'
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module PagerDuty
class CreateIncidentIssueService < BaseService
include IncidentManagement::Settings
def initialize(project, incident_payload)
super(project, Users::Internal.alert_bot, incident_payload)
end
def execute
return forbidden unless webhook_available?
create_incident
end
private
alias_method :incident_payload, :params
def create_incident
::IncidentManagement::Incidents::CreateService.new(
project,
current_user,
title: issue_title,
description: issue_description
).execute
end
def webhook_available?
incident_management_setting.pagerduty_active?
end
def forbidden
ServiceResponse.error(message: 'Forbidden', http_status: :forbidden)
end
def issue_title
incident_payload['title']
end
def issue_description
Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription.new(incident_payload).to_s
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService, feature_category: :incident_management do
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:user) { Users::Internal.alert_bot }
let(:webhook_payload) { Gitlab::Json.parse(fixture_file('pager_duty/webhook_incident_trigger.json')) }
let(:parsed_payload) { ::PagerDuty::WebhookPayloadParser.call(webhook_payload) }
let(:incident_payload) { parsed_payload['incident'] }
subject(:execute) { described_class.new(project, incident_payload).execute }
describe '#execute' do
context 'when PagerDuty webhook setting is active' do
let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: true) }
context 'when issue can be created' do
it 'creates a new issue' do
expect { execute }.to change(Issue, :count).by(1)
end
it 'responds with success' do
response = execute
expect(response).to be_success
expect(response.payload[:issue]).to be_kind_of(Issue)
end
it 'the issue author is Alert bot' do
expect(execute.payload[:issue].author).to eq(Users::Internal.alert_bot)
end
it 'issue has a correct title' do
expect(execute.payload[:issue].title).to eq(incident_payload['title'])
end
it 'issue has a correct description' do
markdown_line_break = ' '
expect(execute.payload[:issue].description).to eq(
<<~MARKDOWN.chomp
**Incident:** [[FILTERED]](https://gitlab-1.pagerduty.com/incidents/Q1XZUF87W1HB5A)#{markdown_line_break}
**Incident number:** 2#{markdown_line_break}
**Urgency:** high#{markdown_line_break}
**Status:** triggered#{markdown_line_break}
**Incident key:** [FILTERED]#{markdown_line_break}
**Created at:** 30 November 2022, 8:46AM (UTC)#{markdown_line_break}
**Assignees:** [Rajendra Kadam](https://gitlab-1.pagerduty.com/users/PIN0B5C)#{markdown_line_break}
**Impacted service:** [Test service](https://gitlab-1.pagerduty.com/services/PK6IKMT)
MARKDOWN
)
end
end
context 'when the payload does not contain a title' do
let(:incident_payload) { {} }
it 'does not create a GitLab issue' do
expect { execute }.not_to change(Issue, :count)
end
it 'responds with error' do
expect(execute).to be_error
expect(execute.errors).to contain_exactly("Title can't be blank")
end
end
end
context 'when PagerDuty webhook setting is not active' do
let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
it 'does not create a GitLab issue' do
expect { execute }.not_to change(Issue, :count)
end
it 'responds with forbidden' do
expect(execute).to be_error
expect(execute.http_status).to eq(:forbidden)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module IncidentManagement
module Incidents
class CreateService < ::BaseProjectService
ISSUE_TYPE = 'incident'
def initialize(project, current_user, title:, description:, severity: IssuableSeverity::DEFAULT, alert: nil)
super(project: project, current_user: current_user)
@title = title
@description = description
@severity = severity
@alert = alert
end
def execute
create_result = Issues::CreateService.new(
container: project,
current_user: current_user,
params: {
title: title,
description: description,
issue_type: ISSUE_TYPE,
severity: severity,
alert_management_alerts: [alert].compact
},
perform_spam_check: false
).execute
if alert
return error(alert.errors.full_messages, create_result[:issue]) unless alert.valid?
end
create_result
end
private
attr_reader :title, :description, :severity, :alert
def error(message, issue = nil)
ServiceResponse.error(payload: { issue: issue }, message: message)
end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe IncidentManagement::Incidents::CreateService, feature_category: :incident_management do
let_it_be(:project) { create(:project) }
let_it_be(:user) { Users::Internal.alert_bot }
let(:description) { 'Incident description' }
describe '#execute' do
subject(:create_incident) { described_class.new(project, user, title: title, description: description).execute }
context 'when incident has title and description' do
let(:title) { 'Incident title' }
let(:new_issue) { Issue.last! }
it 'responds with success' do
expect(create_incident).to be_success
end
it 'creates an incident issue' do
expect { create_incident }.to change(Issue, :count).by(1)
end
it 'created issue has correct attributes', :aggregate_failures do
create_incident
expect(new_issue.title).to eq(title)
expect(new_issue.description).to eq(description)
expect(new_issue.author).to eq(user)
end
it_behaves_like 'incident issue' do
before do
create_incident
end
let(:issue) { new_issue }
end
context 'with default severity' do
it 'sets the correct severity level to "unknown"' do
create_incident
expect(new_issue.severity).to eq(IssuableSeverity::DEFAULT)
end
end
context 'with severity' do
using RSpec::Parameterized::TableSyntax
subject(:create_incident) { described_class.new(project, user, title: title, description: description, severity: severity).execute }
where(:severity, :incident_severity) do
'critical' | 'critical'
'high' | 'high'
'medium' | 'medium'
'low' | 'low'
'unknown' | 'unknown'
end
with_them do
it 'sets the correct severity level' do
create_incident
expect(new_issue.severity).to eq(incident_severity)
end
end
end
context 'with an alert' do
subject(:create_incident) { described_class.new(project, user, title: title, description: description, alert: alert).execute }
context 'when the alert is valid' do
let(:alert) { create(:alert_management_alert, project: project) }
it 'associates the alert with the incident' do
expect(create_incident[:issue].reload.alert_management_alerts).to match_array([alert])
end
end
context 'when the alert is not valid' do
let(:alert) { create(:alert_management_alert, :with_validation_errors, project: project) }
it 'does not associate the alert with the incident' do
expect(create_incident[:issue].reload.alert_management_alerts).to be_empty
end
end
end
end
context 'when incident has no title' do
let(:title) { '' }
it 'does not create an issue' do
expect { create_incident }.not_to change(Issue, :count)
end
it 'responds with errors' do
expect(create_incident).to be_error
expect(create_incident.errors).to contain_exactly("Title can't be blank")
end
it 'result payload contains an Issue object' do
expect(create_incident.payload[:issue]).to be_kind_of(Issue)
end
context 'with alert' do
let(:alert) { create(:alert_management_alert, project: project) }
subject(:create_incident) { described_class.new(project, user, title: title, description: description, alert: alert).execute }
context 'the alert prevents the issue from saving' do
let(:alert) { create(:alert_management_alert, :with_validation_errors, project: project) }
it 'responds with errors' do
expect(create_incident).to be_error
expect(create_incident.errors).to contain_exactly('Hosts hosts array is over 255 chars')
end
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Labels
class FindOrCreateService
def initialize(current_user, parent, params = {})
@current_user = current_user
@parent = parent
@available_labels = params.delete(:available_labels)
@existing_labels_by_title = params.delete(:existing_labels_by_title)
@params = params.dup.with_indifferent_access
end
def execute(skip_authorization: false, find_only: false)
@skip_authorization = skip_authorization
find_or_create_label(find_only: find_only)
end
private
attr_reader :current_user, :parent, :params, :skip_authorization, :existing_labels_by_title
def available_labels
@available_labels ||= LabelsFinder.new(
current_user,
"#{parent_type}_id".to_sym => parent.id,
include_ancestor_groups: include_ancestor_groups?,
only_group_labels: parent_is_group?
).execute(skip_authorization: skip_authorization)
end
# Only creates the label if current_user can do so, if the label does not exist
# and the user can not create the label, nil is returned
def find_or_create_label(find_only: false)
new_label = find_existing_label(title)
return new_label if find_only
if new_label.nil? && (skip_authorization || Ability.allowed?(current_user, :admin_label, parent))
create_params = params.except(:include_ancestor_groups)
new_label = Labels::CreateService.new(create_params).execute(parent_type.to_sym => parent)
end
new_label
end
# rubocop: disable CodeReuse/ActiveRecord
def find_existing_label(title)
return existing_labels_by_title[title] if existing_labels_by_title
available_labels.find_by(title: title)
end
# rubocop: enable CodeReuse/ActiveRecord
def title
params[:title] || params[:name]
end
def parent_type
parent.model_name.param_key
end
def parent_is_group?
parent_type == "group"
end
def include_ancestor_groups?
params[:include_ancestor_groups] == true
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Labels::FindOrCreateService, feature_category: :team_planning do
describe '#execute' do
let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) }
let(:params) do
{
title: 'Security',
description: 'Security related stuff.',
color: '#FF0000'
}
end
context 'when acting on behalf of a specific user' do
let(:user) { create(:user) }
context 'when finding labels on project level' do
subject(:service) { described_class.new(user, project, params) }
before do
project.add_developer(user)
end
context 'when existing_labels_by_title is provided' do
let(:preloaded_label) { build(:label, title: 'Security') }
before do
params.merge!(
existing_labels_by_title: {
'Security' => preloaded_label
})
end
context 'when label exists' do
it 'returns preloaded label' do
expect(service.execute).to eq preloaded_label
end
end
context 'when label does not exists' do
before do
params[:title] = 'Audit'
end
it 'does not generates additional label search' do
service.execute
expect(LabelsFinder).not_to receive(:new)
end
end
end
context 'when label does not exist at group level' do
it 'creates a new label at project level' do
expect { service.execute }.to change(project.labels, :count).by(1)
end
end
context 'when label exists at group level' do
it 'returns the group label' do
group_label = create(:group_label, group: group, title: 'Security')
expect(service.execute).to eq group_label
end
end
context 'when label exists at project level' do
it 'returns the project label' do
project_label = create(:label, project: project, title: 'Security')
expect(service.execute).to eq project_label
end
end
context 'when include_ancestor_groups is true' do
let(:group) { create(:group, :nested) }
let(:params) do
{
title: 'Audit',
include_ancestor_groups: true
}
end
it 'returns the ancestor group labels' do
group_label = create(:group_label, group: group.parent, title: 'Audit')
expect(service.execute).to eq group_label
end
it 'creates new labels if labels are not found' do
expect { service.execute }.to change(project.labels, :count).by(1)
end
end
end
context 'when finding labels on group level' do
subject(:service) { described_class.new(user, group, params) }
before do
group.add_developer(user)
end
context 'when label does not exist at group level' do
it 'creates a new label at group level' do
expect { service.execute }.to change(group.labels, :count).by(1)
end
end
context 'when label exists at group level' do
it 'returns the group label' do
group_label = create(:group_label, group: group, title: 'Security')
expect(service.execute).to eq group_label
end
end
end
end
context 'when authorization is not required' do
context 'when finding labels on project level' do
subject(:service) { described_class.new(nil, project, params) }
it 'returns the project label' do
project_label = create(:label, project: project, title: 'Security')
expect(service.execute(skip_authorization: true)).to eq project_label
end
end
context 'when finding labels on group level' do
subject(:service) { described_class.new(nil, group, params) }
it 'returns the group label' do
group_label = create(:group_label, group: group, title: 'Security')
expect(service.execute(skip_authorization: true)).to eq group_label
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Labels
class PromoteService < BaseService
BATCH_SIZE = 1000
# rubocop: disable CodeReuse/ActiveRecord
def execute(label)
return unless project.group &&
label.is_a?(ProjectLabel)
ProjectLabel.transaction do
# use the existing group label if it exists
group_label = find_or_create_group_label(label)
label_ids_for_merge(group_label).find_in_batches(batch_size: BATCH_SIZE) do |batched_ids|
update_old_label_relations(group_label, batched_ids)
destroy_project_labels(batched_ids)
end
group_label
end
end
# rubocop: enable CodeReuse/ActiveRecord
private
def update_old_label_relations(group_label, old_label_ids)
update_issuables(group_label, old_label_ids)
update_resource_label_events(group_label, old_label_ids)
update_issue_board_lists(group_label, old_label_ids)
update_priorities(group_label, old_label_ids)
subscribe_users(group_label, old_label_ids)
end
# rubocop: disable CodeReuse/ActiveRecord
def subscribe_users(group_label, label_ids)
# users can be subscribed to multiple labels that will be merged into the group one
# we want to keep only one subscription / user
ids_to_update = Subscription.where(subscribable_id: label_ids, subscribable_type: 'Label')
.group(:user_id)
.pluck('MAX(id)')
Subscription.where(id: ids_to_update).update_all(subscribable_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def label_ids_for_merge(group_label)
LabelsFinder
.new(current_user, title: group_label.title, group_id: project.group.id)
.execute(skip_authorization: true)
.where.not(id: group_label)
.select(:id, :project_id, :group_id, :type) # Can't use pluck() to avoid object-creation because of the batching
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def update_issuables(group_label, label_ids)
LabelLink
.where(label: label_ids)
.update_all(label_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def update_resource_label_events(group_label, label_ids)
ResourceLabelEvent
.where(label: label_ids)
.update_all(label_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def update_issue_board_lists(group_label, label_ids)
List
.where(label: label_ids)
.update_all(label_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def update_priorities(group_label, label_ids)
LabelPriority
.where(label: label_ids)
.update_all(label_id: group_label.id)
end
# rubocop: enable CodeReuse/ActiveRecord
# rubocop: disable CodeReuse/ActiveRecord
def destroy_project_labels(label_ids)
Label.where(id: label_ids).destroy_all # rubocop: disable Cop/DestroyAll
end
# Returns the group-level counterpart of the given project label, creating it
# (copying title, description and color) when it does not exist yet.
def find_or_create_group_label(label)
  attrs = label.attributes.slice('title', 'description', 'color')

  group_label = GroupLabel
    .create_with(attrs)
    .find_or_initialize_by(group_id: project.group.id, title: label.title)
  group_label.save! unless group_label.persisted?

  group_label
end
end
end
Labels::PromoteService.prepend_mod_with('Labels::PromoteService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Labels::PromoteService, feature_category: :team_planning do
describe '#execute' do
let_it_be(:user) { create(:user) }
context 'without a group' do
let!(:project_1) { create(:project) }
let!(:project_label_1_1) { create(:label, project: project_1) }
subject(:service) { described_class.new(project_1, user) }
it 'fails on project without group' do
expect(service.execute(project_label_1_1)).to be_falsey
end
end
context 'with a group' do
let_it_be(:promoted_label_name) { "Promoted Label" }
let_it_be(:untouched_label_name) { "Untouched Label" }
let_it_be(:promoted_description) { "Promoted Description" }
let_it_be(:promoted_color) { "#0000FF" }
let_it_be(:label_2_1_priority) { 1 }
let_it_be(:label_3_1_priority) { 2 }
let_it_be(:group_1) { create(:group) }
let_it_be(:group_2) { create(:group) }
let_it_be(:project_1) { create(:project, :repository, namespace: group_1) }
let_it_be(:project_2) { create(:project, :repository, namespace: group_1) }
let_it_be(:project_3) { create(:project, :repository, namespace: group_1) }
let_it_be(:project_4) { create(:project, :repository, namespace: group_2) }
# Labels/issues can't be lazily created so we might as well eager initialize
# all other objects too since we use them inside
let_it_be(:project_label_1_1) { create(:label, project: project_1, name: promoted_label_name, color: promoted_color, description: promoted_description) }
let_it_be(:project_label_1_2) { create(:label, project: project_1, name: untouched_label_name) }
let_it_be(:project_label_2_1) { create(:label, project: project_2, priority: label_2_1_priority, name: promoted_label_name, color: "#FF0000") }
let_it_be(:project_label_3_1) { create(:label, project: project_3, priority: label_3_1_priority, name: promoted_label_name) }
let_it_be(:project_label_3_2) { create(:label, project: project_3, priority: 1, name: untouched_label_name) }
let_it_be(:project_label_4_1) { create(:label, project: project_4, name: promoted_label_name) }
let_it_be(:issue_1_1) { create(:labeled_issue, project: project_1, labels: [project_label_1_1, project_label_1_2]) }
let_it_be(:issue_1_2) { create(:labeled_issue, project: project_1, labels: [project_label_1_2]) }
let_it_be(:issue_2_1) { create(:labeled_issue, project: project_2, labels: [project_label_2_1]) }
let_it_be(:issue_4_1) { create(:labeled_issue, project: project_4, labels: [project_label_4_1]) }
let_it_be(:merge_3_1) { create(:labeled_merge_request, source_project: project_3, target_project: project_3, labels: [project_label_3_1, project_label_3_2]) }
let_it_be(:issue_board_2_1) { create(:board, project: project_2) }
let_it_be(:issue_board_list_2_1) { create(:list, board: issue_board_2_1, label: project_label_2_1) }
let(:new_label) { group_1.labels.find_by(title: promoted_label_name) }
subject(:service) { described_class.new(project_1, user) }
it 'fails on group label' do
group_label = create(:group_label, group: group_1)
expect(service.execute(group_label)).to be_falsey
end
shared_examples 'promoting a project label to a group label' do
it 'is truthy on success' do
expect(service.execute(project_label_1_1)).to be_truthy
end
it 'removes all project labels with that title within the group' do
expect { service.execute(project_label_1_1) }.to change(project_2.labels, :count).by(-1).and \
change(project_3.labels, :count).by(-1)
end
it 'keeps users\' subscriptions' do
user2 = create(:user)
project_label_1_1.subscriptions.create!(user: user, subscribed: true)
project_label_2_1.subscriptions.create!(user: user, subscribed: true)
project_label_3_2.subscriptions.create!(user: user, subscribed: true)
project_label_2_1.subscriptions.create!(user: user2, subscribed: true)
expect { service.execute(project_label_1_1) }.to change { Subscription.count }.from(4).to(3)
expect(new_label).to be_subscribed(user)
expect(new_label).to be_subscribed(user2)
end
it 'recreates priorities' do
service.execute(project_label_1_1)
expect(new_label.priority(project_1)).to be_nil
expect(new_label.priority(project_2)).to eq(label_2_1_priority)
expect(new_label.priority(project_3)).to eq(label_3_1_priority)
end
it 'does not touch project out of promoted group' do
service.execute(project_label_1_1)
project_4_new_label = project_4.labels.find_by(title: promoted_label_name)
expect(project_4_new_label).not_to be_nil
expect(project_4_new_label.id).to eq(project_label_4_1.id)
end
it 'does not touch out of group priority' do
service.execute(project_label_1_1)
expect(new_label.priority(project_4)).to be_nil
end
it 'relinks issue with the promoted label' do
service.execute(project_label_1_1)
issue_label = issue_1_1.labels.find_by(title: promoted_label_name)
expect(issue_label).not_to be_nil
expect(issue_label.id).to eq(new_label.id)
end
it 'does not remove untouched labels from issue' do
expect { service.execute(project_label_1_1) }.not_to change(issue_1_1.labels, :count)
end
it 'does not relink untouched label in issue' do
service.execute(project_label_1_1)
issue_label = issue_1_2.labels.find_by(title: untouched_label_name)
expect(issue_label).not_to be_nil
expect(issue_label.id).to eq(project_label_1_2.id)
end
it 'relinks issues with merged labels' do
service.execute(project_label_1_1)
issue_label = issue_2_1.labels.find_by(title: promoted_label_name)
expect(issue_label).not_to be_nil
expect(issue_label.id).to eq(new_label.id)
end
it 'does not relink issues from other group' do
service.execute(project_label_1_1)
issue_label = issue_4_1.labels.find_by(title: promoted_label_name)
expect(issue_label).not_to be_nil
expect(issue_label.id).to eq(project_label_4_1.id)
end
it 'updates merge request' do
service.execute(project_label_1_1)
merge_label = merge_3_1.labels.find_by(title: promoted_label_name)
expect(merge_label).not_to be_nil
expect(merge_label.id).to eq(new_label.id)
end
it 'updates board lists' do
service.execute(project_label_1_1)
list = issue_board_2_1.lists.find_by(label: new_label)
expect(list).not_to be_nil
end
# In case someone adds a new relation to Label.rb and forgets to relink it
# and the database doesn't have foreign key constraints
it 'relinks all relations' do
service.execute(project_label_1_1)
Label.reflect_on_all_associations.each do |association|
expect(project_label_1_1.send(association.name).reset).not_to be_any
end
end
end
context 'when there is an existing identical group label' do
let!(:existing_group_label) { create(:group_label, group: group_1, title: project_label_1_1.title) }
it 'uses the existing group label' do
expect { service.execute(project_label_1_1) }
.to change(project_1.labels, :count).by(-1)
.and not_change(group_1.labels, :count)
expect(new_label).not_to be_nil
end
it 'does not create a new group label clone' do
expect { service.execute(project_label_1_1) }.not_to change { GroupLabel.maximum(:id) }
end
it_behaves_like 'promoting a project label to a group label'
end
context 'when there is no existing identical group label' do
let(:existing_group_label) { nil }
it 'recreates the label as a group label' do
expect { service.execute(project_label_1_1) }
.to change(project_1.labels, :count).by(-1)
.and change(group_1.labels, :count).by(1)
expect(new_label).not_to be_nil
end
it 'copies title, description and color to cloned group label' do
service.execute(project_label_1_1)
expect(new_label.title).to eq(promoted_label_name)
expect(new_label.description).to eq(promoted_description)
expect(new_label.color).to be_color(promoted_color)
end
it_behaves_like 'promoting a project label to a group label'
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Labels::TransferService class
#
# Used to recreate the missing group labels at the project level.
#
# When a project is moved out of a group, group labels applied to the
# project's issues or merge requests would dangle; equivalent labels are
# found or created under the new hierarchy and the existing label links
# and priorities are re-pointed at them.
module Labels
  class TransferService
    def initialize(current_user, old_group, project)
      @current_user = current_user
      @old_group = old_group
      @project = project
    end

    def execute
      return unless old_group.present?

      # Capture the affected link ids up front, before labels are swapped.
      # rubocop: disable CodeReuse/ActiveRecord
      link_ids = group_labels_applied_to_issues.pluck("label_links.id") +
        group_labels_applied_to_merge_requests.pluck("label_links.id")
      # rubocop: enable CodeReuse/ActiveRecord

      Label.transaction do
        labels_to_transfer.find_each do |label|
          new_label_id = find_or_create_label!(label)

          # An equivalent label is already reachable from the project.
          next if new_label_id == label.id

          update_label_links(link_ids, old_label_id: label.id, new_label_id: new_label_id)
          update_label_priorities(old_label_id: label.id, new_label_id: new_label_id)
        end
      end
    end

    private

    attr_reader :current_user, :old_group, :project

    # Group labels applied to the project's issues or MRs, deduplicated
    # across both sources.
    # rubocop: disable CodeReuse/ActiveRecord
    def labels_to_transfer
      Label
        .from_union([
          group_labels_applied_to_issues,
          group_labels_applied_to_merge_requests
        ])
        .reorder(nil)
        .distinct
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # NOTE: memoised under @labels_applied_to_issues (ivar name differs from
    # the method name).
    # rubocop: disable CodeReuse/ActiveRecord
    def group_labels_applied_to_issues
      @labels_applied_to_issues ||= Label.joins(:issues)
        .joins("INNER JOIN namespaces on namespaces.id = labels.group_id AND namespaces.type = 'Group'")
        .where(issues: { project_id: project.id }).reorder(nil)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # rubocop: disable CodeReuse/ActiveRecord
    def group_labels_applied_to_merge_requests
      @labels_applied_to_mrs ||= Label.joins(:merge_requests)
        .joins("INNER JOIN namespaces on namespaces.id = labels.group_id AND namespaces.type = 'Group'")
        .where(merge_requests: { target_project_id: project.id }).reorder(nil)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # Finds an equivalent label reachable from the project (ancestor groups
    # included) or creates a project-level copy; returns its id.
    def find_or_create_label!(label)
      params = label.attributes.slice('title', 'description', 'color')
      new_label = FindOrCreateService.new(current_user, project, params.merge(include_ancestor_groups: true)).execute

      new_label.id
    end

    # Restricted to the link ids captured in #execute so only this project's
    # links are touched.
    # rubocop: disable CodeReuse/ActiveRecord
    def update_label_links(link_ids, old_label_id:, new_label_id:)
      LabelLink.where(id: link_ids, label_id: old_label_id)
        .update_all(label_id: new_label_id)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # rubocop: disable CodeReuse/ActiveRecord
    def update_label_priorities(old_label_id:, new_label_id:)
      LabelPriority.where(project_id: project.id, label_id: old_label_id)
        .update_all(label_id: new_label_id)
    end
    # rubocop: enable CodeReuse/ActiveRecord
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Labels::TransferService, feature_category: :team_planning do
shared_examples 'transfer labels' do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:old_group_ancestor) { create(:group) }
let_it_be(:old_group) { create(:group, parent: old_group_ancestor) }
let_it_be(:new_group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: new_group) }
subject(:service) { described_class.new(user, old_group, project) }
before do
old_group_ancestor.add_developer(user)
new_group.add_developer(user)
end
it 'recreates missing group labels at project level and assigns them to the issuables' do
old_group_label_1 = create(:group_label, group: old_group)
old_group_label_2 = create(:group_label, group: old_group)
labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label_1])
labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_label_2])
expect { service.execute }.to change(project.labels, :count).by(2)
expect(labeled_issue.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_label_1.title))
expect(labeled_merge_request.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_label_2.title))
end
it 'recreates missing ancestor group labels at project level and assigns them to the issuables' do
old_group_ancestor_label_1 = create(:group_label, group: old_group_ancestor)
old_group_ancestor_label_2 = create(:group_label, group: old_group_ancestor)
labeled_issue = create(:labeled_issue, project: project, labels: [old_group_ancestor_label_1])
labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_ancestor_label_2])
expect { service.execute }.to change(project.labels, :count).by(2)
expect(labeled_issue.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_ancestor_label_1.title))
expect(labeled_merge_request.reload.labels).to contain_exactly(project.labels.find_by_title(old_group_ancestor_label_2.title))
end
it 'recreates label priorities related to the missing group labels' do
old_group_label = create(:group_label, group: old_group)
create(:labeled_issue, project: project, labels: [old_group_label])
create(:label_priority, project: project, label: old_group_label, priority: 1)
service.execute
new_project_label = project.labels.find_by(title: old_group_label.title)
expect(new_project_label.id).not_to eq old_group_label.id
expect(new_project_label.priorities).not_to be_empty
end
it 'does not recreate missing group labels that are not applied to issues or merge requests' do
old_group_label = create(:group_label, group: old_group)
service.execute
expect(project.labels.where(title: old_group_label.title)).to be_empty
end
it 'does not recreate missing group labels that already exist in the project group' do
old_group_label = create(:group_label, group: old_group)
labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label])
new_group_label = create(:group_label, group: new_group, title: old_group_label.title)
service.execute
expect(project.labels.where(title: old_group_label.title)).to be_empty
expect(labeled_issue.reload.labels).to contain_exactly(new_group_label)
end
it 'updates only label links in the given project' do
old_group_label = create(:group_label, group: old_group)
other_project = create(:project, group: old_group)
labeled_issue = create(:labeled_issue, project: project, labels: [old_group_label])
other_project_labeled_issue = create(:labeled_issue, project: other_project, labels: [old_group_label])
service.execute
expect(labeled_issue.reload.labels).not_to include(old_group_label)
expect(other_project_labeled_issue.reload.labels).to contain_exactly(old_group_label)
end
context 'when moving within the same ancestor group' do
let(:other_subgroup) { create(:group, parent: old_group_ancestor) }
let(:project) { create(:project, :repository, group: other_subgroup) }
it 'does not recreate ancestor group labels' do
old_group_ancestor_label_1 = create(:group_label, group: old_group_ancestor)
old_group_ancestor_label_2 = create(:group_label, group: old_group_ancestor)
labeled_issue = create(:labeled_issue, project: project, labels: [old_group_ancestor_label_1])
labeled_merge_request = create(:labeled_merge_request, source_project: project, labels: [old_group_ancestor_label_2])
expect { service.execute }.not_to change(project.labels, :count)
expect(labeled_issue.reload.labels).to contain_exactly(old_group_ancestor_label_1)
expect(labeled_merge_request.reload.labels).to contain_exactly(old_group_ancestor_label_2)
end
end
end
end
it_behaves_like 'transfer labels'
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Labels
  # Resolves label references (titles or ids) against the labels actually
  # visible on a project or group, including ancestor group labels.
  class AvailableLabelsService
    attr_reader :current_user, :parent, :params

    # parent is a Project or a Group; params carries the raw label input.
    def initialize(current_user, parent, params)
      @current_user = current_user
      @parent = parent
      @params = params
    end

    # Maps the titles stored under params[key] to Label records, creating the
    # missing ones unless find_only is true.
    #
    # NOTE: mutates params (the key is deleted). Accepts an array of titles
    # or a comma-separated string.
    def find_or_create_by_titles(key = :labels, find_only: false)
      labels = params.delete(key)

      return [] unless labels

      labels = labels.split(',').map(&:strip) if labels.is_a?(String)

      # Pre-fetch matches once so FindOrCreateService does not query per title.
      existing_labels = LabelsFinder.new(current_user, finder_params(labels)).execute.index_by(&:title)

      labels.map do |label_name|
        label = Labels::FindOrCreateService.new(
          current_user,
          parent,
          include_ancestor_groups: true,
          title: label_name,
          existing_labels_by_title: existing_labels
        ).execute(find_only: find_only)

        label
      end.compact
    end

    # Keeps only the ids under params[key] that refer to available labels,
    # preserving the caller's ordering (set intersection keeps lhs order).
    def filter_labels_ids_in_param(key)
      ids = Array.wrap(params[key])
      return [] if ids.empty?

      # rubocop:disable CodeReuse/ActiveRecord
      existing_ids = available_labels.id_in(ids).pluck(:id)
      # rubocop:enable CodeReuse/ActiveRecord
      ids.map(&:to_i) & existing_ids
    end

    # Of the given ids, returns those belonging to available labels that have
    # lock_on_merge set.
    def filter_locked_label_ids(ids)
      available_labels.with_lock_on_merge.id_in(ids).pluck(:id) # rubocop:disable CodeReuse/ActiveRecord
    end

    # All labels visible on the parent (memoised). Group parents are limited
    # to group labels via only_group_labels below.
    def available_labels
      @available_labels ||= LabelsFinder.new(current_user, finder_params).execute
    end

    private

    def finder_params(titles = nil)
      finder_params = { include_ancestor_groups: true }
      finder_params[:title] = titles if titles

      case parent
      when Group
        finder_params[:group_id] = parent.id
        finder_params[:only_group_labels] = true
      when Project
        finder_params[:project_id] = parent.id
      end

      finder_params
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Labels::AvailableLabelsService, feature_category: :team_planning do
let_it_be(:user) { create(:user) }
let(:project) { create(:project, :public, group: group) }
let(:group) { create(:group) }
let(:project_label) { create(:label, project: project) }
let(:project_label_locked) { create(:label, project: project, lock_on_merge: true) }
let(:other_project_label) { create(:label) }
let(:other_project_label_locked) { create(:label, lock_on_merge: true) }
let(:group_label) { create(:group_label, group: group) }
let(:group_label_locked) { create(:group_label, group: group, lock_on_merge: true) }
let(:other_group_label) { create(:group_label) }
let!(:labels) { [project_label, other_project_label, group_label, other_group_label, project_label_locked, other_project_label_locked, group_label_locked] }
describe '#find_or_create_by_titles' do
let(:label_titles) { labels.map(&:title).push('non existing title') }
context 'when parent is a project' do
context 'when a user is not a project member' do
it 'returns only relevant label ids' do
result = described_class.new(user, project, labels: label_titles).find_or_create_by_titles
expect(result).to match_array([project_label, group_label, project_label_locked, group_label_locked])
end
end
context 'when a user is a project member' do
before do
project.add_developer(user)
end
it 'creates new labels for not found titles' do
result = described_class.new(user, project, labels: label_titles).find_or_create_by_titles
expect(result.count).to eq(8)
expect(result).to include(project_label, group_label)
expect(result).not_to include(other_project_label, other_group_label)
end
it 'do not cause additional query for finding labels' do
label_titles = [project_label.title]
control_count = ActiveRecord::QueryRecorder.new { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }
new_label = create(:label, project: project)
label_titles = [project_label.title, new_label.title]
expect { described_class.new(user, project, labels: label_titles).find_or_create_by_titles }.not_to exceed_query_limit(control_count)
end
end
end
context 'when parent is a group' do
context 'when a user is not a group member' do
it 'returns only relevant label ids' do
result = described_class.new(user, group, labels: label_titles).find_or_create_by_titles
expect(result).to match_array([group_label, group_label_locked])
end
end
context 'when a user is a group member' do
before do
group.add_developer(user)
end
it 'creates new labels for not found titles' do
result = described_class.new(user, group, labels: label_titles).find_or_create_by_titles
expect(result.count).to eq(8)
expect(result).to include(group_label, group_label_locked)
expect(result).not_to include(project_label, other_project_label, other_group_label, project_label_locked, other_project_label_locked)
end
end
end
end
describe '#filter_labels_ids_in_param' do
let(:label_ids) { labels.map(&:id).push(non_existing_record_id) }
context 'when parent is a project' do
it 'returns only relevant label ids' do
result = described_class.new(user, project, ids: label_ids).filter_labels_ids_in_param(:ids)
expect(result).to match_array([project_label.id, group_label.id, project_label_locked.id, group_label_locked.id])
end
it 'returns labels in preserved order' do
result = described_class.new(user, project, ids: label_ids.reverse).filter_labels_ids_in_param(:ids)
expect(result).to eq([group_label_locked.id, project_label_locked.id, group_label.id, project_label.id])
end
end
context 'when parent is a group' do
it 'returns only relevant label ids' do
result = described_class.new(user, group, ids: label_ids).filter_labels_ids_in_param(:ids)
expect(result).to match_array([group_label.id, group_label_locked.id])
end
end
it 'accepts a single id parameter' do
result = described_class.new(user, project, label_id: project_label.id).filter_labels_ids_in_param(:label_id)
expect(result).to match_array([project_label.id])
end
end
describe '#filter_locked_label_ids' do
let(:label_ids) { labels.map(&:id) }
context 'when parent is a project' do
it 'returns only relevant label ids' do
result = described_class.new(user, project, ids: label_ids).filter_locked_label_ids(label_ids)
expect(result).to match_array([project_label_locked.id, group_label_locked.id])
end
end
context 'when parent is a group' do
it 'returns only relevant label ids' do
result = described_class.new(user, group, ids: label_ids).filter_locked_label_ids(label_ids)
expect(result).to match_array([group_label_locked.id])
end
end
end
describe '#available_labels' do
context 'when parent is a project' do
it 'returns only relevant labels' do
result = described_class.new(user, project, {}).available_labels
expect(result.count).to eq(4)
expect(result).to include(project_label, group_label, project_label_locked, group_label_locked)
expect(result).not_to include(other_project_label, other_group_label, other_project_label_locked)
end
end
context 'when parent is a group' do
it 'returns only relevant labels' do
result = described_class.new(user, group, {}).available_labels
expect(result.count).to eq(2)
expect(result).to include(group_label, group_label_locked)
expect(result).not_to include(project_label, other_project_label, other_group_label, project_label_locked, other_project_label_locked)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Labels
  class UpdateService < Labels::BaseService
    def initialize(params = {})
      @params = params.to_h.dup.with_indifferent_access
    end

    # Applies the (sanitised) params to +label+ and returns it; callers
    # inspect label.valid? / label.errors for the outcome.
    def execute(label)
      normalize_params!(label)

      label.update(params)

      label
    end

    private

    # Renames :new_name to :name, converts a named color to its hex code and
    # drops :lock_on_merge when it may not be set on this label.
    def normalize_params!(label)
      params[:name] = params.delete(:new_name) if params.key?(:new_name)
      params[:color] = convert_color_name_to_hex if params[:color].present?
      params.delete(:lock_on_merge) unless allow_lock_on_merge?(label)
    end

    def allow_lock_on_merge?(label)
      return if label.template?
      return unless label.respond_to?(:parent_container) && label.parent_container.supports_lock_on_merge?

      # Turning the flag on is allowed, but an already-locked label must not
      # be unlocked again — hence the negation.
      !label.lock_on_merge
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Labels::UpdateService, feature_category: :team_planning do
describe '#execute' do
let(:project) { create(:project) }
let(:hex_color) { '#FF0000' }
let(:named_color) { 'red' }
let(:upcase_color) { 'RED' }
let(:spaced_color) { ' red ' }
let(:unknown_color) { 'unknown' }
let(:no_color) { '' }
let(:expected_saved_color) { ::Gitlab::Color.of(hex_color) }
before do
@label = Labels::CreateService.new(title: 'Initial', color: '#000000').execute(project: project)
expect(@label).to be_persisted
end
context 'with color in hex-code' do
it 'updates the label' do
label = described_class.new(params_with(hex_color)).execute(@label)
expect(label).to be_valid
expect(label.reload.color).to eq expected_saved_color
end
end
context 'with color in allowed name' do
it 'updates the label' do
label = described_class.new(params_with(named_color)).execute(@label)
expect(label).to be_valid
expect(label.reload.color).to eq expected_saved_color
end
end
context 'with color in up-case allowed name' do
it 'updates the label' do
label = described_class.new(params_with(upcase_color)).execute(@label)
expect(label).to be_valid
expect(label.reload.color).to eq expected_saved_color
end
end
context 'with color surrounded by spaces' do
it 'updates the label' do
label = described_class.new(params_with(spaced_color)).execute(@label)
expect(label).to be_valid
expect(label.reload.color).to eq expected_saved_color
end
end
context 'with unknown color' do
it 'doesn\'t update the label' do
label = described_class.new(params_with(unknown_color)).execute(@label)
expect(label).not_to be_valid
end
end
context 'with no color' do
it 'doesn\'t update the label' do
label = described_class.new(params_with(no_color)).execute(@label)
expect(label).not_to be_valid
end
end
describe 'lock_on_merge' do
let_it_be(:params) { { lock_on_merge: true } }
context 'when feature flag is disabled' do
before do
stub_feature_flags(enforce_locked_labels_on_merge: false)
end
it 'does not allow setting lock_on_merge' do
label = described_class.new(params).execute(@label)
expect(label.reload.lock_on_merge).to be_falsey
template_label = Labels::CreateService.new(title: 'Initial').execute(template: true)
label = described_class.new(params).execute(template_label)
expect(label.reload.lock_on_merge).to be_falsey
end
end
context 'when feature flag is enabled' do
it 'allows setting lock_on_merge' do
label = described_class.new(params).execute(@label)
expect(label.reload.lock_on_merge).to be_truthy
end
it 'does not allow lock_on_merge to be unset' do
label_locked = Labels::CreateService.new(title: 'Initial', lock_on_merge: true).execute(project: project)
label = described_class.new(title: 'test', lock_on_merge: false).execute(label_locked)
expect(label.reload.lock_on_merge).to be_truthy
expect(label.reload.title).to eq 'test'
end
it 'does not allow setting lock_on_merge for templates' do
template_label = Labels::CreateService.new(title: 'Initial').execute(template: true)
label = described_class.new(params).execute(template_label)
expect(label.reload.lock_on_merge).to be_falsey
end
end
end
end
def params_with(color)
{
title: 'A Label',
color: color
}
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Labels
  class CreateService < Labels::BaseService
    def initialize(params = {})
      @params = params.to_h.dup.with_indifferent_access
    end

    # Creates a label under the project or group from +target_params+, or a
    # template label when target_params[:template] is set. Returns the label
    # (persisted or not); otherwise logs a warning.
    def execute(target_params)
      params[:color] = convert_color_name_to_hex if params[:color].present?

      container = target_params[:project] || target_params[:group]

      if container.present?
        create_in_container(container)
      elsif target_params[:template]
        create_template_label
      else
        Gitlab::AppLogger.warn("target_params should contain :project or :group or :template, actual value: #{target_params}")
      end
    end

    private

    # Creates the label scoped to a project or group; :lock_on_merge is only
    # honoured where the container supports it.
    def create_in_container(container)
      params.delete(:lock_on_merge) unless container.supports_lock_on_merge?

      container.labels.create(params)
    end

    # Builds and saves a global template label.
    def create_template_label
      label = Label.new(params)
      label.template = true
      label.save
      label
    end
  end
end
Labels::CreateService.prepend_mod_with('Labels::CreateService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Labels::CreateService, feature_category: :team_planning do
describe '#execute' do
let(:project) { create(:project) }
let(:group) { create(:group) }
let(:hex_color) { '#FF0000' }
let(:named_color) { 'red' }
let(:upcase_color) { 'RED' }
let(:spaced_color) { ' red ' }
let(:unknown_color) { 'unknown' }
let(:no_color) { '' }
let(:expected_saved_color) { ::Gitlab::Color.of(hex_color) }
context 'in a project' do
context 'with color in hex-code' do
it 'creates a label' do
label = described_class.new(params_with(hex_color)).execute(project: project)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color in allowed name' do
it 'creates a label' do
label = described_class.new(params_with(named_color)).execute(project: project)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color in up-case allowed name' do
it 'creates a label' do
label = described_class.new(params_with(upcase_color)).execute(project: project)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color surrounded by spaces' do
it 'creates a label' do
label = described_class.new(params_with(spaced_color)).execute(project: project)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with unknown color' do
it 'doesn\'t create a label' do
label = described_class.new(params_with(unknown_color)).execute(project: project)
expect(label).not_to be_persisted
end
end
context 'with no color' do
it 'doesn\'t create a label' do
label = described_class.new(params_with(no_color)).execute(project: project)
expect(label).not_to be_persisted
end
end
end
context 'in a group' do
context 'with color in hex-code' do
it 'creates a label' do
label = described_class.new(params_with(hex_color)).execute(group: group)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color in allowed name' do
it 'creates a label' do
label = described_class.new(params_with(named_color)).execute(group: group)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color in up-case allowed name' do
it 'creates a label' do
label = described_class.new(params_with(upcase_color)).execute(group: group)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color surrounded by spaces' do
it 'creates a label' do
label = described_class.new(params_with(spaced_color)).execute(group: group)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with unknown color' do
it 'doesn\'t create a label' do
label = described_class.new(params_with(unknown_color)).execute(group: group)
expect(label).not_to be_persisted
end
end
context 'with no color' do
it 'doesn\'t create a label' do
label = described_class.new(params_with(no_color)).execute(group: group)
expect(label).not_to be_persisted
end
end
end
context 'in admin area' do
context 'with color in hex-code' do
it 'creates a label' do
label = described_class.new(params_with(hex_color)).execute(template: true)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color in allowed name' do
it 'creates a label' do
label = described_class.new(params_with(named_color)).execute(template: true)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color in up-case allowed name' do
it 'creates a label' do
label = described_class.new(params_with(upcase_color)).execute(template: true)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with color surrounded by spaces' do
it 'creates a label' do
label = described_class.new(params_with(spaced_color)).execute(template: true)
expect(label).to be_persisted
expect(label.color).to eq expected_saved_color
end
end
context 'with unknown color' do
it 'doesn\'t create a label' do
label = described_class.new(params_with(unknown_color)).execute(template: true)
expect(label).not_to be_persisted
end
end
context 'with no color' do
it 'doesn\'t create a label' do
label = described_class.new(params_with(no_color)).execute(template: true)
expect(label).not_to be_persisted
end
end
end
describe 'lock_on_merge' do
let_it_be(:params) { { title: 'Locked label', lock_on_merge: true } }
context 'when feature flag is disabled' do
before do
stub_feature_flags(enforce_locked_labels_on_merge: false)
end
it 'does not allow setting lock_on_merge' do
label = described_class.new(params).execute(project: project)
label2 = described_class.new(params).execute(group: group)
label3 = described_class.new(params).execute(template: true)
expect(label.lock_on_merge).to be_falsey
expect(label2.lock_on_merge).to be_falsey
expect(label3).not_to be_persisted
end
end
context 'when feature flag is enabled' do
it 'allows setting lock_on_merge' do
label = described_class.new(params).execute(project: project)
label2 = described_class.new(params).execute(group: group)
expect(label.lock_on_merge).to be_truthy
expect(label2.lock_on_merge).to be_truthy
end
# Typo fix in the example description: "alow" -> "allow".
it 'does not allow setting lock_on_merge for templates' do
  label = described_class.new(params).execute(template: true)

  expect(label).not_to be_persisted
end
end
end
end
def params_with(color)
{
title: 'A Label',
color: color
}
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module IssueEmailParticipants
  # Adds external email participants to an issue (Service Desk flow).
  #
  # Guards on the `issue_email_participants` feature flag, the current
  # user's admin ability on the target, and the presence of emails, then
  # persists the new participants and records a system note.
  class CreateService < ::BaseProjectService
    # Hard cap on emails processed per call; extra entries are dropped.
    MAX_NUMBER_OF_EMAILS = 6

    attr_reader :target, :emails

    def initialize(target:, current_user:, emails:)
      super(project: target.project, current_user: current_user)

      @target = target
      @emails = emails
    end

    # Returns a ServiceResponse; the success message lists the added emails.
    def execute
      return error_feature_flag unless Feature.enabled?(:issue_email_participants, target.project)
      return error_underprivileged unless current_user.can?(:"admin_#{target.to_ability_name}", target)
      return error_no_participants unless emails.present?

      added_emails = add_participants(deduplicate_and_limit_emails)

      if added_emails.any?
        message = add_system_note(added_emails)
        ServiceResponse.success(message: message.upcase_first << ".")
      else
        error_no_participants
      end
    end

    private

    def deduplicate_and_limit_emails
      existing_emails = target.email_participants_emails_downcase
      # Compare downcase versions, but use the original email: keying the
      # hash by the downcased email makes `excluding(*existing_emails)`
      # (Hash#except) drop entries that already participate, while
      # `each_value` preserves the caller-supplied casing.
      # (Previously the key was the `[downcase, original]` pair, which could
      # never match the plain strings in `existing_emails`, so the
      # exclusion step was dead code.)
      emails.index_by { |email| email.downcase }.excluding(*existing_emails).each_value
        .first(MAX_NUMBER_OF_EMAILS)
    end

    # Creates one IssueEmailParticipant per email; returns only the emails
    # whose records actually persisted (e.g. passed model validations).
    def add_participants(emails_to_add)
      added_emails = []
      emails_to_add.each do |email|
        new_participant = target.issue_email_participants.create(email: email)
        added_emails << email if new_participant.persisted?
      end
      added_emails
    end

    def add_system_note(added_emails)
      message = format(_("added %{emails}"), emails: added_emails.to_sentence)
      ::SystemNoteService.add_email_participants(target, project, current_user, message)
      message
    end

    def error(message)
      ServiceResponse.error(message: message)
    end

    def error_feature_flag
      # Don't translate feature flag error because it's temporary.
      error("Feature flag issue_email_participants is not enabled for this project.")
    end

    def error_underprivileged
      error(_("You don't have permission to add email participants."))
    end

    def error_no_participants
      error(_("No email participants were added. Either none were provided, or they already exist."))
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for IssueEmailParticipants::CreateService: permission checks,
# deduplication/limiting of provided emails, and the feature flag guard.
RSpec.describe IssueEmailParticipants::CreateService, feature_category: :service_desk do
  shared_examples 'a successful service execution' do
    it 'creates new participants', :aggregate_failures do
      expect(response).to be_success

      issue.reset
      note = issue.notes.last
      expect(note.system?).to be true
      expect(note.author).to eq(user)

      participants_emails = issue.email_participants_emails_downcase

      expected_emails.each do |email|
        expect(participants_emails).to include(email)
        expect(response.message).to include(email)
        expect(note.note).to include(email)
      end
    end
  end

  shared_examples 'a failed service execution' do
    it 'returns error ServiceResponse with message', :aggregate_failures do
      expect(response).to be_error
      expect(response.message).to eq(error_message)
    end
  end

  describe '#execute' do
    let_it_be_with_reload(:project) { create(:project) }
    let_it_be(:user) { create(:user) }
    let_it_be(:issue) { create(:issue, project: project) }

    let(:emails) { nil }
    let(:service) { described_class.new(target: issue, current_user: user, emails: emails) }
    let(:expected_emails) { emails }

    let(:error_feature_flag) { "Feature flag issue_email_participants is not enabled for this project." }
    let(:error_underprivileged) { _("You don't have permission to add email participants.") }
    let(:error_no_participants) do
      _("No email participants were added. Either none were provided, or they already exist.")
    end

    subject(:response) { service.execute }

    context 'when the user is not a project member' do
      let(:error_message) { error_underprivileged }

      it_behaves_like 'a failed service execution'
    end

    context 'when user has reporter role in project' do
      before_all do
        project.add_reporter(user)
      end

      context 'when no emails are provided' do
        let(:error_message) { error_no_participants }

        it_behaves_like 'a failed service execution'
      end

      context 'when one email is provided' do
        let(:emails) { ['[email protected]'] }

        it_behaves_like 'a successful service execution'

        context 'when email is already a participant of the issue' do
          let(:error_message) { error_no_participants }

          before do
            issue.issue_email_participants.create!(email: emails.first)
          end

          it_behaves_like 'a failed service execution'

          context 'when email is formatted in a different case' do
            let(:emails) { ['[email protected]'] }

            it_behaves_like 'a failed service execution'
          end
        end
      end

      context 'when multiple emails are provided' do
        let(:emails) { ['[email protected]', '[email protected]'] }

        it_behaves_like 'a successful service execution'

        context 'when duplicate email provided' do
          let(:emails) { ['[email protected]', '[email protected]'] }
          let(:expected_emails) { emails[...-1] }

          it_behaves_like 'a successful service execution'
        end

        context 'when an email is already a participant of the issue' do
          let(:expected_emails) { emails[1...] }

          before do
            issue.issue_email_participants.create!(email: emails.first)
          end

          it_behaves_like 'a successful service execution'
        end
      end

      context 'when more than the allowed number of emails are provided' do
        let(:emails) { (1..7).map { |i| "user#{i}@example.com" } }
        let(:expected_emails) { emails[...-1] }

        it_behaves_like 'a successful service execution'
      end
    end

    context 'when feature flag issue_email_participants is disabled' do
      let(:error_message) { error_feature_flag }

      before do
        stub_feature_flags(issue_email_participants: false)
      end

      it_behaves_like 'a failed service execution'
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module DeployKeys
  # Builds and persists a deploy key attributed to the current user.
  class CreateService < Keys::BaseService
    # The +project+ keyword is accepted for call-site compatibility but is
    # not used by this (CE) implementation.
    def execute(project: nil)
      attributes = params.merge(user: user)
      DeployKey.create(attributes)
    end
  end
end

DeployKeys::CreateService.prepend_mod_with('DeployKeys::CreateService')
``` | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe DeployKeys::CreateService, feature_category: :continuous_delivery do
  let(:user) { create(:user) }
  let(:params) { attributes_for(:deploy_key) }

  subject(:service) { described_class.new(user, params) }

  it 'creates a deploy key' do
    expect { service.execute }.to change { DeployKey.where(params.merge(user: user)).count }.by(1)
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Uploads
  # Deletes a single upload (identified by secret + filename) attached to a
  # Group or Project, provided the current user may destroy it.
  class DestroyService < BaseService
    attr_accessor :model, :current_user

    def initialize(model, user = nil)
      @model = model
      @current_user = user
    end

    # Returns a success hash containing the upload, or an error hash when
    # the upload cannot be found, the user lacks permission, or the destroy
    # call fails.
    def execute(secret, filename)
      upload = find_upload(secret, filename)
      return error(not_found_message) unless allowed_to_destroy?(upload)

      if upload.destroy
        success(upload: upload)
      else
        error(_('Upload could not be deleted.'))
      end
    end

    private

    # True only when we have a user, found the upload, and the user holds
    # the :destroy_upload ability on it.
    def allowed_to_destroy?(upload)
      current_user && upload && current_user.can?(:destroy_upload, upload)
    end

    # Deliberately vague message: do not reveal whether the upload exists.
    def not_found_message
      _("The resource that you are attempting to access does not "\
        "exist or you don't have permission to perform this action.")
    end

    # rubocop: disable CodeReuse/ActiveRecord
    def find_upload(secret, filename)
      paths = uploader_class.new(model, secret: secret).upload_paths(filename)
      Upload.find_by(model: model, uploader: uploader_class.to_s, path: paths)
    rescue FileUploader::InvalidSecret
      nil
    end
    # rubocop: enable CodeReuse/ActiveRecord

    def uploader_class
      return NamespaceFileUploader if model.is_a?(Group)
      return FileUploader if model.is_a?(Project)

      raise ArgumentError, "unknown uploader for #{model.class.name}"
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Uploads::DestroyService: permission handling, upload lookup by
# secret/filename, and destroy-failure paths.
RSpec.describe Uploads::DestroyService, feature_category: :shared do
  let_it_be(:project) { create(:project) }
  let_it_be(:user) { create(:user) }
  let_it_be_with_reload(:upload) { create(:upload, :issuable_upload, model: project) }

  let(:filename) { File.basename(upload.path) }
  let(:secret) { upload.secret }
  let(:model) { project }
  let(:service) { described_class.new(model, user) }

  describe '#execute' do
    subject { service.execute(secret, filename) }

    # Covers both "missing upload" and "no permission": the service returns
    # the same opaque error for both.
    shared_examples_for 'upload not found' do
      it 'does not delete any upload' do
        expect { subject }.not_to change { Upload.count }
      end

      it 'returns an error' do
        expect(subject[:status]).to eq(:error)
        expect(subject[:message]).to eq("The resource that you are attempting to access does not "\
          "exist or you don't have permission to perform this action.")
      end
    end

    context 'when user is nil' do
      let(:user) { nil }

      it_behaves_like 'upload not found'
    end

    context 'when user cannot destroy upload' do
      before do
        project.add_developer(user)
      end

      it_behaves_like 'upload not found'
    end

    context 'when user can destroy upload' do
      before do
        project.add_maintainer(user)
      end

      it 'deletes the upload' do
        expect { subject }.to change { Upload.count }.by(-1)
      end

      it 'returns success response' do
        expect(subject[:status]).to eq(:success)
        expect(subject[:upload]).to eq(upload)
      end

      context 'when upload is not found' do
        let(:filename) { 'not existing filename' }

        it_behaves_like 'upload not found'
      end

      context 'when upload secret is not found' do
        let(:secret) { 'aaaaaaaaaa' }

        it_behaves_like 'upload not found'
      end

      context 'when upload secret has invalid format' do
        let(:secret) { 'invalid' }

        it_behaves_like 'upload not found'
      end

      context 'when unknown model is used' do
        let(:model) { user }

        it 'raises an error' do
          expect { subject }.to raise_exception(ArgumentError)
        end
      end

      context 'when upload belongs to other model' do
        let_it_be(:upload) { create(:upload, :namespace_upload) }

        it_behaves_like 'upload not found'
      end

      context 'when upload destroy fails' do
        before do
          allow(service).to receive(:find_upload).and_return(upload)
          allow(upload).to receive(:destroy).and_return(false)
        end

        it 'returns error' do
          expect(subject[:status]).to eq(:error)
          expect(subject[:message]).to eq('Upload could not be deleted.')
        end
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module ResourceAccessTokens
  # Revokes a project or group access token and schedules the backing bot
  # user for deletion (which also removes its membership).
  class RevokeService < BaseService
    include Gitlab::Utils::StrongMemoize

    RevokeAccessTokenError = Class.new(RuntimeError)

    def initialize(current_user, resource, access_token)
      @current_user = current_user
      @access_token = access_token
      @bot_user = access_token.user
      @resource = resource
    end

    # Returns a ServiceResponse. Fails when the current user may not
    # destroy tokens on the resource, or when the bot has no membership.
    def execute
      return error("#{current_user.name} cannot delete #{bot_user.name}") unless can_destroy_token?
      return error("Failed to find bot user") unless find_member

      access_token.revoke!

      destroy_bot_user

      log_event

      success("Access token #{access_token.name} has been revoked and the bot user has been scheduled for deletion.")
    rescue StandardError => e
      # Named `e` (not `error`) so the rescue variable does not shadow the
      # private `error` helper invoked below.
      log_error("Failed to revoke access token for #{bot_user.name}: #{e.message}")
      error(e.message)
    end

    private

    attr_reader :current_user, :access_token, :bot_user, :resource

    # Bot deletion happens asynchronously, with skip_authorization: true
    # passed through to the worker.
    def destroy_bot_user
      DeleteUserWorker.perform_async(current_user.id, bot_user.id, skip_authorization: true)
    end

    def can_destroy_token?
      %w[project group].include?(resource.class.name.downcase) && can?(current_user, :destroy_resource_access_tokens, resource)
    end

    # Memoized membership lookup; false when the resource is neither a
    # Project nor a Group, nil when the bot is not a member.
    def find_member
      strong_memoize(:member) do
        next false unless resource.is_a?(Project) || resource.is_a?(Group)

        resource.member(bot_user)
      end
    end

    def log_event
      ::Gitlab::AppLogger.info "PROJECT ACCESS TOKEN REVOCATION: revoked_by: #{current_user.username}, project_id: #{resource.id}, token_user: #{access_token.user.name}, token_id: #{access_token.id}"
    end

    def error(message)
      ServiceResponse.error(message: message)
    end

    def success(message)
      ServiceResponse.success(message: message)
    end
  end
end

ResourceAccessTokens::RevokeService.prepend_mod_with('ResourceAccessTokens::RevokeService')
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for ResourceAccessTokens::RevokeService across project and group
# resources: successful revocation, permission failures, and rollback when
# bot deletion fails. ("privileged" spelled correctly throughout.)
RSpec.describe ResourceAccessTokens::RevokeService, feature_category: :system_access do
  subject { described_class.new(user, resource, access_token).execute }

  let_it_be(:user) { create(:user) }
  let_it_be(:user_non_privileged) { create(:user) }
  let_it_be(:resource_bot) { create(:user, :project_bot) }

  let(:access_token) { create(:personal_access_token, user: resource_bot) }

  describe '#execute', :sidekiq_inline do
    shared_examples 'revokes access token' do
      it { expect(subject.success?).to be true }

      it { expect(subject.message).to eq("Access token #{access_token.name} has been revoked and the bot user has been scheduled for deletion.") }

      it 'calls delete user worker' do
        expect(DeleteUserWorker).to receive(:perform_async).with(user.id, resource_bot.id, skip_authorization: true)

        subject
      end

      it 'removes membership of bot user' do
        subject

        expect(resource.reload.users).not_to include(resource_bot)
      end

      it 'initiates user removal' do
        subject

        expect(
          Users::GhostUserMigration.where(user: resource_bot, initiator_user: user)
        ).to be_exists
      end

      it 'logs the event' do
        allow(Gitlab::AppLogger).to receive(:info)

        subject

        expect(Gitlab::AppLogger).to have_received(:info).with("PROJECT ACCESS TOKEN REVOCATION: revoked_by: #{user.username}, project_id: #{resource.id}, token_user: #{resource_bot.name}, token_id: #{access_token.id}")
      end
    end

    shared_examples 'rollback revoke steps' do
      it 'does not revoke the access token' do
        subject

        expect(access_token.reload.revoked?).to be false
      end

      it 'does not remove bot from member list' do
        subject

        expect(resource.reload.users).to include(resource_bot)
      end

      it 'does not transfer issuables of bot user to ghost user' do
        issue = create(:issue, author: resource_bot)

        subject

        expect(issue.reload.author.ghost?).to be false
      end

      it 'does not destroy project bot user' do
        subject

        expect(User.exists?(resource_bot.id)).to be_truthy
      end
    end

    shared_examples 'revoke fails' do |resource_type|
      let_it_be(:other_user) { create(:user) }

      context "when access token does not belong to this #{resource_type}" do
        it 'does not find the bot' do
          other_access_token = create(:personal_access_token, user: other_user)

          response = described_class.new(user, resource, other_access_token).execute

          expect(response.success?).to be false
          expect(response.message).to eq("Failed to find bot user")
          expect(access_token.reload.revoked?).to be false
        end
      end

      context 'when user does not have permission to destroy bot' do
        context "when non-#{resource_type} member tries to delete project bot" do
          it 'does not allow other user to delete bot' do
            response = described_class.new(other_user, resource, access_token).execute

            expect(response.success?).to be false
            expect(response.message).to eq("#{other_user.name} cannot delete #{access_token.user.name}")
            expect(access_token.reload.revoked?).to be false
          end
        end

        context "when non-privileged #{resource_type} member tries to delete project bot" do
          it 'does not allow developer to delete bot' do
            response = described_class.new(user_non_privileged, resource, access_token).execute

            expect(response.success?).to be false
            expect(response.message).to eq("#{user_non_privileged.name} cannot delete #{access_token.user.name}")
            expect(access_token.reload.revoked?).to be false
          end
        end
      end

      context 'when deletion of bot user fails' do
        before do
          allow_next_instance_of(::ResourceAccessTokens::RevokeService) do |service|
            allow(service).to receive(:execute).and_return(false)
          end
        end

        it_behaves_like 'rollback revoke steps'
      end
    end

    context 'when resource is a project' do
      let_it_be(:resource) { create(:project, :private) }

      before do
        resource.add_maintainer(user)
        resource.add_developer(user_non_privileged)
        resource.add_maintainer(resource_bot)
      end

      it_behaves_like 'revokes access token'

      it_behaves_like 'revoke fails', 'project'
    end

    context 'when resource is a group' do
      let_it_be(:resource) { create(:group, :private) }

      before do
        resource.add_owner(user)
        resource.add_maintainer(user_non_privileged)
        resource.add_maintainer(resource_bot)
      end

      it_behaves_like 'revokes access token'

      it_behaves_like 'revoke fails', 'group'
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module ResourceAccessTokens
  # Creates a resource (project or group) access token: provisions a bot
  # user, grants it membership on the resource, and issues a personal
  # access token for that bot. Cleans up the bot when any later step fails.
  class CreateService < BaseService
    include Gitlab::Utils::StrongMemoize

    def initialize(current_user, resource, params = {})
      @resource_type = resource.class.name.downcase
      @resource = resource
      @current_user = current_user
      @params = params.dup
    end

    # Returns a ServiceResponse whose payload contains the new access token
    # on success. Validation order: permission, OWNER-level restriction,
    # creator-vs-token access level, token limit, then user/member/token
    # creation with rollback (async bot deletion) on failure.
    def execute
      return error("User does not have permission to create #{resource_type} access token") unless has_permission_to_create?

      access_level = params[:access_level] || Gitlab::Access::MAINTAINER
      return error("Could not provision owner access to project access token") if do_not_allow_owner_access_level_for_project_bot?(access_level)

      return error("Access level of the token can't be greater the access level of the user who created the token") unless validate_access_level(access_level)

      return error(s_('AccessTokens|Access token limit reached')) if reached_access_token_limit?

      user = create_user

      return error(user.errors.full_messages.to_sentence) unless user.persisted?

      user.update!(external: true) if current_user.external?

      member = create_membership(resource, user, access_level)

      unless member.persisted?
        delete_failed_user(user)
        return error("Could not provision #{Gitlab::Access.human_access(access_level.to_i).downcase} access to the access token. ERROR: #{member.errors.full_messages.to_sentence}")
      end

      token_response = create_personal_access_token(user)

      if token_response.success?
        log_event(token_response.payload[:personal_access_token])
        success(token_response.payload[:personal_access_token])
      else
        delete_failed_user(user)
        error(token_response.message)
      end
    end

    private

    attr_reader :resource_type, :resource

    # Always false here; presumably overridden by the prepend_mod_with
    # extension at the bottom of the file — verify in EE sources.
    def reached_access_token_limit?
      false
    end

    def username_and_email_generator
      Gitlab::Utils::UsernameAndEmailGenerator.new(
        username_prefix: "#{resource_type}_#{resource.id}_bot",
        email_domain: "noreply.#{Gitlab.config.gitlab.host}"
      )
    end
    strong_memoize_attr :username_and_email_generator

    def has_permission_to_create?
      %w[project group].include?(resource_type) && can?(current_user, :create_resource_access_tokens, resource)
    end

    def create_user
      # Even project maintainers/owners can create project access tokens, which in turn
      # creates a bot user, and so it becomes necessary to have `skip_authorization: true`
      # since someone like a project maintainer/owner does not inherently have the ability
      # to create a new user in the system.
      ::Users::AuthorizedCreateService.new(current_user, default_user_params).execute
    end

    # Rollback path: hard-deletes the freshly created bot asynchronously.
    def delete_failed_user(user)
      DeleteUserWorker.perform_async(current_user.id, user.id, hard_delete: true, skip_authorization: true)
    end

    def default_user_params
      {
        name: params[:name] || "#{resource.name.to_s.humanize} bot",
        email: username_and_email_generator.email,
        username: username_and_email_generator.username,
        user_type: :project_bot,
        skip_confirmation: true # Bot users should always have their emails confirmed.
      }
    end

    def create_personal_access_token(user)
      PersonalAccessTokens::CreateService.new(
        current_user: user, target_user: user, params: personal_access_token_params
      ).execute
    end

    def personal_access_token_params
      {
        name: params[:name] || "#{resource_type}_bot",
        impersonation: false,
        scopes: params[:scopes] || default_scopes,
        expires_at: pat_expiration
      }
    end

    def default_scopes
      Gitlab::Auth.resource_bot_scopes
    end

    # Membership expiry matches the token expiry.
    def create_membership(resource, user, access_level)
      resource.add_member(user, access_level, expires_at: pat_expiration)
    end

    def pat_expiration
      params[:expires_at].presence || PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now
    end

    def log_event(token)
      ::Gitlab::AppLogger.info "PROJECT ACCESS TOKEN CREATION: created_by: #{current_user.username}, project_id: #{resource.id}, token_user: #{token.user.name}, token_id: #{token.id}"
    end

    def error(message)
      ServiceResponse.error(message: message)
    end

    def success(access_token)
      ServiceResponse.success(payload: { access_token: access_token })
    end

    # For projects, a non-bot creator without :manage_owners must itself
    # hold at least the requested access level on the project.
    def validate_access_level(access_level)
      return true unless resource.is_a?(Project)
      return true if current_user.bot?
      return true if current_user.can?(:manage_owners, resource)

      current_user.authorized_project?(resource, access_level.to_i)
    end

    # OWNER-level project tokens may only be minted by users who can manage
    # owners on the project.
    def do_not_allow_owner_access_level_for_project_bot?(access_level)
      resource.is_a?(Project) &&
        access_level.to_i == Gitlab::Access::OWNER &&
        !current_user.can?(:manage_owners, resource)
    end
  end
end

ResourceAccessTokens::CreateService.prepend_mod_with('ResourceAccessTokens::CreateService')
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for ResourceAccessTokens::CreateService: bot user provisioning,
# membership/access levels, PAT attributes (scopes, expiry), and failure
# cleanup for both project and group resources.
RSpec.describe ResourceAccessTokens::CreateService, feature_category: :system_access do
  subject { described_class.new(user, resource, params).execute }

  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project, :private) }
  let_it_be(:group) { create(:group, :private) }
  let_it_be(:params) { {} }
  let_it_be(:max_pat_access_token_lifetime) do
    PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS.days.from_now.to_date.freeze
  end

  before do
    stub_config_setting(host: 'example.com')
  end

  describe '#execute' do
    # Failure path: no member added, and any created bot user is destroyed.
    shared_examples 'token creation fails' do
      let(:resource) { create(:project) }

      it 'does not add the project bot as a member' do
        expect { subject }.not_to change { resource.members.count }
      end

      it 'immediately destroys the bot user if one was created', :sidekiq_inline do
        expect { subject }.not_to change { User.bots.count }
      end
    end

    shared_examples 'correct error message' do
      it 'returns correct error message' do
        expect(subject.error?).to be true
        expect(subject.errors).to include(error_message)
      end
    end

    shared_examples 'allows creation of bot with valid params' do
      it { expect { subject }.to change { User.count }.by(1) }

      it 'creates resource bot user' do
        response = subject

        access_token = response.payload[:access_token]

        expect(access_token.user.reload.user_type).to eq("project_bot")
        expect(access_token.user.created_by_id).to eq(user.id)
      end

      context 'email confirmation status' do
        shared_examples_for 'creates a user that has their email confirmed' do
          it 'creates a user that has their email confirmed' do
            response = subject
            access_token = response.payload[:access_token]

            expect(access_token.user.reload.confirmed?).to eq(true)
          end
        end

        context 'when created by an admin' do
          let(:user) { create(:admin) }

          context 'when admin mode is enabled', :enable_admin_mode do
            it_behaves_like 'creates a user that has their email confirmed'
          end

          context 'when admin mode is disabled' do
            it 'returns error' do
              response = subject

              expect(response.error?).to be true
            end
          end
        end

        context 'when created by a non-admin' do
          it_behaves_like 'creates a user that has their email confirmed'
        end
      end

      context 'bot name' do
        context 'when no name is passed' do
          it 'uses default name' do
            response = subject
            access_token = response.payload[:access_token]

            expect(access_token.user.name).to eq("#{resource.name.to_s.humanize} bot")
          end
        end

        context 'when user provides name' do
          let_it_be(:params) { { name: 'Random bot' } }

          it 'overrides the default name value' do
            response = subject
            access_token = response.payload[:access_token]

            expect(access_token.user.name).to eq(params[:name])
          end
        end
      end

      context 'bot username and email' do
        include_examples 'username and email pair is generated by Gitlab::Utils::UsernameAndEmailGenerator' do
          subject do
            response = described_class.new(user, resource, params).execute
            response.payload[:access_token].user
          end

          let(:username_prefix) do
            "#{resource.class.name.downcase}_#{resource.id}_bot"
          end

          let(:email_domain) do
            "noreply.#{Gitlab.config.gitlab.host}"
          end
        end
      end

      context 'access level' do
        context 'when user does not specify an access level' do
          it 'adds the bot user as a maintainer in the resource' do
            response = subject
            access_token = response.payload[:access_token]
            bot_user = access_token.user

            expect(resource.members.maintainers.map(&:user_id)).to include(bot_user.id)
          end
        end

        shared_examples 'bot with access level' do
          it 'adds the bot user with the specified access level in the resource' do
            response = subject
            access_token = response.payload[:access_token]
            bot_user = access_token.user

            expect(resource.members.developers.map(&:user_id)).to include(bot_user.id)
          end
        end

        context 'when user specifies an access level' do
          let_it_be(:params) { { access_level: Gitlab::Access::DEVELOPER } }

          it_behaves_like 'bot with access level'
        end

        context 'with DEVELOPER access_level, in string format' do
          let_it_be(:params) { { access_level: Gitlab::Access::DEVELOPER.to_s } }

          it_behaves_like 'bot with access level'
        end

        context 'when user is external' do
          before do
            user.update!(external: true)
          end

          it 'creates resource bot user with external status' do
            expect(subject.payload[:access_token].user.external).to eq true
          end
        end
      end

      context 'personal access token' do
        it { expect { subject }.to change { PersonalAccessToken.count }.by(1) }

        context 'when user does not provide scope' do
          it 'has default scopes' do
            response = subject
            access_token = response.payload[:access_token]

            expect(access_token.scopes).to eq(Gitlab::Auth.resource_bot_scopes)
          end
        end

        context 'when user provides scope explicitly' do
          let_it_be(:params) { { scopes: Gitlab::Auth::REPOSITORY_SCOPES } }

          it 'overrides the default scope value' do
            response = subject
            access_token = response.payload[:access_token]

            expect(access_token.scopes).to eq(Gitlab::Auth::REPOSITORY_SCOPES)
          end
        end

        context 'expires_at' do
          context 'when no expiration value is passed' do
            it 'defaults to PersonalAccessToken::MAX_PERSONAL_ACCESS_TOKEN_LIFETIME_IN_DAYS' do
              freeze_time do
                response = subject
                access_token = response.payload[:access_token]

                expect(access_token.expires_at).to eq(
                  max_pat_access_token_lifetime.to_date
                )
              end
            end

            it 'project bot membership expires when PAT expires' do
              response = subject
              access_token = response.payload[:access_token]
              project_bot = access_token.user

              expect(resource.members.find_by(user_id: project_bot.id).expires_at).to eq(
                max_pat_access_token_lifetime.to_date
              )
            end
          end

          context 'when user provides expiration value' do
            let_it_be(:params) { { expires_at: Date.today + 1.month } }

            it 'overrides the default expiration value' do
              response = subject
              access_token = response.payload[:access_token]

              expect(access_token.expires_at).to eq(params[:expires_at])
            end

            context 'expiry of the project bot member' do
              it 'sets the project bot to expire on the same day as the token' do
                response = subject
                access_token = response.payload[:access_token]
                project_bot = access_token.user

                expect(resource.members.find_by(user_id: project_bot.id).expires_at).to eq(access_token.expires_at)
              end
            end
          end

          context 'when invalid scope is passed' do
            let(:error_message) { 'Scopes can only contain available scopes' }
            let_it_be(:params) { { scopes: [:invalid_scope] } }

            it_behaves_like 'token creation fails'
            it_behaves_like 'correct error message'
          end
        end

        context "when access provisioning fails" do
          let_it_be(:bot_user) { create(:user, :project_bot) }

          let(:unpersisted_member) { build(:project_member, source: resource, user: bot_user) }
          let(:error_message) { 'Could not provision maintainer access to the access token. ERROR: error message' }

          before do
            allow_next_instance_of(ResourceAccessTokens::CreateService) do |service|
              allow(service).to receive(:create_user).and_return(bot_user)
              allow(service).to receive(:create_membership).and_return(unpersisted_member)
            end

            allow(unpersisted_member).to receive_message_chain(:errors, :full_messages, :to_sentence)
              .and_return('error message')
          end

          context 'with MAINTAINER access_level, in integer format' do
            let_it_be(:params) { { access_level: Gitlab::Access::MAINTAINER } }

            it_behaves_like 'token creation fails'
            it_behaves_like 'correct error message'
          end

          context 'with MAINTAINER access_level, in string format' do
            let_it_be(:params) { { access_level: Gitlab::Access::MAINTAINER.to_s } }

            it_behaves_like 'token creation fails'
            it_behaves_like 'correct error message'
          end
        end
      end

      it 'logs the event' do
        allow(Gitlab::AppLogger).to receive(:info)

        response = subject

        expect(Gitlab::AppLogger).to have_received(:info).with(/PROJECT ACCESS TOKEN CREATION: created_by: #{user.username}, project_id: #{resource.id}, token_user: #{response.payload[:access_token].user.name}, token_id: \d+/)
      end
    end

    shared_examples 'when user does not have permission to create a resource bot' do
      let(:error_message) { "User does not have permission to create #{resource_type} access token" }

      it_behaves_like 'token creation fails'
      it_behaves_like 'correct error message'
    end

    context 'when resource is a project' do
      let_it_be(:resource_type) { 'project' }
      let_it_be(:resource) { project }

      it_behaves_like 'when user does not have permission to create a resource bot'

      context 'user with valid permission' do
        before_all do
          resource.add_maintainer(user)
        end

        it_behaves_like 'allows creation of bot with valid params'

        context 'when user specifies an access level of OWNER for the bot' do
          let_it_be(:params) { { access_level: Gitlab::Access::OWNER } }

          context 'when the executor is a MAINTAINER' do
            let(:error_message) { 'Could not provision owner access to project access token' }

            context 'with OWNER access_level, in integer format' do
              it_behaves_like 'token creation fails'
              it_behaves_like 'correct error message'
            end

            context 'with OWNER access_level, in string format' do
              let(:error_message) { 'Could not provision owner access to project access token' }
              let_it_be(:params) { { access_level: Gitlab::Access::OWNER.to_s } }

              it_behaves_like 'token creation fails'
              it_behaves_like 'correct error message'
            end
          end

          context 'when the executor is an OWNER' do
            let_it_be(:user) { project.first_owner }

            it 'adds the bot user with the specified access level in the resource' do
              response = subject
              access_token = response.payload[:access_token]
              bot_user = access_token.user

              expect(resource.members.owners.map(&:user_id)).to include(bot_user.id)
            end
          end
        end
      end
    end

    context 'when resource is a group' do
      let_it_be(:resource_type) { 'group' }
      let_it_be(:resource) { group }

      it_behaves_like 'when user does not have permission to create a resource bot'

      context 'user with valid permission' do
        before_all do
          resource.add_owner(user)
        end

        it_behaves_like 'allows creation of bot with valid params'

        context 'when user specifies an access level of OWNER for the bot' do
          let_it_be(:params) { { access_level: Gitlab::Access::OWNER } }

          it 'adds the bot user with the specified access level in the resource' do
            response = subject
            access_token = response.payload[:access_token]
            bot_user = access_token.user

            expect(resource.members.owners.map(&:user_id)).to include(bot_user.id)
          end
        end
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Repositories
  # Copies this service's repository into a target repository and verifies
  # the copy by comparing checksums; the target is removed whenever
  # replication or verification fails.
  class ReplicateService < Repositories::BaseService
    Error = Class.new(StandardError)

    def execute(new_repository, type)
      new_repository.replicate(repository)

      target_checksum = new_repository.checksum
      source_checksum = repository.checksum
      return if target_checksum == source_checksum

      message = format(s_(
        'ReplicateService|Failed to verify %{type} repository checksum from %{old} to %{new}'
      ), type: type, old: source_checksum, new: target_checksum)
      raise Error, message
    rescue StandardError => e
      # Clean up the (possibly partial) copy before propagating.
      new_repository.remove

      raise e
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Repositories::ReplicateService: checksum verification and
# cleanup of the target repository on mismatch or error.
RSpec.describe Repositories::ReplicateService, feature_category: :source_code_management do
  let(:new_checksum) { 'match' }
  let(:repository) { instance_double('Gitlab::Git::Repository', checksum: 'match') }
  let(:new_repository) { instance_double('Gitlab::Git::Repository', checksum: new_checksum) }

  subject { described_class.new(repository) }

  it 'replicates repository' do
    expect(new_repository).to receive(:replicate).with(repository)
    expect(new_repository).not_to receive(:remove)

    expect { subject.execute(new_repository, :project) }.not_to raise_error
  end

  context 'when checksum does not match' do
    let(:new_checksum) { 'does not match' }

    it 'raises an error and removes new repository' do
      expect(new_repository).to receive(:replicate).with(repository)
      expect(new_repository).to receive(:remove)

      expect do
        subject.execute(new_repository, :project)
      end.to raise_error(described_class::Error, /Failed to verify project repository/)
    end
  end

  context 'when an error is raised during checksum calculation' do
    it 'raises the error and removes new repository' do
      error = StandardError.new

      expect(new_repository).to receive(:replicate).with(repository)
      expect(new_repository).to receive(:checksum).and_raise(error)
      expect(new_repository).to receive(:remove)

      expect do
        subject.execute(new_repository, :project)
      end.to raise_error(error)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

# Used for git housekeeping
#
# Ex.
#   Repositories::HousekeepingService.new(project).execute
#   Repositories::HousekeepingService.new(project.wiki).execute
#
module Repositories
  class HousekeepingService < BaseService
    # Timeout set to 24h
    LEASE_TIMEOUT = 86400
    # Every GC_PERIOD pushes a full `gc` task is scheduled instead of an
    # incremental repack (see #task).
    GC_PERIOD = 200

    # Raised when another housekeeping run already holds the exclusive lease.
    class LeaseTaken < StandardError
      def to_s
        "Somebody already triggered housekeeping for this resource in the past #{LEASE_TIMEOUT / 60} minutes"
      end
    end

    # resource - object exposing the housekeeping interface
    #            (pushes_since_gc, increment_pushes_since_gc,
    #            reset_pushes_since_gc, git_garbage_collect_worker_klass),
    #            e.g. a Project or a Wiki.
    # task     - optional task symbol; when nil the task is derived from the
    #            push counter (see #task).
    def initialize(resource, task = nil)
      @resource = resource
      @task = task
    end

    # Runs housekeeping under an exclusive lease. Raises LeaseTaken when the
    # lease cannot be obtained. An optional block runs before the GC worker
    # is scheduled.
    def execute
      lease_uuid = try_obtain_lease
      raise LeaseTaken unless lease_uuid.present?

      yield if block_given?
      execute_gitlab_shell_gc(lease_uuid)
    end

    # Whether housekeeping should run: at least one push since the last GC,
    # the push count hits a period boundary, and the instance-wide setting
    # is enabled.
    def needed?
      pushes_since_gc > 0 && period_match? && housekeeping_enabled?
    end

    # Records one more push since the last GC (instrumented).
    def increment!
      Gitlab::Metrics.measure(:increment_pushes_since_gc) do
        @resource.increment_pushes_since_gc
      end
    end

    private

    # Schedules the async GC worker; the ensure clause resets the push
    # counter once a full GC period has been reached, even if scheduling
    # raised.
    def execute_gitlab_shell_gc(lease_uuid)
      @resource.git_garbage_collect_worker_klass.perform_async(@resource.id, task, lease_key, lease_uuid)
    ensure
      if pushes_since_gc >= gc_period
        Gitlab::Metrics.measure(:reset_pushes_since_gc) do
          @resource.reset_pushes_since_gc
        end
      end
    end

    # Attempts to take the exclusive housekeeping lease; returns the lease
    # UUID or nil when somebody else holds it.
    def try_obtain_lease
      Gitlab::Metrics.measure(:obtain_housekeeping_lease) do
        lease = ::Gitlab::ExclusiveLease.new(lease_key, timeout: LEASE_TIMEOUT)
        lease.try_obtain
      end
    end

    # Lease key scoped per resource class and id, e.g. "projects_housekeeping:42".
    def lease_key
      "#{@resource.class.name.underscore.pluralize}_housekeeping:#{@resource.id}"
    end

    def pushes_since_gc
      @resource.pushes_since_gc
    end

    # Explicit task wins; otherwise a full :gc every GC_PERIOD pushes and
    # :incremental_repack in between.
    def task
      return @task if @task

      if pushes_since_gc % gc_period == 0
        :gc
      else
        :incremental_repack
      end
    end

    def period_match?
      [gc_period, repack_period].any? { |period| pushes_since_gc % period == 0 }
    end

    def housekeeping_enabled?
      Gitlab::CurrentSettings.housekeeping_enabled
    end

    def gc_period
      GC_PERIOD
    end

    def repack_period
      Gitlab::CurrentSettings.housekeeping_incremental_repack_period
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Runs the shared 'housekeeps repository' examples against both supported
# resource kinds: a project repository and a project wiki.
RSpec.describe Repositories::HousekeepingService, feature_category: :source_code_management do
  it_behaves_like 'housekeeps repository' do
    let_it_be(:resource) { create(:project, :repository) }
  end

  it_behaves_like 'housekeeps repository' do
    let_it_be(:project) { create(:project, :wiki_repo) }
    let_it_be(:resource) { project.wiki }
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Repositories
  # A service class for generating a changelog section.
  class ChangelogService
    DEFAULT_TRAILER = 'Changelog'
    DEFAULT_FILE = 'CHANGELOG.md'

    # The maximum number of commits allowed to fetch in `from` and `to` range.
    #
    # This value is arbitrarily chosen. Increasing it means more Gitaly calls
    # and more pressure on Gitaly services.
    #
    # This number is 3x of the average number of commits per GitLab release.
    # Some examples for GitLab's own releases:
    #
    # * 13.6.0: 4636 commits
    # * 13.5.0: 5912 commits
    # * 13.4.0: 5541 commits
    COMMITS_LIMIT = 15_000

    # The `project` specifies the `Project` to generate the changelog section
    # for.
    #
    # The `user` argument specifies a `User` to use for committing the changes
    # to the Git repository.
    #
    # The `version` argument must be a version `String` using semantic
    # versioning as the format.
    #
    # The arguments `from` and `to` must specify a Git ref or SHA to use for
    # fetching the commits to include in the changelog. The SHA/ref set in the
    # `from` argument isn't included in the list.
    #
    # The `date` argument specifies the date of the release, and defaults to the
    # current time/date.
    #
    # The `branch` argument specifies the branch to commit the changes to. The
    # branch must already exist.
    #
    # The `trailer` argument is the Git trailer to use for determining what
    # commits to include in the changelog.
    #
    # The `config_file` argument specifies the path to the configuration file as
    # stored in the project's Git repository.
    #
    # The `file` argument specifies the name/path of the file to commit the
    # changes to. If the file doesn't exist, it's created automatically.
    #
    # The `message` argument specifies the commit message to use when committing
    # the changelog changes.
    #
    # rubocop: disable Metrics/ParameterLists
    def initialize(
      project,
      user,
      version:,
      branch: project.default_branch_or_main,
      from: nil,
      to: branch,
      date: DateTime.now,
      trailer: DEFAULT_TRAILER,
      config_file: Gitlab::Changelog::Config::DEFAULT_FILE_PATH,
      file: DEFAULT_FILE,
      message: "Add changelog for version #{version}"
    )
      @project = project
      @user = user
      @version = version
      @from = from
      @to = to
      @date = date
      @branch = branch
      @trailer = trailer
      @config_file = config_file
      @file = file
      @message = message
    end
    # rubocop: enable Metrics/ParameterLists

    # Builds the changelog section for the configured commit range and either
    # commits it to `@file` on `@branch` (commit_to_changelog: true) or
    # returns the generated section (commit_to_changelog: false).
    def execute(commit_to_changelog: true)
      config = Gitlab::Changelog::Config.from_git(@project, @user, @config_file)
      from = start_of_commit_range(config)

      # For every entry we want to only include the merge request that
      # originally introduced the commit, which is the oldest merge request that
      # contains the commit. We fetch these merge requests in batches, reducing
      # the number of SQL queries needed to get this data.
      mrs_finder = MergeRequests::OldestPerCommitFinder.new(@project)
      release = Gitlab::Changelog::Release
        .new(version: @version, date: @date, config: config)

      commits =
        ChangelogCommitsFinder.new(project: @project, from: from, to: @to)

      verify_commit_range!(from, @to)

      commits.each_page(@trailer) do |page|
        mrs = mrs_finder.execute(page)

        # Preload the authors. This ensures we only need a single SQL query per
        # batch of commits, instead of needing a query for every commit.
        page.each(&:lazy_author)

        # Preload author permissions
        @project.team.max_member_access_for_user_ids(page.map(&:author).compact.map(&:id))

        page.each do |commit|
          release.add_entry(
            title: commit.title,
            commit: commit,
            category: commit.trailers.fetch(@trailer),
            author: commit.author,
            merge_request: mrs[commit.id]
          )
        end
      end

      if commit_to_changelog
        Gitlab::Changelog::Committer
          .new(@project, @user)
          .commit(release: release, file: @file, branch: @branch, message: @message)
      else
        Gitlab::Changelog::Generator.new.add(release)
      end
    end

    # Returns the SHA/ref to start the commit range at: the explicit `from`
    # argument when given, otherwise the target commit of the previous
    # version's tag. Raises Gitlab::Changelog::Error when neither exists.
    def start_of_commit_range(config)
      return @from if @from

      finder = ChangelogTagFinder.new(@project, regex: config.tag_regex)

      if (prev_tag = finder.execute(@version))
        return prev_tag.target_commit.id
      end

      raise(
        Gitlab::Changelog::Error,
        'The commit start range is unspecified, and no previous tag ' \
        'could be found to use instead'
      )
    end

    # Validates that both range endpoints resolve to commits and that the
    # range does not exceed COMMITS_LIMIT. Guarded by the
    # :changelog_commits_limitation feature flag.
    def verify_commit_range!(from, to)
      return unless Feature.enabled?(:changelog_commits_limitation, @project)

      commits = @project.repository.commits_by(oids: [from, to])

      raise Gitlab::Changelog::Error, "Invalid or not found commit value in the given range" unless commits.count == 2

      _, commits_count = @project.repository.diverging_commit_count(from, to)

      if commits_count > COMMITS_LIMIT
        raise Gitlab::Changelog::Error, "The commits range exceeds #{COMMITS_LIMIT} elements."
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Repositories::ChangelogService. Commits with a `Changelog:`
# trailer are collected into a changelog section, which is either committed
# to CHANGELOG.md or returned as a string.
RSpec.describe Repositories::ChangelogService, feature_category: :source_code_management do
  describe '#execute' do
    let!(:project) { create(:project, :empty_repo) }
    let!(:creator) { project.creator }
    let!(:author1) { create(:user) }
    let!(:author2) { create(:user) }
    let!(:mr1) { create(:merge_request, :merged, target_project: project) }
    let!(:mr2) { create(:merge_request, :merged, target_project: project) }

    # The range of commits ignores the first commit, but includes the last
    # commit. To ensure both the commits below are included, we must create an
    # extra commit.
    #
    # In the real world, the start commit of the range will be the last commit
    # of the previous release, so ignoring that is expected and desired.
    let!(:sha1) do
      create_commit(
        project,
        creator,
        commit_message: 'Initial commit',
        actions: [{ action: 'create', content: 'test', file_path: 'README.md' }]
      )
    end

    let!(:sha2) do
      project.add_maintainer(author1)

      create_commit(
        project,
        author1,
        commit_message: "Title 1\n\nChangelog: feature",
        actions: [{ action: 'create', content: 'foo', file_path: 'a.txt' }]
      )
    end

    let!(:sha3) do
      project.add_maintainer(author2)

      create_commit(
        project,
        author2,
        commit_message: "Title 2\n\nChangelog: feature",
        actions: [{ action: 'create', content: 'bar', file_path: 'b.txt' }]
      )
    end

    let!(:sha4) do
      create_commit(
        project,
        author2,
        commit_message: "Title 3\n\nChangelog: feature",
        actions: [{ action: 'create', content: 'bar', file_path: 'c.txt' }]
      )
    end

    let!(:commit1) { project.commit(sha2) }
    let!(:commit2) { project.commit(sha3) }
    let!(:commit3) { project.commit(sha4) }

    let(:commit_to_changelog) { true }

    it 'generates and commits a changelog section' do
      allow(MergeRequestDiffCommit)
        .to receive(:oldest_merge_request_id_per_commit)
        .with(project.id, [commit2.id, commit1.id])
        .and_return(
          [
            { sha: sha2, merge_request_id: mr1.id },
            { sha: sha3, merge_request_id: mr2.id }
          ])

      service = described_class
        .new(project, creator, version: '1.0.0', from: sha1, to: sha3)

      # The query count is pinned to catch accidental N+1 regressions.
      recorder = ActiveRecord::QueryRecorder.new { service.execute(commit_to_changelog: commit_to_changelog) }
      changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data

      expect(recorder.count).to eq(12)
      expect(changelog).to include('Title 1', 'Title 2')
    end

    it "ignores a commit when it's both added and reverted in the same range" do
      create_commit(
        project,
        author2,
        commit_message: "Title 4\n\nThis reverts commit #{sha4}",
        actions: [{ action: 'create', content: 'bar', file_path: 'd.txt' }]
      )

      described_class
        .new(project, creator, version: '1.0.0', from: sha1)
        .execute(commit_to_changelog: commit_to_changelog)

      changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data

      expect(changelog).to include('Title 1', 'Title 2')
      expect(changelog).not_to include('Title 3', 'Title 4')
    end

    it 'includes a revert commit when it has a trailer' do
      create_commit(
        project,
        author2,
        commit_message: "Title 4\n\nThis reverts commit #{sha4}\n\nChangelog: added",
        actions: [{ action: 'create', content: 'bar', file_path: 'd.txt' }]
      )

      described_class
        .new(project, creator, version: '1.0.0', from: sha1)
        .execute(commit_to_changelog: commit_to_changelog)

      changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data

      expect(changelog).to include('Title 1', 'Title 2', 'Title 4')
      expect(changelog).not_to include('Title 3')
    end

    it 'uses the target branch when "to" is unspecified' do
      described_class
        .new(project, creator, version: '1.0.0', from: sha1)
        .execute(commit_to_changelog: commit_to_changelog)

      changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data

      expect(changelog).to include('Title 1', 'Title 2', 'Title 3')
    end

    describe 'with commit_to_changelog: false' do
      let(:commit_to_changelog) { false }

      it 'generates changelog section' do
        allow(MergeRequestDiffCommit)
          .to receive(:oldest_merge_request_id_per_commit)
          .with(project.id, [commit2.id, commit1.id])
          .and_return(
            [
              { sha: sha2, merge_request_id: mr1.id },
              { sha: sha3, merge_request_id: mr2.id }
            ])

        service = described_class
          .new(project, creator, version: '1.0.0', from: sha1, to: sha3)

        changelog = service.execute(commit_to_changelog: commit_to_changelog)

        expect(changelog).to include('Title 1', 'Title 2')
      end
    end

    it 'avoids N+1 queries', :request_store do
      RequestStore.clear!

      request = ->(to) do
        described_class
          .new(project, creator, version: '1.0.0', from: sha1, to: to)
          .execute(commit_to_changelog: false)
      end

      control = ActiveRecord::QueryRecorder.new { request.call(sha2) }

      RequestStore.clear!

      expect { request.call(sha3) }.not_to exceed_query_limit(control.count)
    end

    context 'when one of commits does not exist' do
      let(:service) { described_class.new(project, creator, version: '1.0.0', from: 'master', to: '54321') }

      it 'raises an exception' do
        expect { service.execute(commit_to_changelog: false) }.to raise_error(Gitlab::Changelog::Error)
      end
    end

    context 'when commit range exceeds the limit' do
      let(:service) { described_class.new(project, creator, version: '1.0.0', from: sha1) }

      before do
        stub_const("#{described_class.name}::COMMITS_LIMIT", 2)
      end

      it 'raises an exception' do
        expect { service.execute(commit_to_changelog: false) }.to raise_error(Gitlab::Changelog::Error)
      end

      context 'when feature flag is off' do
        before do
          stub_feature_flags(changelog_commits_limitation: false)
        end

        it 'returns the changelog' do
          expect(service.execute(commit_to_changelog: false)).to include('Title 1', 'Title 2', 'Title 3')
        end
      end
    end

    context 'with specified changelog config file path' do
      it 'return specified changelog content' do
        config = Gitlab::Changelog::Config.from_hash(project, { 'template' => 'specified_changelog_content' }, creator)

        allow(Gitlab::Changelog::Config)
          .to receive(:from_git)
          .with(project, creator, 'specified_changelog_config.yml')
          .and_return(config)

        described_class
          .new(project, creator, version: '1.0.0', from: sha1, config_file: 'specified_changelog_config.yml')
          .execute(commit_to_changelog: commit_to_changelog)

        changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data

        expect(changelog).to include('specified_changelog_content')
      end
    end
  end

  describe '#start_of_commit_range' do
    let(:project) { build_stubbed(:project) }
    let(:user) { build_stubbed(:user) }
    let(:config) { Gitlab::Changelog::Config.new(project) }

    context 'when the "from" argument is specified' do
      it 'returns the value of the argument' do
        service = described_class
          .new(project, user, version: '1.0.0', from: 'foo', to: 'bar')

        expect(service.start_of_commit_range(config)).to eq('foo')
      end
    end

    context 'when the "from" argument is unspecified' do
      it 'returns the tag commit of the previous version' do
        service = described_class
          .new(project, user, version: '1.0.0', to: 'bar')

        finder_spy = instance_spy(Repositories::ChangelogTagFinder)
        tag = double(:tag, target_commit: double(:commit, id: '123'))

        allow(Repositories::ChangelogTagFinder)
          .to receive(:new)
          .with(project, regex: an_instance_of(String))
          .and_return(finder_spy)

        allow(finder_spy)
          .to receive(:execute)
          .with('1.0.0')
          .and_return(tag)

        expect(service.start_of_commit_range(config)).to eq('123')
      end

      it 'raises an error when no tag is found' do
        service = described_class
          .new(project, user, version: '1.0.0', to: 'bar')

        finder_spy = instance_spy(Repositories::ChangelogTagFinder)

        allow(Repositories::ChangelogTagFinder)
          .to receive(:new)
          .with(project, regex: an_instance_of(String))
          .and_return(finder_spy)

        allow(finder_spy)
          .to receive(:execute)
          .with('1.0.0')
          .and_return(nil)

        expect { service.start_of_commit_range(config) }
          .to raise_error(Gitlab::Changelog::Error)
      end
    end
  end

  # Creates a commit on master via Files::MultiService and returns its SHA.
  def create_commit(project, user, params)
    params = { start_branch: 'master', branch_name: 'master' }.merge(params)

    Files::MultiService.new(project, user, params).execute.fetch(:result)
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

# Removes a repository from disk. The physical removal is deferred to an
# after-commit callback (except on read-only instances) so the database
# record and the on-disk state stay consistent.
class Repositories::DestroyService < Repositories::BaseService
  def execute
    # Nothing to do when the container has no repository or no on-disk copy.
    return success unless repository
    return success unless repo_exists?(disk_path)

    # Flush the cache for both repositories. This has to be done _before_
    # removing the physical repositories as some expiration code depends on
    # Git data (e.g. a list of branch names).
    ignore_git_errors { repository.before_delete }

    # Use variables that aren't methods on Project, because they are used in a callback
    current_storage = repository.shard
    current_path = "#{disk_path}.git"

    # Because #remove happens inside a run_after_commit callback it will
    # never be triggered on a read-only instance.
    #
    # Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/223272
    if Gitlab::Database.read_only?
      Gitlab::Git::Repository.new(current_storage, current_path, nil, nil).remove
    else
      container.run_after_commit do
        Gitlab::Git::Repository.new(current_storage, current_path, nil, nil).remove
      end
    end

    log_info("Repository \"#{full_path}\" was removed")

    success
  rescue Gitlab::Git::Repository::NoRepository
    # An already-missing repository counts as a successful removal.
    success
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Repositories::DestroyService: repositories are removed from disk
# (deferred via run_after_commit, or immediately on read-only instances),
# caches are flushed first, and missing repositories are handled gracefully.
RSpec.describe Repositories::DestroyService, feature_category: :source_code_management do
  let_it_be(:user) { create(:user) }

  let!(:project) { create(:project, :repository, namespace: user.namespace) }
  let(:repository) { project.repository }
  let(:path) { repository.disk_path }

  subject { described_class.new(repository).execute }

  it 'removes the repository' do
    expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy

    subject

    # Because the removal happens inside a run_after_commit callback we need to
    # trigger the callback
    project.touch

    expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
  end

  context 'on a read-only instance' do
    before do
      allow(Gitlab::Database).to receive(:read_only?).and_return(true)
    end

    it 'schedules the repository deletion' do
      expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy

      subject

      # No project.touch here: on read-only instances removal is immediate.
      expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
    end
  end

  it 'flushes the repository cache' do
    expect(repository).to receive(:before_delete)

    subject
  end

  it 'does not perform any action if repository path does not exist and returns success' do
    expect(repository).to receive(:disk_path).and_return('foo')
    expect(repository).not_to receive(:before_delete)

    expect(subject[:status]).to eq :success
  end

  it 'gracefully handles exception if the repository does not exist on disk' do
    expect(repository).to receive(:before_delete).and_raise(Gitlab::Git::Repository::NoRepository)

    expect(subject[:status]).to eq :success
  end

  context 'with a project wiki repository' do
    let(:project) { create(:project, :wiki_repo) }
    let(:repository) { project.wiki.repository }

    it 'schedules the repository deletion' do
      expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_truthy

      subject

      # Because the removal happens inside a run_after_commit callback we need to
      # trigger the callback
      project.touch

      expect(project.gitlab_shell.repository_exists?(project.repository_storage, path + '.git')).to be_falsey
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Groups
  # Counts (and caches, via Groups::CountService) the number of open merge
  # requests of a group, including its subgroups.
  class MergeRequestsCountService < Groups::CountService
    private

    # Cache key suffix used by the base counter service.
    def cache_key_name
      'open_merge_requests_count'
    end

    # Identifier for the issuable type being counted.
    def issuable_key
      'open_merge_requests'
    end

    # Relation counted: opened, non-archived merge requests across the
    # group and all of its subgroups, as visible to `user`.
    def relation_for_count
      finder_params = {
        group_id: group.id,
        state: 'opened',
        non_archived: true,
        include_subgroups: true
      }

      MergeRequestsFinder.new(user, finder_params).execute
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Verifies that the group open-MR counter scopes its relation through
# MergeRequestsFinder and behaves like a threshold-based counter cache.
RSpec.describe Groups::MergeRequestsCountService, :use_clean_rails_memory_store_caching, feature_category: :groups_and_projects do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group, :public) }
  let_it_be(:project) { create(:project, :repository, namespace: group) }
  let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project) }

  subject { described_class.new(group, user) }

  describe '#relation_for_count' do
    before do
      group.add_reporter(user)
      # Keep real finder behavior while still observing the constructor call.
      allow(MergeRequestsFinder).to receive(:new).and_call_original
    end

    it 'uses the MergeRequestsFinder to scope merge requests' do
      expect(MergeRequestsFinder)
        .to receive(:new)
        .with(user, group_id: group.id, state: 'opened', non_archived: true, include_subgroups: true)

      subject.count
    end
  end

  it_behaves_like 'a counter caching service with threshold'
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Groups
  # Provides the data sets used by group-level autocomplete: issues, merge
  # requests, milestones, labels and quick-action commands.
  class AutocompleteService < Groups::BaseService
    include LabelsAsHash

    # Open issues in the group and its subgroups, optionally restricted to
    # confidential issues and/or specific issue types. Only the columns
    # needed for autocomplete are selected.
    # rubocop: disable CodeReuse/ActiveRecord
    def issues(confidential_only: false, issue_types: nil)
      finder_params = { group_id: group.id, include_subgroups: true, state: 'opened' }
      finder_params[:confidential] = true if confidential_only.present?
      finder_params[:issue_types] = issue_types if issue_types.present?

      IssuesFinder.new(current_user, finder_params)
        .execute
        .preload(project: :namespace)
        .select(:iid, :title, :project_id, :namespace_id)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # Open merge requests in the group and its subgroups.
    # rubocop: disable CodeReuse/ActiveRecord
    def merge_requests
      MergeRequestsFinder.new(current_user, group_id: group.id, include_subgroups: true, state: 'opened')
        .execute
        .preload(target_project: :namespace)
        .select(:iid, :title, :target_project_id)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # Milestones of the group and of those ancestor groups visible to the
    # current user.
    # rubocop: disable CodeReuse/ActiveRecord
    def milestones
      group_ids = group.self_and_ancestors.public_or_visible_to_user(current_user).pluck(:id)

      MilestonesFinder.new(group_ids: group_ids).execute.select(:iid, :title, :due_date)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # Group labels only, including ancestor groups (see LabelsAsHash).
    def labels_as_hash(target)
      super(target, group_id: group.id, only_group_labels: true, include_ancestor_groups: true)
    end

    # Quick-action commands available for the given noteable; empty when no
    # noteable is given.
    def commands(noteable)
      return [] unless noteable

      QuickActions::InterpretService.new(nil, current_user).available_commands(noteable)
    end
  end
end

Groups::AutocompleteService.prepend_mod
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Groups::AutocompleteService: labels, issues, merge requests and
# milestones returned across the group hierarchy, respecting visibility.
RSpec.describe Groups::AutocompleteService, feature_category: :groups_and_projects do
  let_it_be(:group, refind: true) { create(:group, :nested, :private, avatar: fixture_file_upload('spec/fixtures/dk.png')) }
  let_it_be(:sub_group) { create(:group, :private, parent: group) }

  let(:user) { create(:user) }

  subject { described_class.new(group, user) }

  before do
    group.add_developer(user)
  end

  # Compares two label collections by title only.
  def expect_labels_to_equal(labels, expected_labels)
    extract_title = lambda { |label| label['title'] }
    expect(labels.map(&extract_title)).to match_array(expected_labels.map(&extract_title))
  end

  describe '#labels_as_hash' do
    let!(:label1) { create(:group_label, group: group) }
    let!(:label2) { create(:group_label, group: group) }
    let!(:sub_group_label) { create(:group_label, group: sub_group) }
    let!(:parent_group_label) { create(:group_label, group: group.parent) }

    it 'returns labels from own group and ancestor groups' do
      results = subject.labels_as_hash(nil)

      # sub_group_label is deliberately absent: descendants are excluded.
      expected_labels = [label1, label2, parent_group_label]

      expect_labels_to_equal(results, expected_labels)
    end
  end

  describe '#issues' do
    let(:project) { create(:project, group: group) }
    let(:sub_group_project) { create(:project, group: sub_group) }

    let!(:project_issue) { create(:issue, project: project) }
    let!(:sub_group_project_issue) { create(:issue, confidential: true, project: sub_group_project) }

    it 'returns issues in group and subgroups' do
      issues = subject.issues

      expect(issues.map(&:iid)).to contain_exactly(project_issue.iid, sub_group_project_issue.iid)
      expect(issues.map(&:title)).to contain_exactly(project_issue.title, sub_group_project_issue.title)
    end

    it 'returns only confidential issues if confidential_only is true' do
      issues = subject.issues(confidential_only: true)

      expect(issues.map(&:iid)).to contain_exactly(sub_group_project_issue.iid)
      expect(issues.map(&:title)).to contain_exactly(sub_group_project_issue.title)
    end
  end

  describe '#merge_requests' do
    let(:project) { create(:project, :repository, group: group) }
    let(:sub_group_project) { create(:project, :repository, group: sub_group) }

    let!(:project_mr) { create(:merge_request, source_project: project) }
    let!(:sub_group_project_mr) { create(:merge_request, source_project: sub_group_project) }

    it 'returns merge requests in group and subgroups' do
      expect(subject.merge_requests.map(&:iid)).to contain_exactly(project_mr.iid, sub_group_project_mr.iid)
      expect(subject.merge_requests.map(&:title)).to contain_exactly(project_mr.title, sub_group_project_mr.title)
    end
  end

  describe '#milestones' do
    let!(:group_milestone) { create(:milestone, group: group) }
    let!(:subgroup_milestone) { create(:milestone, group: sub_group) }

    before do
      sub_group.add_maintainer(user)
    end

    context 'when group is public' do
      let(:public_group) { create(:group, :public) }
      let(:public_subgroup) { create(:group, :public, parent: public_group) }

      before do
        group_milestone.update!(group: public_group)
        subgroup_milestone.update!(group: public_subgroup)
      end

      it 'returns milestones from groups and subgroups' do
        subject = described_class.new(public_subgroup, user)

        expect(subject.milestones.map(&:iid)).to contain_exactly(group_milestone.iid, subgroup_milestone.iid)
        expect(subject.milestones.map(&:title)).to contain_exactly(group_milestone.title, subgroup_milestone.title)
      end
    end

    it 'returns milestones from group' do
      expect(subject.milestones.map(&:iid)).to contain_exactly(group_milestone.iid)
      expect(subject.milestones.map(&:title)).to contain_exactly(group_milestone.title)
    end

    it 'returns milestones from groups and subgroups' do
      milestones = described_class.new(sub_group, user).milestones

      expect(milestones.map(&:iid)).to contain_exactly(group_milestone.iid, subgroup_milestone.iid)
      expect(milestones.map(&:title)).to contain_exactly(group_milestone.title, subgroup_milestone.title)
    end

    it 'returns only milestones that user can read' do
      user = create(:user)
      sub_group.add_guest(user)

      milestones = described_class.new(sub_group, user).milestones

      expect(milestones.map(&:iid)).to contain_exactly(subgroup_milestone.iid)
      expect(milestones.map(&:title)).to contain_exactly(subgroup_milestone.title)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Groups
  # Builds the list of mentionable participants for a noteable in a group
  # context: the noteable's owner and participants, the special "all"
  # members entry, mentionable groups, and users from the group hierarchy.
  class ParticipantsService < Groups::BaseService
    include Gitlab::Utils::StrongMemoize
    include Users::ParticipableService

    def execute(noteable)
      @noteable = noteable

      sources = [
        noteable_owner,
        participants_in_noteable,
        all_members,
        groups,
        group_hierarchy_users
      ]

      render_participants_as_hash(sources.reduce(:+).uniq)
    end

    private

    # The special "@all" entry. Skipped when there is no group or the
    # :disable_all_mention feature flag is enabled.
    def all_members
      return [] unless group
      return [] if Feature.enabled?(:disable_all_mention)

      [{ username: "all", name: "All Group Members", count: group.users_count }]
    end

    # Users across the group hierarchy, sorted for display.
    def group_hierarchy_users
      return [] unless group

      hierarchy_users = Autocomplete::GroupUsersFinder.new(group: group).execute
      sorted(hierarchy_users)
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Groups::ParticipantsService: the participants returned for
# group-level autocomplete, including the special "all" entry and
# hierarchy-wide membership.
RSpec.describe Groups::ParticipantsService, feature_category: :groups_and_projects do
  describe '#execute' do
    let_it_be(:developer) { create(:user) }

    let_it_be(:parent_group) { create(:group) }
    let_it_be(:group) { create(:group, parent: parent_group) }
    let_it_be(:subgroup) { create(:group, parent: group) }
    let_it_be(:subproject) { create(:project, group: subgroup) }

    let(:service) { described_class.new(group, developer) }

    subject(:service_result) { service.execute(nil) }

    before_all do
      parent_group.add_developer(developer)
    end

    before do
      stub_feature_flags(disable_all_mention: false)
    end

    it 'includes `All Group Members`' do
      group.add_developer(create(:user))

      # These should not be included in the count for the @all entry
      subgroup.add_developer(create(:user))
      subproject.add_developer(create(:user))

      expect(service_result).to include(a_hash_including({ username: "all", name: "All Group Members", count: 1 }))
    end

    context 'when `disable_all_mention` FF is enabled' do
      before do
        stub_feature_flags(disable_all_mention: true)
      end

      it 'does not include `All Group Members`' do
        expect(service_result).not_to include(a_hash_including({ username: "all", name: "All Group Members" }))
      end
    end

    it 'returns all members in parent groups, sub-groups, and sub-projects' do
      parent_group.add_developer(create(:user))
      subgroup.add_developer(create(:user))
      subproject.add_developer(create(:user))

      expected_users = (group.self_and_hierarchy.flat_map(&:users) + subproject.users)
        .map { |user| user_to_autocompletable(user) }

      expect(expected_users.count).to eq(4)
      expect(service_result).to include(*expected_users)
    end

    context 'when shared with a private group' do
      let_it_be(:private_group_member) { create(:user) }
      let_it_be(:private_group) { create(:group, :private, :nested) }

      before_all do
        private_group.add_owner(private_group_member)
        create(:group_group_link, shared_group: parent_group, shared_with_group: private_group)
      end

      subject(:usernames) { service_result.pluck(:username) }

      it { is_expected.to include(private_group_member.username) }
    end
  end

  # Mirrors the hash shape produced for a user participant.
  def user_to_autocompletable(user)
    {
      type: user.class.name,
      username: user.username,
      name: user.name,
      avatar_url: user.avatar_url,
      availability: user&.status&.availability
    }
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Groups
class TransferService < Groups::BaseService
TransferError = Class.new(StandardError)
attr_reader :error, :new_parent_group
def initialize(group, user, params = {})
super
@error = nil
end
def log_group_transfer_success(group, new_parent_group)
log_transfer(group, new_parent_group, nil)
end
def log_group_transfer_error(group, new_parent_group, error_message)
log_transfer(group, new_parent_group, error_message)
end
def execute(new_parent_group)
@new_parent_group = new_parent_group
ensure_allowed_transfer
proceed_to_transfer
log_group_transfer_success(@group, @new_parent_group)
rescue TransferError, ActiveRecord::RecordInvalid, Gitlab::UpdatePathError => e
@group.errors.clear
@error = s_("TransferGroup|Transfer failed: %{error_message}") % { error_message: e.message }
log_group_transfer_error(@group, @new_parent_group, e.message)
false
end
private
def log_transfer(group, new_namespace, error_message = nil)
action = error_message.nil? ? "was" : "was not"
log_payload = {
message: "Group #{action} transferred to a new namespace",
group_path: group.full_path,
group_id: group.id,
new_parent_group_path: new_parent_group&.full_path,
new_parent_group_id: new_parent_group&.id,
error_message: error_message
}
if error_message.nil?
::Gitlab::AppLogger.info(log_payload)
else
::Gitlab::AppLogger.error(log_payload)
end
end
def proceed_to_transfer
old_root_ancestor_id = @group.root_ancestor.id
was_root_group = @group.root?
Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.temporary_ignore_tables_in_transaction(
%w[routes redirect_routes], url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424280'
) do
Group.transaction do
update_group_attributes
ensure_ownership
update_integrations
remove_issue_contacts(old_root_ancestor_id, was_root_group)
update_crm_objects(was_root_group)
remove_namespace_commit_emails(was_root_group)
end
end
post_update_hooks(@updated_project_ids, old_root_ancestor_id)
propagate_integrations
update_pending_builds
true
end
# Overridden in EE
def post_update_hooks(updated_project_ids, old_root_ancestor_id)
refresh_project_authorizations
refresh_descendant_groups if @new_parent_group
publish_event(old_root_ancestor_id)
end
# Overridden in EE
def ensure_allowed_transfer
raise_transfer_error(:group_is_already_root) if group_is_already_root?
raise_transfer_error(:same_parent_as_current) if same_parent?
raise_transfer_error(:has_subscription) if has_subscription?
raise_transfer_error(:invalid_policies) unless valid_policies?
raise_transfer_error(:namespace_with_same_path) if namespace_with_same_path?
raise_transfer_error(:group_contains_images) if group_projects_contain_registry_images?
raise_transfer_error(:cannot_transfer_to_subgroup) if transfer_to_subgroup?
raise_transfer_error(:group_contains_namespaced_npm_packages) if group_with_namespaced_npm_packages?
raise_transfer_error(:no_permissions_to_migrate_crm) if no_permissions_to_migrate_crm?
end
def no_permissions_to_migrate_crm?
return false unless group && @new_parent_group
return false if group.root_ancestor == @new_parent_group.root_ancestor
return true if group.contacts.exists? && !current_user.can?(:admin_crm_contact, @new_parent_group.root_ancestor)
return true if group.crm_organizations.exists? && !current_user.can?(:admin_crm_organization, @new_parent_group.root_ancestor)
false
end
def group_with_namespaced_npm_packages?
return false unless group.packages_feature_enabled?
npm_packages = ::Packages::GroupPackagesFinder.new(current_user, group, package_type: :npm, preload_pipelines: false).execute
npm_packages = npm_packages.with_npm_scope(group.root_ancestor.path)
different_root_ancestor? && npm_packages.exists?
end
def different_root_ancestor?
group.root_ancestor != new_parent_group&.root_ancestor
end
def group_is_already_root?
!@new_parent_group && [email protected]_parent?
end
def same_parent?
@new_parent_group && @new_parent_group.id == @group.parent_id
end
def has_subscription?
@group.paid?
end
# Prevents cycles: the target parent may not be the group itself or any
# of its descendants.
def transfer_to_subgroup?
  return false unless @new_parent_group

  @group.self_and_descendants.pluck_primary_key.include?(@new_parent_group.id)
end
# The acting user must be allowed to administer the group being moved
# and to create it in its destination: a subgroup under the new parent,
# or a new top-level group.
def valid_policies?
  return false unless can?(current_user, :admin_group, @group)

  @new_parent_group ? can?(current_user, :create_subgroup, @new_parent_group) : can?(current_user, :create_group)
end
# rubocop: disable CodeReuse/ActiveRecord
# True when the target parent already contains a namespace (a subgroup
# or a project's namespace) with this group's path, which would make
# the new full path ambiguous.
def namespace_with_same_path?
  Namespace.exists?(path: @group.path, parent: @new_parent_group)
end
# rubocop: enable CodeReuse/ActiveRecord
# Transfers are blocked while any project in the group (or in its
# subgroups) has images in the container registry — the corresponding
# error message asks users to remove the images first.
def group_projects_contain_registry_images?
  @group.has_container_repository_including_subgroups?
end
# Mutates and persists @group for the transfer: clamps visibility to
# the new parent's level (including descendants and projects), updates
# the 2FA requirement, reassigns the parent, resets memoized ancestry
# and inherits shared-runner restrictions.
def update_group_attributes
  # Tighten visibility first — a group may not be more visible than its
  # new parent, and the same clamp is applied to the whole subtree.
  if @new_parent_group && @new_parent_group.visibility_level < @group.visibility_level
    update_children_and_projects_visibility
    @group.visibility_level = @new_parent_group.visibility_level
  end

  update_two_factor_authentication if @new_parent_group
  @group.parent = @new_parent_group
  # Cached ancestry is stale as soon as the parent changes.
  @group.clear_memoization(:self_and_ancestors_ids)
  @group.clear_memoization(:root_ancestor) if different_root_ancestor?
  inherit_group_shared_runners_settings
  @group.save!
  # #reload is called to make sure traversal_ids are reloaded
  @group.reload # rubocop:disable Cop/ActiveRecordAssociationReload
end
# rubocop: disable CodeReuse/ActiveRecord
# Bulk-lowers the visibility of all descendant groups, projects and
# their project namespaces to the new parent's level. update_all is
# used, so model callbacks/validations are deliberately bypassed.
def update_children_and_projects_visibility
  descendants = @group.descendants.where("visibility_level > ?", @new_parent_group.visibility_level)
  Group
    .where(id: descendants.select(:id))
    .update_all(visibility_level: @new_parent_group.visibility_level)
  projects_to_update = @group
    .all_projects
    .where("visibility_level > ?", @new_parent_group.visibility_level)
  # Used in post_update_hooks in EE. Must use pluck (and not select)
  # here as after we perform the update below we won't be able to find
  # these records again.
  @updated_project_ids = projects_to_update.pluck(:id)
  # Project namespaces mirror their project's visibility and must be
  # updated alongside.
  Namespaces::ProjectNamespace
    .where(id: projects_to_update.select(:project_namespace_id))
    .update_all(visibility_level: @new_parent_group.visibility_level)
  projects_to_update
    .update_all(visibility_level: @new_parent_group.visibility_level)
  update_project_settings(@updated_project_ids)
end
# Overridden in EE
# No-op in CE; presumably EE syncs per-project settings for the
# projects whose visibility was changed by the transfer — confirm in
# the EE override.
def update_project_settings(updated_project_ids)
end
# Clears the group-level MFA requirement when the new parent's settings
# forbid subgroups from enforcing MFA. (Attribute change only; the
# caller persists it via @group.save!.)
def update_two_factor_authentication
  @group.require_two_factor_authentication = false unless namespace_parent_allows_two_factor_auth
end
# When the new parent forbids MFA enforcement for subgroups, schedule a
# background job that clears the 2FA requirement from descendants that
# still have it switched on.
def refresh_descendant_groups
  return if namespace_parent_allows_two_factor_auth

  two_factor_descendants = @group.descendants.where(require_two_factor_authentication: true)
  DisallowTwoFactorForSubgroupsWorker.perform_async(@group.id) if two_factor_descendants.any?
end
# rubocop: enable CodeReuse/ActiveRecord
# Whether the new parent's namespace settings allow subgroups to
# enforce MFA. Only safe to call when @new_parent_group is present.
def namespace_parent_allows_two_factor_auth
  @new_parent_group.namespace_settings.allow_mfa_for_subgroups
end
# After a transfer to top level the group must still have at least one
# owner; when none is left, the acting user becomes the owner.
def ensure_ownership
  promoted_to_root = !@new_parent_group
  add_owner_on_transferred_group if promoted_to_root && @group.owners.empty?
end
# Overridden in EE
# Grants the acting user the owner role so the group does not end up
# owner-less (see ensure_ownership).
def add_owner_on_transferred_group
  @group.add_owner(current_user)
end
# Recomputes project authorizations for every project whose effective
# membership changed because of the transfer: access inherited from the
# old hierarchy is removed, the new hierarchy's is added.
def refresh_project_authorizations
  project_ids = Groups::ProjectsRequiringAuthorizationsRefresh::OnTransferFinder.new(@group).execute
  AuthorizedProjectUpdate::ProjectAccessChangedService.new(project_ids).execute
end
# Aborts the transfer with a localized, user-facing message looked up
# by +message+ key (see localized_error_messages).
def raise_transfer_error(message)
  raise TransferError, localized_error_messages[message]
end
# Overridden in EE
# Maps the error keys raised by ensure_allowed_transfer to translated,
# user-facing messages.
def localized_error_messages
  {
    database_not_supported: s_('TransferGroup|Database is not supported.'),
    namespace_with_same_path: s_('TransferGroup|The parent group already has a subgroup or a project with the same path.'),
    group_is_already_root: s_('TransferGroup|Group is already a root group.'),
    same_parent_as_current: s_('TransferGroup|Group is already associated to the parent group.'),
    invalid_policies: s_("TransferGroup|You don't have enough permissions."),
    group_contains_images: s_('TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.'),
    cannot_transfer_to_subgroup: s_('TransferGroup|Cannot transfer group to one of its subgroup.'),
    group_contains_namespaced_npm_packages: s_('TransferGroup|Group contains projects with NPM packages scoped to the current root level group.'),
    no_permissions_to_migrate_crm: s_("TransferGroup|Group contains contacts/organizations and you don't have enough permissions to move them to the new root group.")
  }.freeze
end
# When the new parent restricts shared runners, the transferred group
# may not keep a more permissive setting. The downgrade is delegated to
# Groups::UpdateSharedRunnersService; a service failure aborts the
# whole transfer.
def inherit_group_shared_runners_settings
  parent_setting = @group.parent&.shared_runners_setting
  return unless parent_setting
  return unless @group.shared_runners_setting_higher_than?(parent_setting)

  result = Groups::UpdateSharedRunnersService
    .new(@group, current_user, shared_runners_setting: parent_setting)
    .execute

  raise TransferError, result[:message] unless result[:status] == :success
end
# Default (inherited) integrations belong to the old hierarchy; drop
# them and recreate from the defaults active in the new hierarchy.
# Custom (non-default) integrations are left untouched.
def update_integrations
  @group.integrations.with_default_settings.delete_all
  Integration.create_from_active_default_integrations(@group, :group_id)
end
# Enqueues propagation of each of the group's default integrations so
# descendant groups/projects pick up the (re)created defaults.
def propagate_integrations
  default_integrations = @group.integrations.with_default_settings
  default_integrations.each { |integration| PropagateIntegrationWorker.perform_async(integration.id) }
end
# Schedules an async update of the denormalized namespace columns
# (namespace_id, namespace_traversal_ids) on pending CI builds in the
# group, so the CI queue reflects the new hierarchy.
def update_pending_builds
  ::Ci::PendingBuilds::UpdateGroupWorker.perform_async(group.id, pending_builds_params)
end
# Attributes written onto the group's pending CI builds after the move
# (see update_pending_builds).
def pending_builds_params
  { namespace_traversal_ids: group.traversal_ids, namespace_id: group.id }
end
# CRM contacts and organizations live on the root group. When the
# transferred group itself used to be a root group, move its CRM
# records to the new root; otherwise there is nothing to migrate.
def update_crm_objects(was_root_group)
  return unless was_root_group

  [CustomerRelations::Contact, CustomerRelations::Organization].each do |crm_model|
    crm_model.move_to_root_group(group)
  end
end
# Issue/contact links are only valid within one root hierarchy; drop
# them when the group moved under a different root. Nothing to do when
# the group itself was the root (its CRM records move with it, see
# update_crm_objects) or when the root did not change.
def remove_issue_contacts(old_root_ancestor_id, was_root_group)
  return if was_root_group || old_root_ancestor_id == @group.root_ancestor.id

  CustomerRelations::IssueContact.delete_for_group(@group)
end
# Broadcasts that the group was moved: subscribers receive the group id
# plus the old and new root namespace ids via Gitlab::EventStore.
def publish_event(old_root_ancestor_id)
  event = ::Groups::GroupTransferedEvent.new(
    data: {
      group_id: group.id,
      old_root_namespace_id: old_root_ancestor_id,
      new_root_namespace_id: group.root_ancestor.id
    }
  )
  Gitlab::EventStore.publish(event)
end
# Namespace-scoped commit email preferences reference the root group;
# delete them when the group stops being a root group.
def remove_namespace_commit_emails(was_root_group)
  return unless was_root_group

  Users::NamespaceCommitEmail.delete_for_namespace(@group)
end
end
end
Groups::TransferService.prepend_mod_with('Groups::TransferService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Groups::TransferService, :sidekiq_inline, feature_category: :groups_and_projects do
shared_examples 'project namespace path is in sync with project path' do
it 'keeps project and project namespace attributes in sync' do
projects_with_project_namespace.each do |project|
project.reload
expect(project.full_path).to eq("#{group_full_path}/#{project.path}")
expect(project.project_namespace.full_path).to eq(project.full_path)
expect(project.project_namespace.parent).to eq(project.namespace)
expect(project.project_namespace.visibility_level).to eq(project.visibility_level)
end
end
end
let_it_be(:user) { create(:user) }
let_it_be(:new_parent_group) { create(:group, :public, :crm_enabled) }
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
let(:transfer_service) { described_class.new(group, user) }
shared_examples 'publishes a GroupTransferedEvent' do
it do
expect { transfer_service.execute(target) }
.to publish_event(Groups::GroupTransferedEvent)
.with(
group_id: group.id,
old_root_namespace_id: group.root_ancestor.id,
new_root_namespace_id: target.root_ancestor.id
)
end
end
context 'handling packages' do
let_it_be(:group) { create(:group) }
let_it_be(:new_group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
before do
group.add_owner(user)
new_group&.add_owner(user)
end
context 'with an npm package' do
let_it_be(:npm_package) { create(:npm_package, project: project, name: "@testscope/test") }
shared_examples 'transfer allowed' do
it 'allows transfer' do
transfer_service.execute(new_group)
expect(transfer_service.error).to be nil
expect(group.parent).to eq(new_group)
end
end
it_behaves_like 'transfer allowed'
context 'with a project within subgroup' do
let_it_be(:root_group) { create(:group) }
let_it_be(:group) { create(:group, parent: root_group) }
let_it_be(:project) { create(:project, namespace: group) }
before do
root_group.add_owner(user)
end
it_behaves_like 'transfer allowed'
context 'without a root namespace change' do
let_it_be(:new_group) { create(:group, parent: root_group) }
it_behaves_like 'transfer allowed'
end
context 'with namespaced packages present' do
let_it_be(:package) { create(:npm_package, project: project, name: "@#{project.root_namespace.path}/test") }
it 'does not allow transfer' do
transfer_service.execute(new_group)
expect(transfer_service.error).to eq('Transfer failed: Group contains projects with NPM packages scoped to the current root level group.')
expect(group.parent).not_to eq(new_group)
end
context 'namespaced package is pending destruction' do
let!(:group) { create(:group) }
before do
package.pending_destruction!
end
it_behaves_like 'transfer allowed'
end
end
context 'when transferring a group into a root group' do
let_it_be(:root_group) { create(:group) }
let_it_be(:group) { create(:group, parent: root_group) }
let_it_be(:new_group) { nil }
it_behaves_like 'transfer allowed'
end
end
end
context 'without an npm package' do
context 'when transferring a group into a root group' do
let(:group) { create(:group, parent: create(:group)) }
it 'allows transfer' do
transfer_service.execute(nil)
expect(transfer_service.error).to be nil
expect(group.parent).to be_nil
end
end
end
end
shared_examples 'ensuring allowed transfer for a group' do
context "when there's an exception on GitLab shell directories" do
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:update_group_attributes).and_raise(Gitlab::UpdatePathError, 'namespace directory cannot be moved')
end
create(:group_member, :owner, group: new_parent_group, user: user)
end
it 'returns false' do
expect(transfer_service.execute(new_parent_group)).to be_falsy
end
it 'adds an error on group' do
transfer_service.execute(new_parent_group)
expect(transfer_service.error).to eq('Transfer failed: namespace directory cannot be moved')
end
end
end
describe '#execute' do
context 'when transforming a group into a root group' do
let_it_be_with_reload(:group) { create(:group, :public, :nested) }
it_behaves_like 'ensuring allowed transfer for a group'
context 'when the group is already a root group' do
let(:group) { create(:group, :public) }
it 'adds an error on group' do
transfer_service.execute(nil)
expect(transfer_service.error).to eq('Transfer failed: Group is already a root group.')
end
end
context 'when the user does not have the right policies' do
let_it_be(:group_member) { create(:group_member, :guest, group: group, user: user) }
it "returns false" do
expect(transfer_service.execute(nil)).to be_falsy
end
it "adds an error on group" do
transfer_service.execute(new_parent_group)
expect(transfer_service.error).to eq("Transfer failed: You don't have enough permissions.")
end
end
context 'when there is a group with the same path' do
let_it_be(:group) { create(:group, :public, :nested, path: 'not-unique') }
before do
create(:group, path: 'not-unique')
end
it 'returns false' do
expect(transfer_service.execute(nil)).to be_falsy
end
it 'adds an error on group' do
transfer_service.execute(nil)
expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup or a project with the same path.')
end
end
context 'when the group is a subgroup and the transfer is valid' do
let_it_be(:subgroup1) { create(:group, :private, parent: group) }
let_it_be(:subgroup2) { create(:group, :internal, parent: group) }
let_it_be(:project1) { create(:project, :repository, :private, namespace: group) }
before do
transfer_service.execute(nil)
group.reload
end
it 'updates group attributes' do
expect(group.parent).to be_nil
end
it 'updates group children path' do
group.children.each do |subgroup|
expect(subgroup.full_path).to eq("#{group.path}/#{subgroup.path}")
end
end
it 'updates group projects path' do
group.projects.each do |project|
expect(project.full_path).to eq("#{group.path}/#{project.path}")
end
end
context 'when projects have project namespaces' do
let_it_be(:project1) { create(:project, :private, namespace: group) }
let_it_be(:project2) { create(:project, :private, namespace: group) }
it_behaves_like 'project namespace path is in sync with project path' do
let(:group_full_path) { group.path.to_s }
let(:projects_with_project_namespace) { [project1, project2] }
end
end
end
end
context 'when transferring a subgroup into another group' do
let_it_be_with_reload(:group) { create(:group, :public, :nested) }
it_behaves_like 'ensuring allowed transfer for a group'
context 'when the new parent group is the same as the previous parent group' do
let_it_be(:group) { create(:group, :public, :nested, parent: new_parent_group) }
it 'returns false' do
expect(transfer_service.execute(new_parent_group)).to be_falsy
end
it 'adds an error on group' do
transfer_service.execute(new_parent_group)
expect(transfer_service.error).to eq('Transfer failed: Group is already associated to the parent group.')
end
end
context 'when the user does not have the right policies' do
let_it_be(:group_member) { create(:group_member, :guest, group: group, user: user) }
it "returns false" do
expect(transfer_service.execute(new_parent_group)).to be_falsy
end
it "adds an error on group" do
transfer_service.execute(new_parent_group)
expect(transfer_service.error).to eq("Transfer failed: You don't have enough permissions.")
end
end
context 'when the parent has a group with the same path' do
before do
create(:group_member, :owner, group: new_parent_group, user: user)
group.update_attribute(:path, "not-unique")
create(:group, path: "not-unique", parent: new_parent_group)
end
it 'returns false' do
expect(transfer_service.execute(new_parent_group)).to be_falsy
end
it 'adds an error on group' do
transfer_service.execute(new_parent_group)
expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup or a project with the same path.')
end
end
context 'when the parent group has a project with the same path' do
let_it_be_with_reload(:group) { create(:group, :public, :nested, path: 'foo') }
let_it_be(:membership) { create(:group_member, :owner, group: new_parent_group, user: user) }
let_it_be(:project) { create(:project, path: 'foo', namespace: new_parent_group) }
it 'adds an error on group' do
expect(transfer_service.execute(new_parent_group)).to be_falsy
expect(transfer_service.error).to eq('Transfer failed: The parent group already has a subgroup or a project with the same path.')
end
end
context 'when projects have project namespaces' do
let_it_be(:project) { create(:project, path: 'foo', namespace: new_parent_group) }
before do
transfer_service.execute(new_parent_group)
end
it_behaves_like 'project namespace path is in sync with project path' do
let(:group_full_path) { new_parent_group.full_path.to_s }
let(:projects_with_project_namespace) { [project] }
end
end
context 'when the group is allowed to be transferred' do
let_it_be(:new_parent_group, reload: true) { create(:group, :public) }
let_it_be(:new_parent_group_integration) { create(:integrations_slack, :group, group: new_parent_group, webhook: 'http://new-group.slack.com') }
before do
allow(PropagateIntegrationWorker).to receive(:perform_async)
create(:group_member, :owner, group: new_parent_group, user: user)
transfer_service.execute(new_parent_group)
end
context 'when the group has a lower visibility than the parent group' do
let(:new_parent_group) { create(:group, :public) }
let(:group) { create(:group, :private, :nested) }
it 'does not update the visibility for the group' do
group.reload
expect(group.private?).to be_truthy
expect(group.visibility_level).not_to eq(new_parent_group.visibility_level)
end
end
context 'when the group has a higher visibility than the parent group' do
let(:new_parent_group) { create(:group, :private) }
let(:group) { create(:group, :public, :nested) }
it 'updates visibility level based on the parent group' do
group.reload
expect(group.private?).to be_truthy
expect(group.visibility_level).to eq(new_parent_group.visibility_level)
end
end
context 'with a group integration' do
let(:new_created_integration) { Integration.find_by(group: group) }
context 'with an inherited integration' do
let_it_be(:instance_integration) { create(:integrations_slack, :instance, webhook: 'http://project.slack.com') }
let_it_be(:group_integration) { create(:integrations_slack, :group, group: group, webhook: 'http://group.slack.com', inherit_from_id: instance_integration.id) }
it 'replaces inherited integrations', :aggregate_failures do
expect(new_created_integration.webhook).to eq(new_parent_group_integration.webhook)
expect(PropagateIntegrationWorker).to have_received(:perform_async).with(new_created_integration.id)
expect(Integration.count).to eq(3)
end
end
context 'with a custom integration' do
let_it_be(:group_integration) { create(:integrations_slack, :group, group: group, webhook: 'http://group.slack.com') }
it 'does not updates the integrations', :aggregate_failures do
expect { transfer_service.execute(new_parent_group) }.not_to change { group_integration.webhook }
expect(PropagateIntegrationWorker).not_to have_received(:perform_async)
end
end
end
it 'updates visibility for the group based on the parent group' do
expect(group.visibility_level).to eq(new_parent_group.visibility_level)
end
it 'updates parent group to the new parent' do
expect(group.parent).to eq(new_parent_group)
end
it 'returns the group as children of the new parent' do
expect(new_parent_group.children.count).to eq(1)
expect(new_parent_group.children.first).to eq(group)
end
it 'creates a redirect for the group' do
expect(group.redirect_routes.count).to eq(1)
end
end
context 'shared runners configuration' do
before do
create(:group_member, :owner, group: new_parent_group, user: user)
end
context 'if parent group has disabled shared runners but allows overrides' do
let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true) }
it 'calls update service' do
expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_AND_OVERRIDABLE }).and_call_original
transfer_service.execute(new_parent_group)
end
end
context 'if parent group does not allow shared runners' do
let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false) }
it 'calls update service' do
expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE }).and_call_original
transfer_service.execute(new_parent_group)
end
end
context 'if parent group allows shared runners' do
let(:group) { create(:group, :public, :nested, shared_runners_enabled: false) }
let(:new_parent_group) { create(:group, shared_runners_enabled: true) }
it 'does not call update service and keeps them disabled on the group' do
expect(Groups::UpdateSharedRunnersService).not_to receive(:new)
transfer_service.execute(new_parent_group)
expect(group.reload.shared_runners_enabled).to be_falsy
end
end
end
context 'when a group is transferred to its subgroup' do
let(:new_parent_group) { create(:group, parent: group) }
it 'does not execute the transfer' do
expect(transfer_service.execute(new_parent_group)).to be_falsy
expect(transfer_service.error).to match(/Cannot transfer group to one of its subgroup/)
end
end
context 'when transferring a group with group descendants' do
let!(:subgroup1) { create(:group, :private, parent: group) }
let!(:subgroup2) { create(:group, :internal, parent: group) }
before do
create(:group_member, :owner, group: new_parent_group, user: user)
transfer_service.execute(new_parent_group)
end
it 'updates subgroups path' do
new_parent_path = new_parent_group.path
group.children.each do |subgroup|
expect(subgroup.full_path).to eq("#{new_parent_path}/#{group.path}/#{subgroup.path}")
end
end
it 'creates redirects for the subgroups' do
expect(group.redirect_routes.count).to eq(1)
expect(subgroup1.redirect_routes.count).to eq(1)
expect(subgroup2.redirect_routes.count).to eq(1)
end
context 'when the new parent has a higher visibility than the children' do
it 'does not update the children visibility' do
expect(subgroup1.private?).to be_truthy
expect(subgroup2.internal?).to be_truthy
end
end
context 'when the new parent has a lower visibility than the children' do
let!(:subgroup1) { create(:group, :public, parent: group) }
let!(:subgroup2) { create(:group, :public, parent: group) }
let(:new_parent_group) { create(:group, :private) }
it 'updates children visibility to match the new parent' do
group.children.each do |subgroup|
expect(subgroup.private?).to be_truthy
end
end
end
end
context 'when transferring a group with project descendants' do
let!(:project1) { create(:project, :repository, :private, namespace: group) }
let!(:project2) { create(:project, :repository, :internal, namespace: group) }
before do
TestEnv.clean_test_path
create(:group_member, :owner, group: new_parent_group, user: user)
allow(transfer_service).to receive(:update_project_settings)
transfer_service.execute(new_parent_group)
end
it 'updates projects path' do
new_parent_path = new_parent_group.path
group.projects.each do |project|
expect(project.full_path).to eq("#{new_parent_path}/#{group.path}/#{project.path}")
end
end
it 'creates permanent redirects for the projects' do
expect(group.redirect_routes.count).to eq(1)
expect(project1.redirect_routes.count).to eq(1)
expect(project2.redirect_routes.count).to eq(1)
end
context 'when the new parent has a higher visibility than the projects' do
it 'does not update projects visibility' do
expect(project1.private?).to be_truthy
expect(project2.internal?).to be_truthy
end
it_behaves_like 'project namespace path is in sync with project path' do
let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
let(:projects_with_project_namespace) { [project1, project2] }
end
end
context 'when the new parent has a lower visibility than the projects' do
let!(:project1) { create(:project, :repository, :public, namespace: group) }
let!(:project2) { create(:project, :repository, :public, namespace: group) }
let!(:new_parent_group) { create(:group, :private) }
it 'updates projects visibility to match the new parent' do
group.projects.each do |project|
expect(project.private?).to be_truthy
end
end
it 'invokes #update_project_settings' do
expect(transfer_service).to have_received(:update_project_settings)
.with(group.projects.pluck(:id))
end
it_behaves_like 'project namespace path is in sync with project path' do
let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
let(:projects_with_project_namespace) { [project1, project2] }
end
end
end
context 'when transferring a group with subgroups & projects descendants' do
let!(:project1) { create(:project, :repository, :private, namespace: group) }
let!(:project2) { create(:project, :repository, :internal, namespace: group) }
let!(:subgroup1) { create(:group, :private, parent: group) }
let!(:subgroup2) { create(:group, :internal, parent: group) }
before do
TestEnv.clean_test_path
create(:group_member, :owner, group: new_parent_group, user: user)
transfer_service.execute(new_parent_group)
end
it 'updates subgroups path' do
new_parent_path = new_parent_group.path
group.children.each do |subgroup|
expect(subgroup.full_path).to eq("#{new_parent_path}/#{group.path}/#{subgroup.path}")
end
end
it 'updates projects path' do
new_parent_path = new_parent_group.path
group.projects.each do |project|
expect(project.full_path).to eq("#{new_parent_path}/#{group.path}/#{project.path}")
end
end
it 'creates redirect for the subgroups and projects' do
expect(group.redirect_routes.count).to eq(1)
expect(subgroup1.redirect_routes.count).to eq(1)
expect(subgroup2.redirect_routes.count).to eq(1)
expect(project1.redirect_routes.count).to eq(1)
expect(project2.redirect_routes.count).to eq(1)
end
it_behaves_like 'project namespace path is in sync with project path' do
let(:group_full_path) { "#{new_parent_group.path}/#{group.path}" }
let(:projects_with_project_namespace) { [project1, project2] }
end
end
context 'when transferring a group with nested groups and projects' do
let(:subgroup1) { create(:group, :private, parent: group) }
let!(:project1) { create(:project, :repository, :private, namespace: group) }
let!(:nested_subgroup) { create(:group, :private, parent: subgroup1) }
let!(:nested_project) { create(:project, :repository, :private, namespace: subgroup1) }
before do
TestEnv.clean_test_path
create(:group_member, :owner, group: new_parent_group, user: user)
end
context 'updated paths' do
let_it_be_with_reload(:group) { create(:group, :public) }
before do
transfer_service.execute(new_parent_group)
end
it 'updates subgroups path' do
new_base_path = "#{new_parent_group.path}/#{group.path}"
group.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
end
new_base_path = "#{new_parent_group.path}/#{group.path}/#{subgroup1.path}"
subgroup1.children.each do |children|
expect(children.full_path).to eq("#{new_base_path}/#{children.path}")
end
end
it 'updates projects path' do
new_parent_path = "#{new_parent_group.path}/#{group.path}"
subgroup1.projects.each do |project|
project_full_path = "#{new_parent_path}/#{project.namespace.path}/#{project.path}"
expect(project.full_path).to eq(project_full_path)
end
end
it 'creates redirect for the subgroups and projects' do
expect(group.redirect_routes.count).to eq(1)
expect(project1.redirect_routes.count).to eq(1)
expect(subgroup1.redirect_routes.count).to eq(1)
expect(nested_subgroup.redirect_routes.count).to eq(1)
expect(nested_project.redirect_routes.count).to eq(1)
end
end
context 'resets project authorizations' do
let_it_be(:old_parent_group) { create(:group) }
let_it_be_with_refind(:group) { create(:group, :private, parent: old_parent_group) }
let_it_be(:new_group_member) { create(:user) }
let_it_be(:old_group_member) { create(:user) }
let_it_be(:unique_subgroup_member) { create(:user) }
let_it_be(:direct_project_member) { create(:user) }
before do
new_parent_group.add_maintainer(new_group_member)
old_parent_group.add_maintainer(old_group_member)
subgroup1.add_developer(unique_subgroup_member)
nested_project.add_developer(direct_project_member)
group.refresh_members_authorized_projects
subgroup1.refresh_members_authorized_projects
end
it 'removes old project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: project1.id, user_id: old_group_member.id).size
}.from(1).to(0)
end
it 'adds new project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: project1.id, user_id: new_group_member.id).size
}.from(0).to(1)
end
it 'performs authorizations job' do
expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)
transfer_service.execute(new_parent_group)
end
context 'for nested projects' do
it 'removes old project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: nested_project.id, user_id: old_group_member.id).size
}.from(1).to(0)
end
it 'adds new project authorizations' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: nested_project.id, user_id: new_group_member.id).size
}.from(0).to(1)
end
it 'preserves existing project authorizations for direct project members' do
expect { transfer_service.execute(new_parent_group) }.not_to change {
ProjectAuthorization.where(project_id: nested_project.id, user_id: direct_project_member.id).count
}
end
end
context 'for nested groups with unique members' do
it 'preserves existing project authorizations' do
expect { transfer_service.execute(new_parent_group) }.not_to change {
ProjectAuthorization.where(project_id: nested_project.id, user_id: unique_subgroup_member.id).count
}
end
end
context 'for groups with many projects' do
let_it_be(:project_list) { create_list(:project, 11, :repository, :private, namespace: group) }
it 'adds new project authorizations for the user which makes a transfer' do
transfer_service.execute(new_parent_group)
expect(ProjectAuthorization.where(project_id: project1.id, user_id: user.id).size).to eq(1)
expect(ProjectAuthorization.where(project_id: nested_project.id, user_id: user.id).size).to eq(1)
end
it 'adds project authorizations for users in the new hierarchy' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: project_list.map { |project| project.id }, user_id: new_group_member.id).size
}.from(0).to(project_list.count)
end
it 'removes project authorizations for users in the old hierarchy' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project_id: project_list.map { |project| project.id }, user_id: old_group_member.id).size
}.from(project_list.count).to(0)
end
it 'schedules authorizations job' do
expect(AuthorizedProjectUpdate::ProjectRecalculateWorker).to receive(:bulk_perform_async)
.with(array_including(group.all_projects.ids.map { |id| [id] }))
transfer_service.execute(new_parent_group)
end
end
context 'transferring groups with shared_projects' do
let_it_be_with_reload(:shared_project) { create(:project, :public) }
shared_examples_for 'drops the authorizations of ancestor members from the old hierarchy' do
it 'drops the authorizations of ancestor members from the old hierarchy' do
expect { transfer_service.execute(new_parent_group) }.to change {
ProjectAuthorization.where(project: shared_project, user: old_group_member).size
}.from(1).to(0)
end
end
context 'when the group that has existing project share is transferred' do
before do
create(:project_group_link, :maintainer, project: shared_project, group: group)
end
it_behaves_like 'drops the authorizations of ancestor members from the old hierarchy'
end
context 'when the group whose subgroup has an existing project share is transferred' do
let_it_be_with_reload(:subgroup) { create(:group, :private, parent: group) }
before do
create(:project_group_link, :maintainer, project: shared_project, group: subgroup)
end
it_behaves_like 'drops the authorizations of ancestor members from the old hierarchy'
end
end
context 'when a group that has existing group share is transferred' do
let(:shared_with_group) { group }
let_it_be(:member_of_shared_with_group) { create(:user) }
let_it_be(:shared_group) { create(:group, :private) }
let_it_be(:project_in_shared_group) { create(:project, namespace: shared_group) }
before do
shared_with_group.add_developer(member_of_shared_with_group)
create(:group_group_link, :maintainer, shared_group: shared_group, shared_with_group: shared_with_group)
shared_with_group.refresh_members_authorized_projects
end
it 'retains the authorizations of direct members' do
expect { transfer_service.execute(new_parent_group) }.not_to change {
ProjectAuthorization.where(
project: project_in_shared_group,
user: member_of_shared_with_group,
access_level: Gitlab::Access::DEVELOPER).size
}.from(1)
end
end
end
end
context 'when transferring a group with two factor authentication switched on' do
before do
TestEnv.clean_test_path
create(:group_member, :owner, group: new_parent_group, user: user)
create(:group, :private, parent: group, require_two_factor_authentication: true)
group.update!(require_two_factor_authentication: true)
new_parent_group.reload # make sure traversal_ids are reloaded
end
it 'does not update group two factor authentication setting' do
transfer_service.execute(new_parent_group)
expect(group.require_two_factor_authentication).to eq(true)
end
context 'when new parent disallows two factor authentication switched on for descendants' do
before do
new_parent_group.namespace_settings.update!(allow_mfa_for_subgroups: false)
end
it 'updates group two factor authentication setting' do
transfer_service.execute(new_parent_group)
expect(group.require_two_factor_authentication).to eq(false)
end
it 'schedules update of group two factor authentication setting for descendants' do
expect(DisallowTwoFactorForSubgroupsWorker).to receive(:perform_async).with(group.id)
transfer_service.execute(new_parent_group)
end
end
end
context 'when updating the group goes wrong' do
let!(:subgroup1) { create(:group, :public, parent: group) }
let!(:subgroup2) { create(:group, :public, parent: group) }
let(:new_parent_group) { create(:group, :private) }
let!(:project1) { create(:project, :repository, :public, namespace: group) }
before do
allow(group).to receive(:save!).and_raise(ActiveRecord::RecordInvalid.new(group))
TestEnv.clean_test_path
create(:group_member, :owner, group: new_parent_group, user: user)
transfer_service.execute(new_parent_group)
end
it 'restores group and projects visibility' do
subgroup1.reload
project1.reload
expect(subgroup1.public?).to be_truthy
expect(project1.public?).to be_truthy
end
end
context 'when group has pending builds', :sidekiq_inline do
let_it_be(:project) { create(:project, :public, namespace: group.reload) }
let_it_be(:other_project) { create(:project) }
let_it_be(:pending_build) { create(:ci_pending_build, project: project) }
let_it_be(:unrelated_pending_build) { create(:ci_pending_build, project: other_project) }
before do
group.add_owner(user)
new_parent_group.add_owner(user)
end
it 'updates pending builds for the group', :aggregate_failures do
transfer_service.execute(new_parent_group)
pending_build.reload
unrelated_pending_build.reload
expect(pending_build.namespace_id).to eq(group.id)
expect(pending_build.namespace_traversal_ids).to eq(group.traversal_ids)
expect(unrelated_pending_build.namespace_id).to eq(other_project.namespace_id)
expect(unrelated_pending_build.namespace_traversal_ids).to eq(other_project.namespace.traversal_ids)
end
end
end
context 'when transferring a subgroup into root group' do
let(:group) { create(:group, :public, :nested) }
let(:subgroup) { create(:group, :public, parent: group) }
let(:transfer_service) { described_class.new(subgroup, user) }
it 'ensures there is still an owner for the transferred group' do
expect(subgroup.owners).to be_empty
transfer_service.execute(nil)
subgroup.reload
expect(subgroup.owners).to match_array(user)
end
context 'when group has explicit owner' do
let(:another_owner) { create(:user) }
let!(:another_member) { create(:group_member, :owner, group: subgroup, user: another_owner) }
it 'does not add additional owner' do
expect(subgroup.owners).to match_array(another_owner)
transfer_service.execute(nil)
subgroup.reload
expect(subgroup.owners).to match_array(another_owner)
end
end
end
context 'when a project has container images' do
let(:group) { create(:group, :public, :nested) }
let!(:container_repository) { create(:container_repository, project: project) }
subject { transfer_service.execute(new_parent_group) }
before do
group.add_owner(user)
new_parent_group.add_owner(user)
end
context 'within group' do
let(:project) { create(:project, :repository, :public, namespace: group) }
it 'does not transfer' do
expect(subject).to be false
expect(transfer_service.error).to match(/Docker images in their Container Registry/)
end
end
context 'within subgroup' do
let(:subgroup) { create(:group, parent: group) }
let(:project) { create(:project, :repository, :public, namespace: subgroup) }
it 'does not transfer' do
expect(subject).to be false
expect(transfer_service.error).to match(/Docker images in their Container Registry/)
end
end
end
context 'crm' do
let(:root_group) { create(:group, :public) }
let(:subgroup) { create(:group, :public, parent: root_group) }
let(:another_subgroup) { create(:group, :public, parent: root_group) }
let(:subsubgroup) { create(:group, :public, parent: subgroup) }
let(:root_project) { create(:project, group: root_group) }
let(:sub_project) { create(:project, group: subgroup) }
let(:another_project) { create(:project, group: another_subgroup) }
let(:subsub_project) { create(:project, group: subsubgroup) }
let!(:contacts) { create_list(:contact, 4, group: root_group) }
let!(:crm_organizations) { create_list(:crm_organization, 2, group: root_group) }
before do
create(:issue_customer_relations_contact, contact: contacts[0], issue: create(:issue, project: root_project))
create(:issue_customer_relations_contact, contact: contacts[1], issue: create(:issue, project: sub_project))
create(:issue_customer_relations_contact, contact: contacts[2], issue: create(:issue, project: another_project))
create(:issue_customer_relations_contact, contact: contacts[3], issue: create(:issue, project: subsub_project))
root_group.add_owner(user)
end
context 'moving up' do
let(:group) { subsubgroup }
it 'retains issue contacts' do
expect { transfer_service.execute(root_group) }
.not_to change { CustomerRelations::IssueContact.count }
end
it_behaves_like 'publishes a GroupTransferedEvent' do
let(:target) { root_group }
end
end
context 'moving down' do
let(:group) { subgroup }
it 'retains issue contacts' do
expect { transfer_service.execute(another_subgroup) }
.not_to change { CustomerRelations::IssueContact.count }
end
it_behaves_like 'publishes a GroupTransferedEvent' do
let(:target) { another_subgroup }
end
end
context 'moving sideways' do
let(:group) { subsubgroup }
it 'retains issue contacts' do
expect { transfer_service.execute(another_subgroup) }
.not_to change { CustomerRelations::IssueContact.count }
end
it_behaves_like 'publishes a GroupTransferedEvent' do
let(:target) { another_subgroup }
end
end
context 'moving to new root group' do
let(:group) { root_group }
before do
new_parent_group.add_owner(user)
end
it 'moves all crm objects' do
expect { transfer_service.execute(new_parent_group) }
.to change { root_group.contacts.count }.by(-4)
.and change { root_group.crm_organizations.count }.by(-2)
end
it 'retains issue contacts' do
expect { transfer_service.execute(new_parent_group) }
.not_to change { CustomerRelations::IssueContact.count }
end
it_behaves_like 'publishes a GroupTransferedEvent' do
let(:target) { new_parent_group }
end
end
context 'moving to a subgroup within a new root group' do
let(:group) { root_group }
let(:subgroup_in_new_parent_group) { create(:group, parent: new_parent_group) }
context 'with permission on the root group' do
before do
new_parent_group.add_owner(user)
end
it 'moves all crm objects' do
expect { transfer_service.execute(subgroup_in_new_parent_group) }
.to change { root_group.contacts.count }.by(-4)
.and change { root_group.crm_organizations.count }.by(-2)
end
it 'retains issue contacts' do
expect { transfer_service.execute(subgroup_in_new_parent_group) }
.not_to change { CustomerRelations::IssueContact.count }
end
it_behaves_like 'publishes a GroupTransferedEvent' do
let(:target) { subgroup_in_new_parent_group }
end
end
context 'with permission on the subgroup' do
before do
subgroup_in_new_parent_group.add_owner(user)
end
it 'raises error' do
transfer_service.execute(subgroup_in_new_parent_group)
expect(transfer_service.error).to eq("Transfer failed: Group contains contacts/organizations and you don't have enough permissions to move them to the new root group.")
end
it 'does not publish a GroupTransferedEvent' do
expect { transfer_service.execute(subgroup_in_new_parent_group) }
.not_to publish_event(Groups::GroupTransferedEvent)
end
end
end
end
context 'with namespace_commit_emails concerns' do
let_it_be(:group, reload: true) { create(:group) }
let_it_be(:target) { create(:group) }
before do
group.add_owner(user)
target.add_owner(user)
end
context 'when origin is a root group' do
before do
create_list(:namespace_commit_email, 2, namespace: group)
end
it 'deletes all namespace_commit_emails' do
expect { transfer_service.execute(target) }
.to change { group.namespace_commit_emails.count }.by(-2)
end
it_behaves_like 'publishes a GroupTransferedEvent'
end
context 'when origin is not a root group' do
let(:group) { create(:group, parent: create(:group)) }
it 'does not attempt to delete namespace_commit_emails' do
expect(Users::NamespaceCommitEmail).not_to receive(:delete_for_namespace)
transfer_service.execute(target)
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Groups
  # Service object that toggles the Auto DevOps setting on a group.
  #
  # The caller must hold :admin_group permission on the group; otherwise
  # Gitlab::Access::AccessDeniedError is raised. On success the group is
  # updated with params[:auto_devops_enabled] and the result of
  # Group#update (true/false) is returned.
  class AutoDevopsService < Groups::BaseService
    def execute
      unless can?(current_user, :admin_group, group)
        raise Gitlab::Access::AccessDeniedError
      end

      # Non-bang update: validation failures surface as a `false` return
      # value rather than an exception.
      group.update(auto_devops_enabled: params[:auto_devops_enabled])
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Groups::AutoDevopsService#execute: permission enforcement and
# propagation of the Auto DevOps flag to subgroups and projects.
RSpec.describe Groups::AutoDevopsService, '#execute', feature_category: :auto_devops do
  let_it_be(:group) { create(:group) }
  let_it_be(:user) { create(:user) }

  # '0' disables Auto DevOps; the service persists it on the group.
  let(:group_params) { { auto_devops_enabled: '0' } }
  let(:service) { described_class.new(group, user, group_params) }

  context 'when user does not have enough privileges' do
    it 'raises exception' do
      # Developer access is below the :admin_group requirement.
      group.add_developer(user)

      expect do
        service.execute
      end.to raise_exception(Gitlab::Access::AccessDeniedError)
    end
  end

  context 'when user has enough privileges' do
    before do
      group.add_owner(user)
    end

    it 'updates group auto devops enabled accordingly' do
      service.execute

      expect(group.auto_devops_enabled).to eq(false)
    end

    context 'when group has projects' do
      it 'reflects changes on projects' do
        project_1 = create(:project, namespace: group)

        service.execute

        # Projects inherit the group-level setting implicitly.
        expect(project_1).not_to have_auto_devops_implicitly_enabled
      end
    end

    context 'when group has subgroups' do
      it 'reflects changes on subgroups' do
        subgroup_1 = create(:group, parent: group)

        service.execute

        expect(subgroup_1.auto_devops_enabled?).to eq(false)
      end

      context 'when subgroups have projects' do
        it 'reflects changes on projects' do
          subgroup_1 = create(:group, parent: group)
          project_1 = create(:project, namespace: subgroup_1)

          service.execute

          # The setting cascades through the subgroup to its projects.
          expect(project_1).not_to have_auto_devops_implicitly_enabled
        end
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Groups
  # Applies a shared-runners setting (enabled, disabled-and-unoverridable,
  # or disabled-but-overridable) to a group, cascading the change to all
  # descendant groups and projects, and schedules a refresh of pending CI
  # builds when the enabled flag actually changed.
  class UpdateSharedRunnersService < Groups::BaseService
    # Validates and applies params[:shared_runners_setting].
    #
    # Returns the BaseService success hash, or an error hash when the user
    # lacks :admin_group permission (403), the setting name is unknown
    # (ArgumentError), or a record fails validation (RecordInvalid).
    def execute
      return error('Operation not allowed', 403) unless can?(current_user, :admin_group, group)

      validate_params
      update_shared_runners
      update_pending_builds_async

      success
    rescue ActiveRecord::RecordInvalid, ArgumentError => e
      # Bind to `e`, not `error`: a local named `error` would shadow the
      # BaseService#error helper referenced on the next line.
      error(e.message)
    end

    private

    # Raises ArgumentError unless the requested setting is one of the
    # recognized Namespace::SHARED_RUNNERS_SETTINGS values.
    def validate_params
      unless Namespace::SHARED_RUNNERS_SETTINGS.include?(params[:shared_runners_setting])
        raise ArgumentError, "state must be one of: #{Namespace::SHARED_RUNNERS_SETTINGS.join(', ')}"
      end
    end

    # Dispatches to the concrete transition for the requested setting.
    def update_shared_runners
      case params[:shared_runners_setting]
      when Namespace::SR_DISABLED_AND_UNOVERRIDABLE
        set_shared_runners_enabled!(false)
      when Namespace::SR_DISABLED_WITH_OVERRIDE, Namespace::SR_DISABLED_AND_OVERRIDABLE
        disable_shared_runners_and_allow_override!
      when Namespace::SR_ENABLED
        set_shared_runners_enabled!(true)
      end
    end

    # True when the shared_runners_enabled flag flipped during this save,
    # per ActiveModel::Dirty#previous_changes.
    def update_pending_builds?
      group.previous_changes.include?('shared_runners_enabled')
    end

    # After commit (or immediately when not in a transaction), syncs the
    # instance_runners_enabled flag onto the group's pending builds.
    def update_pending_builds_async
      return unless update_pending_builds?

      group.run_after_commit_or_now do |group|
        pending_builds_params = { instance_runners_enabled: group.shared_runners_enabled }
        ::Ci::UpdatePendingBuildService.new(group, pending_builds_params).execute
      end
    end

    # Sets shared runners on/off for the group, every descendant group and
    # every project in the hierarchy, clearing the override flag.
    # The root update! validates; descendants use update_all (no callbacks).
    def set_shared_runners_enabled!(enabled)
      group.update!(
        shared_runners_enabled: enabled,
        allow_descendants_override_disabled_shared_runners: false)

      group_ids = group.descendants
      unless group_ids.empty?
        Group.by_id(group_ids).update_all(
          shared_runners_enabled: enabled,
          allow_descendants_override_disabled_shared_runners: false)
      end

      group.all_projects.update_all(shared_runners_enabled: enabled)
    end

    def disable_shared_runners_and_allow_override!
      # enabled -> disabled_and_overridable
      if group.shared_runners_enabled?
        group.update!(
          shared_runners_enabled: false,
          allow_descendants_override_disabled_shared_runners: true)

        group_ids = group.descendants
        Group.by_id(group_ids).update_all(shared_runners_enabled: false) unless group_ids.empty?
        group.all_projects.update_all(shared_runners_enabled: false)
      # disabled_and_unoverridable -> disabled_and_overridable
      else
        group.update!(allow_descendants_override_disabled_shared_runners: true)
      end
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Spec for Groups::UpdateSharedRunnersService: permission checks and the
# cascade of enabled/disabled/overridable shared-runner settings through
# a group hierarchy, including pending-build refreshes.
RSpec.describe Groups::UpdateSharedRunnersService, feature_category: :groups_and_projects do
  include ReloadHelpers

  let(:user) { create(:user) }
  let(:params) { {} }
  let(:service) { described_class.new(group, user, params) }

  describe '#execute' do
    subject { service.execute }

    context 'when current_user is not the group owner' do
      let(:group) { create(:group) }
      let(:params) { { shared_runners_setting: 'enabled' } }

      before do
        # Maintainer is below the :admin_group requirement.
        group.add_maintainer(user)
      end

      it 'returns error' do
        expect(subject[:status]).to eq(:error)
        expect(subject[:message]).to eq('Operation not allowed')
        expect(subject[:http_status]).to eq(403)
      end
    end

    context 'when current_user is the group owner' do
      before do
        group.add_owner(user)
      end

      context 'enable shared Runners' do
        let(:params) { { shared_runners_setting: 'enabled' } }

        context 'when ancestor disable shared runners' do
          let(:parent) { create(:group, :shared_runners_disabled) }
          let(:group) { create(:group, :shared_runners_disabled, parent: parent) }
          let!(:project) { create(:project, shared_runners_enabled: false, group: group) }

          it 'returns an error and does not enable shared runners' do
            # Model validation rejects enabling under a disabled parent.
            expect do
              expect(subject[:status]).to eq(:error)
              expect(subject[:message]).to eq('Validation failed: Shared runners enabled cannot be enabled because parent group has shared Runners disabled')

              reload_models(parent, group, project)
            end.to not_change { parent.shared_runners_enabled }
              .and not_change { group.shared_runners_enabled }
              .and not_change { project.shared_runners_enabled }
          end
        end

        context 'when updating root group' do
          let(:group) { create(:group, :shared_runners_disabled) }
          let(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
          let!(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }

          it 'enables shared Runners for itself and descendants' do
            expect do
              expect(subject[:status]).to eq(:success)

              reload_models(group, sub_group, project)
            end.to change { group.shared_runners_enabled }.from(false).to(true)
              .and change { sub_group.shared_runners_enabled }.from(false).to(true)
              .and change { project.shared_runners_enabled }.from(false).to(true)
          end

          context 'when already allowing descendants to override' do
            let(:group) { create(:group, :shared_runners_disabled_and_overridable) }

            it 'enables shared Runners for itself and descendants' do
              # Enabling also clears the override flag on the root.
              expect do
                expect(subject[:status]).to eq(:success)

                reload_models(group, sub_group, project)
              end.to change { group.shared_runners_enabled }.from(false).to(true)
                .and change { group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
                .and change { sub_group.shared_runners_enabled }.from(false).to(true)
                .and change { project.shared_runners_enabled }.from(false).to(true)
            end
          end
        end

        context 'when group has pending builds' do
          let_it_be(:group) { create(:group, :shared_runners_disabled) }
          let_it_be(:project) { create(:project, namespace: group, shared_runners_enabled: false) }
          let_it_be(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }
          let_it_be(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: false) }

          it 'updates pending builds for the group' do
            expect(::Ci::UpdatePendingBuildService).to receive(:new).and_call_original

            subject

            expect(pending_build_1.reload.instance_runners_enabled).to be_truthy
            expect(pending_build_2.reload.instance_runners_enabled).to be_truthy
          end

          context 'when shared runners is not toggled' do
            let(:params) { { shared_runners_setting: 'invalid_enabled' } }

            it 'does not update pending builds for the group' do
              # Invalid setting name -> no flag flip -> no refresh.
              expect(::Ci::UpdatePendingBuildService).not_to receive(:new)

              subject

              expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
              expect(pending_build_2.reload.instance_runners_enabled).to be_falsey
            end
          end
        end
      end

      context 'disable shared Runners' do
        let!(:group) { create(:group) }
        let!(:sub_group) { create(:group, :shared_runners_disabled_and_overridable, parent: group) }
        let!(:sub_group2) { create(:group, parent: group) }
        let!(:project) { create(:project, group: group, shared_runners_enabled: true) }
        let!(:project2) { create(:project, group: sub_group2, shared_runners_enabled: true) }

        let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE } }

        it 'disables shared Runners for all descendant groups and projects' do
          expect do
            expect(subject[:status]).to eq(:success)

            reload_models(group, sub_group, sub_group2, project, project2)
          end.to change { group.shared_runners_enabled }.from(true).to(false)
            .and not_change { group.allow_descendants_override_disabled_shared_runners }
            .and not_change { sub_group.shared_runners_enabled }
            .and change { sub_group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
            .and change { sub_group2.shared_runners_enabled }.from(true).to(false)
            .and not_change { sub_group2.allow_descendants_override_disabled_shared_runners }
            .and change { project.shared_runners_enabled }.from(true).to(false)
            .and change { project2.shared_runners_enabled }.from(true).to(false)
        end

        context 'with override on self' do
          let(:group) { create(:group, :shared_runners_disabled_and_overridable) }

          it 'disables it' do
            expect do
              expect(subject[:status]).to eq(:success)

              group.reload
            end
              .to not_change { group.shared_runners_enabled }
              .and change { group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
          end
        end

        context 'when group has pending builds' do
          let!(:pending_build_1) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }
          let!(:pending_build_2) { create(:ci_pending_build, project: project, instance_runners_enabled: true) }

          it 'updates pending builds for the group' do
            expect(::Ci::UpdatePendingBuildService).to receive(:new).and_call_original

            subject

            expect(pending_build_1.reload.instance_runners_enabled).to be_falsey
            expect(pending_build_2.reload.instance_runners_enabled).to be_falsey
          end
        end
      end

      # Shared by the two equivalent "disabled with override" setting names.
      shared_examples 'allow descendants to override' do
        context 'top level group' do
          let!(:group) { create(:group, :shared_runners_disabled) }
          let!(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
          let!(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }

          it 'enables allow descendants to override only for itself' do
            expect do
              expect(subject[:status]).to eq(:success)

              reload_models(group, sub_group, project)
            end.to change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
              .and not_change { group.shared_runners_enabled }
              .and not_change { sub_group.allow_descendants_override_disabled_shared_runners }
              .and not_change { sub_group.shared_runners_enabled }
              .and not_change { project.shared_runners_enabled }
          end
        end

        context 'when ancestor disables shared Runners but allows to override' do
          let!(:parent) { create(:group, :shared_runners_disabled_and_overridable) }
          let!(:group) { create(:group, :shared_runners_disabled, parent: parent) }
          let!(:project) { create(:project, shared_runners_enabled: false, group: group) }

          it 'enables allow descendants to override' do
            expect do
              expect(subject[:status]).to eq(:success)

              reload_models(parent, group, project)
            end
              .to not_change { parent.allow_descendants_override_disabled_shared_runners }
              .and not_change { parent.shared_runners_enabled }
              .and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
              .and not_change { group.shared_runners_enabled }
              .and not_change { project.shared_runners_enabled }
          end
        end

        context 'when ancestor disables shared runners' do
          let(:parent) { create(:group, :shared_runners_disabled) }
          let(:group) { create(:group, :shared_runners_disabled, parent: parent) }
          let!(:project) { create(:project, shared_runners_enabled: false, group: group) }

          it 'returns an error and does not enable shared runners' do
            # Override cannot be granted under a parent that forbids it.
            expect do
              expect(subject[:status]).to eq(:error)
              expect(subject[:message]).to eq('Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')

              reload_models(parent, group, project)
            end.to not_change { parent.shared_runners_enabled }
              .and not_change { group.shared_runners_enabled }
              .and not_change { project.shared_runners_enabled }
          end
        end

        context 'top level group that has shared Runners enabled' do
          let!(:group) { create(:group, shared_runners_enabled: true) }
          let!(:sub_group) { create(:group, shared_runners_enabled: true, parent: group) }
          let!(:project) { create(:project, shared_runners_enabled: true, group: sub_group) }

          it 'enables allow descendants to override & disables shared runners everywhere' do
            expect do
              expect(subject[:status]).to eq(:success)

              reload_models(group, sub_group, project)
            end
              .to change { group.shared_runners_enabled }.from(true).to(false)
              .and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
              .and change { sub_group.shared_runners_enabled }.from(true).to(false)
              .and change { project.shared_runners_enabled }.from(true).to(false)
          end
        end
      end

      context "when using SR_DISABLED_AND_OVERRIDABLE" do
        let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_AND_OVERRIDABLE } }

        include_examples 'allow descendants to override'
      end

      context "when using SR_DISABLED_WITH_OVERRIDE" do
        let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE } }

        include_examples 'allow descendants to override'
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module Groups
  # Destroys a group and everything under it: its projects (synchronously,
  # via Projects::DestroyService), its subgroups (recursively), its
  # Mattermost chat team, and its project-bot users. Afterwards it
  # refreshes project authorizations for users who had access through
  # shares with this group, and publishes a GroupDeletedEvent.
  class DestroyService < Groups::BaseService
    DestroyError = Class.new(StandardError)

    # Enqueues the deletion on GroupDestroyWorker and logs the job id.
    def async_execute
      job_id = GroupDestroyWorker.perform_async(group.id, current_user.id)
      Gitlab::AppLogger.info("User #{current_user.id} scheduled a deletion of group ID #{group.id} with job ID #{job_id}")
    end

    # rubocop: disable CodeReuse/ActiveRecord
    # Synchronously destroys the group. Raises DestroyError when the
    # current user is blocked or when any project deletion fails.
    # Returns the (destroyed) group.
    def execute
      # TODO - add a policy check here https://gitlab.com/gitlab-org/gitlab/-/issues/353082
      raise DestroyError, "You can't delete this group because you're blocked." if current_user.blocked?

      group.projects.includes(:project_feature).each do |project|
        # Execute the destruction of the models immediately to ensure atomic cleanup.
        success = ::Projects::DestroyService.new(project, current_user).execute
        raise DestroyError, "Project #{project.id} can't be deleted" unless success
      end

      # reload the relation to prevent triggering destroy hooks on the projects again
      group.projects.reset

      group.children.each do |group|
        # This needs to be synchronous since the namespace gets destroyed below
        DestroyService.new(group, current_user).execute
      end

      group.chat_team&.remove_mattermost_team(current_user)

      # Capture the affected user ids BEFORE the destroy removes the
      # share records they are derived from.
      user_ids_for_project_authorizations_refresh = obtain_user_ids_for_project_authorizations_refresh

      destroy_associated_users

      group.destroy

      if user_ids_for_project_authorizations_refresh.present?
        UserProjectAccessChangedService
          .new(user_ids_for_project_authorizations_refresh)
          .execute
      end

      publish_event

      group
    end
    # rubocop: enable CodeReuse/ActiveRecord

    private

    def any_groups_shared_with_this_group?
      group.shared_group_links.any?
    end

    def any_projects_shared_with_this_group?
      group.project_group_links.any?
    end

    # Destroying a group automatically destroys all project authorizations directly
    # associated with the group and descendents. However, project authorizations
    # for projects and groups this group is shared with are not. Without a manual
    # refresh, the project authorization records of these users to shared projects
    # and projects within the shared groups will never be removed, causing
    # inconsistencies with access permissions.
    #
    # This method retrieves the user IDs that need to be refreshed. If only
    # groups are shared with this group, only direct members need to be refreshed.
    # If projects are also shared with the group, direct members *and* shared
    # members of other groups need to be refreshed.
    # `Group#user_ids_for_project_authorizations` returns both direct and shared
    # members' user IDs.
    def obtain_user_ids_for_project_authorizations_refresh
      return unless any_projects_shared_with_this_group? || any_groups_shared_with_this_group?
      return group.user_ids_for_project_authorizations if any_projects_shared_with_this_group?

      group.users_ids_of_direct_members
    end

    # rubocop:disable CodeReuse/ActiveRecord
    # Schedules deletion of the group's project-bot users after the
    # surrounding transaction commits; skip_authorization because the
    # bots' owning group will no longer exist.
    def destroy_associated_users
      current_user_id = current_user.id
      bot_ids = users_to_destroy

      group.run_after_commit do
        bot_ids.each do |user_id|
          DeleteUserWorker.perform_async(current_user_id, user_id, skip_authorization: true)
        end
      end
    end
    # rubocop:enable CodeReuse/ActiveRecord

    # rubocop:disable CodeReuse/ActiveRecord
    # IDs of project-bot users among the group's members and requesters.
    def users_to_destroy
      group.members_and_requesters.joins(:user)
        .merge(User.project_bot)
        .allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/422405')
        .pluck(:user_id)
    end
    # rubocop:enable CodeReuse/ActiveRecord

    # Publishes Groups::GroupDeletedEvent on the event store.
    def publish_event
      event = Groups::GroupDeletedEvent.new(
        data: {
          group_id: group.id,
          root_namespace_id: group.root_ancestor.id
        }
      )

      Gitlab::EventStore.publish(event)
    end
  end
end

Groups::DestroyService.prepend_mod_with('Groups::DestroyService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Groups::DestroyService, feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let!(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
let!(:project) { create(:project, :repository, :legacy_storage, namespace: group) }
let!(:notification_setting) { create(:notification_setting, source: group) }
let(:remove_path) { group.path + "+#{group.id}+deleted" }
let(:removed_repo) { Gitlab::Git::Repository.new(project.repository_storage, remove_path, nil, nil) }
before do
group.add_member(user, Gitlab::Access::OWNER)
end
def destroy_group(group, user, async)
if async
Groups::DestroyService.new(group, user).async_execute
else
Groups::DestroyService.new(group, user).execute
end
end
shared_examples 'group destruction' do |async|
context 'database records', :sidekiq_might_not_need_inline do
before do
destroy_group(group, user, async)
end
it { expect(Group.unscoped.all).not_to include(group) }
it { expect(Group.unscoped.all).not_to include(nested_group) }
it { expect(Project.unscoped.all).not_to include(project) }
it { expect(NotificationSetting.unscoped.all).not_to include(notification_setting) }
end
context 'bot tokens', :sidekiq_inline do
it 'initiates group bot removal', :aggregate_failures do
bot = create(:user, :project_bot)
group.add_developer(bot)
create(:personal_access_token, user: bot)
destroy_group(group, user, async)
expect(
Users::GhostUserMigration.where(user: bot, initiator_user: user)
).to be_exists
end
end
context 'mattermost team', :sidekiq_might_not_need_inline do
let!(:chat_team) { create(:chat_team, namespace: group) }
it 'destroys the team too' do
expect_next_instance_of(::Mattermost::Team) do |instance|
expect(instance).to receive(:destroy)
end
destroy_group(group, user, async)
end
end
context 'file system', :sidekiq_might_not_need_inline do
context 'Sidekiq inline' do
before do
# Run sidekiq immediately to check that renamed dir will be removed
perform_enqueued_jobs { destroy_group(group, user, async) }
end
it 'verifies that paths have been deleted' do
expect(removed_repo).not_to exist
end
end
end
context 'event store', :sidekiq_might_not_need_inline do
it 'publishes a GroupDeletedEvent' do
expect { destroy_group(group, user, async) }
.to publish_event(Groups::GroupDeletedEvent)
.with(
group_id: group.id,
root_namespace_id: group.root_ancestor.id
)
end
end
end
describe 'asynchronous delete' do
it_behaves_like 'group destruction', true
context 'Sidekiq fake' do
before do
# Don't run Sidekiq to verify that group and projects are not actually destroyed
Sidekiq::Testing.fake! { destroy_group(group, user, true) }
end
it 'verifies original paths and projects still exist' do
expect(removed_repo).not_to exist
expect(Project.unscoped.count).to eq(1)
expect(Group.unscoped.count).to eq(2)
end
end
end
describe 'synchronous delete' do
it_behaves_like 'group destruction', false
end
context 'projects in pending_delete' do
before do
project.pending_delete = true
project.save!
end
it_behaves_like 'group destruction', false
end
context 'repository removal status is taken into account' do
it 'raises exception' do
expect_next_instance_of(::Projects::DestroyService) do |destroy_service|
expect(destroy_service).to receive(:execute).and_return(false)
end
expect { destroy_group(group, user, false) }
.to raise_error(Groups::DestroyService::DestroyError, "Project #{project.id} can't be deleted")
end
end
context 'when group owner is blocked' do
before do
user.block!
end
it 'returns a more descriptive error message' do
expect { destroy_group(group, user, false) }
.to raise_error(Groups::DestroyService::DestroyError, "You can't delete this group because you're blocked.")
end
end
describe 'repository removal' do
before do
destroy_group(group, user, false)
end
context 'legacy storage' do
let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: group) }
it 'removes repository' do
expect(project.repository.raw).not_to exist
end
end
context 'hashed storage' do
let!(:project) { create(:project, :empty_repo, namespace: group) }
it 'removes repository' do
expect(project.repository.raw).not_to exist
end
end
end
describe 'authorization updates', :sidekiq_inline do
context 'for solo groups' do
context 'group is deleted' do
it 'updates project authorization' do
expect { destroy_group(group, user, false) }.to(
change { user.can?(:read_project, project) }.from(true).to(false))
end
it 'does not make use of a specific service to update project_authorizations records' do
expect(UserProjectAccessChangedService)
.not_to receive(:new).with(group.user_ids_for_project_authorizations)
destroy_group(group, user, false)
end
end
end
context 'for shared groups within different hierarchies' do
let(:group1) { create(:group, :private) }
let(:group2) { create(:group, :private) }
let(:group1_user) { create(:user) }
let(:group2_user) { create(:user) }
before do
group1.add_member(group1_user, Gitlab::Access::OWNER)
group2.add_member(group2_user, Gitlab::Access::OWNER)
end
context 'when a project is shared with a group' do
let!(:group1_project) { create(:project, :private, group: group1) }
before do
create(:project_group_link, project: group1_project, group: group2)
end
context 'and the shared group is deleted' do
it 'updates project authorizations so group2 users no longer have access', :aggregate_failures do
expect(group1_user.can?(:read_project, group1_project)).to eq(true)
expect(group2_user.can?(:read_project, group1_project)).to eq(true)
destroy_group(group2, group2_user, false)
expect(group1_user.can?(:read_project, group1_project)).to eq(true)
expect(group2_user.can?(:read_project, group1_project)).to eq(false)
end
it 'calls the service to update project authorizations only with necessary user ids' do
expect(UserProjectAccessChangedService)
.to receive(:new).with(array_including(group2_user.id)).and_call_original
destroy_group(group2, group2_user, false)
end
end
context 'and the group is shared with another group' do
let(:group3) { create(:group, :private) }
let(:group3_user) { create(:user) }
before do
group3.add_member(group3_user, Gitlab::Access::OWNER)
create(:group_group_link, shared_group: group2, shared_with_group: group3)
group3.refresh_members_authorized_projects
end
it 'updates project authorizations so group2 and group3 users no longer have access', :aggregate_failures do
expect(group1_user.can?(:read_project, group1_project)).to eq(true)
expect(group2_user.can?(:read_project, group1_project)).to eq(true)
expect(group3_user.can?(:read_project, group1_project)).to eq(true)
destroy_group(group2, group2_user, false)
expect(group1_user.can?(:read_project, group1_project)).to eq(true)
expect(group2_user.can?(:read_project, group1_project)).to eq(false)
expect(group3_user.can?(:read_project, group1_project)).to eq(false)
end
it 'calls the service to update project authorizations only with necessary user ids' do
expect(UserProjectAccessChangedService)
.to receive(:new).with(array_including(group2_user.id, group3_user.id)).and_call_original
destroy_group(group2, group2_user, false)
end
end
end
context 'when a group is shared with a group' do
let!(:group2_project) { create(:project, :private, group: group2) }
before do
create(:group_group_link, shared_group: group2, shared_with_group: group1)
group1.refresh_members_authorized_projects
end
context 'and the shared group is deleted' do
it 'updates project authorizations since the project has been deleted with the group', :aggregate_failures do
expect(group1_user.can?(:read_project, group2_project)).to eq(true)
expect(group2_user.can?(:read_project, group2_project)).to eq(true)
destroy_group(group2, group2_user, false)
expect(group1_user.can?(:read_project, group2_project)).to eq(false)
expect(group2_user.can?(:read_project, group2_project)).to eq(false)
end
it 'does not call the service to update project authorizations' do
expect(UserProjectAccessChangedService).not_to receive(:new)
destroy_group(group2, group2_user, false)
end
end
context 'the shared_with group is deleted' do
let!(:group2_subgroup) { create(:group, :private, parent: group2) }
let!(:group2_subgroup_project) { create(:project, :private, group: group2_subgroup) }
it 'updates project authorizations so users of both groups lose access', :aggregate_failures do
expect(group1_user.can?(:read_project, group2_project)).to eq(true)
expect(group2_user.can?(:read_project, group2_project)).to eq(true)
expect(group1_user.can?(:read_project, group2_subgroup_project)).to eq(true)
expect(group2_user.can?(:read_project, group2_subgroup_project)).to eq(true)
destroy_group(group1, group1_user, false)
expect(group1_user.can?(:read_project, group2_project)).to eq(false)
expect(group2_user.can?(:read_project, group2_project)).to eq(true)
expect(group1_user.can?(:read_project, group2_subgroup_project)).to eq(false)
expect(group2_user.can?(:read_project, group2_subgroup_project)).to eq(true)
end
it 'calls the service to update project authorizations only with necessary user ids' do
expect(UserProjectAccessChangedService)
.to receive(:new).with([group1_user.id]).and_call_original
destroy_group(group1, group1_user, false)
end
end
end
end
context 'for shared groups in the same group hierarchy' do
let(:shared_group) { group }
let(:shared_with_group) { nested_group }
let!(:shared_with_group_user) { create(:user) }
before do
shared_with_group.add_member(shared_with_group_user, Gitlab::Access::MAINTAINER)
create(:group_group_link, shared_group: shared_group, shared_with_group: shared_with_group)
shared_with_group.refresh_members_authorized_projects
end
context 'the shared group is deleted' do
it 'updates project authorization' do
expect { destroy_group(shared_group, user, false) }.to(
change { shared_with_group_user.can?(:read_project, project) }.from(true).to(false))
end
it 'does not make use of a specific service to update project authorizations' do
# Due to the recursive nature of `Groups::DestroyService`, `UserProjectAccessChangedService`
# will still be executed for the nested group as they fall under the same hierarchy
# and hence we need to account for this scenario.
expect(UserProjectAccessChangedService)
.to receive(:new).with(shared_with_group.users_ids_of_direct_members).and_call_original
expect(UserProjectAccessChangedService)
.not_to receive(:new).with(shared_group.users_ids_of_direct_members)
destroy_group(shared_group, user, false)
end
end
context 'the shared_with group is deleted' do
it 'updates project authorization' do
expect { destroy_group(shared_with_group, user, false) }.to(
change { shared_with_group_user.can?(:read_project, project) }.from(true).to(false))
end
it 'makes use of a specific service to update project authorizations' do
expect(UserProjectAccessChangedService)
.to receive(:new).with(shared_with_group.users_ids_of_direct_members).and_call_original
destroy_group(shared_with_group, user, false)
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class:
```ruby
# frozen_string_literal: true
module Groups
class UpdateService < Groups::BaseService
include UpdateVisibilityLevel
SETTINGS_PARAMS = [:allow_mfa_for_subgroups].freeze
def execute
reject_parent_id!
remove_unallowed_params
before_assignment_hook(group, params)
if renaming_group_with_container_registry_images?
group.errors.add(:base, container_images_error)
return false
end
return false unless valid_visibility_level_change?(group, group.visibility_attribute_value(params))
return false unless valid_share_with_group_lock_change?
return false unless valid_path_change?
return false unless update_shared_runners
handle_changes
handle_namespace_settings
group.assign_attributes(params)
begin
success = group.save
after_update if success
success
rescue Gitlab::UpdatePathError => e
group.errors.add(:base, e.message)
false
end
end
private
def valid_path_change?
return true unless group.packages_feature_enabled?
return true if params[:path].blank?
return true if group.has_parent?
return true if !group.has_parent? && group.path == params[:path]
# we have a path change on a root group:
# check that we don't have any npm package with a scope set to the group path
npm_packages = ::Packages::GroupPackagesFinder.new(current_user, group, package_type: :npm, preload_pipelines: false)
.execute
.with_npm_scope(group.path)
return true unless npm_packages.exists?
group.errors.add(:path, s_('GroupSettings|cannot change when group contains projects with NPM packages'))
false
end
def before_assignment_hook(group, params)
@full_path_before = group.full_path
@path_before = group.path
end
def renaming_group_with_container_registry_images?
renaming? && group.has_container_repository_including_subgroups?
end
def renaming?
new_path = params[:path]
new_path && new_path != @path_before
end
def container_images_error
s_("GroupSettings|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.")
end
def after_update
if group.previous_changes.include?(group.visibility_level_field) && group.private?
# don't enqueue immediately to prevent todos removal in case of a mistake
TodosDestroyer::GroupPrivateWorker.perform_in(Todo::WAIT_FOR_DELETE, group.id)
end
update_two_factor_requirement_for_subgroups
publish_event
end
def update_two_factor_requirement_for_subgroups
settings = group.namespace_settings
return if settings.allow_mfa_for_subgroups
if settings.previous_changes.include?(:allow_mfa_for_subgroups)
# enque in batches members update
DisallowTwoFactorForSubgroupsWorker.perform_async(group.id)
end
end
def reject_parent_id!
params.delete(:parent_id)
end
# overridden in EE
def remove_unallowed_params
params.delete(:emails_disabled) unless can?(current_user, :set_emails_disabled, group)
unless can?(current_user, :update_default_branch_protection, group)
params.delete(:default_branch_protection)
params.delete(:default_branch_protection_defaults)
end
end
def handle_changes
handle_settings_update
handle_crm_settings_update unless params[:crm_enabled].nil?
end
def handle_settings_update
settings_params = params.slice(*allowed_settings_params)
settings_params.merge!({ default_branch_protection: params[:default_branch_protection] }.compact)
allowed_settings_params.each { |param| params.delete(param) }
::NamespaceSettings::UpdateService.new(current_user, group, settings_params).execute
end
def handle_crm_settings_update
crm_enabled = params.delete(:crm_enabled)
return if group.crm_enabled? == crm_enabled
crm_settings = group.crm_settings || group.build_crm_settings
crm_settings.enabled = crm_enabled
crm_settings.save
end
def allowed_settings_params
SETTINGS_PARAMS
end
def valid_share_with_group_lock_change?
return true unless changing_share_with_group_lock?
return true if can?(current_user, :change_share_with_group_lock, group)
group.errors.add(:share_with_group_lock, s_('GroupSettings|cannot be disabled when the parent group "Share with group lock" is enabled, except by the owner of the parent group'))
false
end
def changing_share_with_group_lock?
return false if params[:share_with_group_lock].nil?
params[:share_with_group_lock] != group.share_with_group_lock
end
def update_shared_runners
return true if params[:shared_runners_setting].nil?
result = Groups::UpdateSharedRunnersService.new(group, current_user, shared_runners_setting: params.delete(:shared_runners_setting)).execute
return true if result[:status] == :success
group.errors.add(:update_shared_runners, result[:message])
false
end
def publish_event
return unless renaming?
event = Groups::GroupPathChangedEvent.new(
data: {
group_id: group.id,
root_namespace_id: group.root_ancestor.id,
old_path: @full_path_before,
new_path: group.full_path
}
)
Gitlab::EventStore.publish(event)
end
end
end
Groups::UpdateService.prepend_mod_with('Groups::UpdateService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Groups::UpdateService, feature_category: :groups_and_projects do
let!(:user) { create(:user) }
let!(:private_group) { create(:group, :private) }
let!(:internal_group) { create(:group, :internal) }
let!(:public_group) { create(:group, :public) }
describe "#execute" do
context 'with project' do
let!(:group) { create(:group, :public) }
let(:project) { create(:project, namespace: group) }
context 'located in a subgroup' do
let(:subgroup) { create(:group, parent: group) }
let!(:project) { create(:project, namespace: subgroup) }
before do
subgroup.add_owner(user)
end
it 'does allow a path update if there is not a root namespace change' do
expect(update_group(subgroup, user, path: 'updated')).to be true
expect(subgroup.errors[:path]).to be_empty
end
end
end
context "project visibility_level validation" do
context "public group with public projects" do
let!(:service) { described_class.new(public_group, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL) }
before do
public_group.add_member(user, Gitlab::Access::OWNER)
create(:project, :public, group: public_group)
expect(TodosDestroyer::GroupPrivateWorker).not_to receive(:perform_in)
end
it "does not change permission level" do
service.execute
expect(public_group.errors.count).to eq(1)
expect(TodosDestroyer::GroupPrivateWorker).not_to receive(:perform_in)
end
it "returns false if save failed" do
allow(public_group).to receive(:save).and_return(false)
expect(service.execute).to be_falsey
end
context 'when a project has container images' do
let(:params) { { path: SecureRandom.hex } }
let!(:container_repository) { create(:container_repository, project: project) }
subject { described_class.new(public_group, user, params).execute }
context 'within group' do
let(:project) { create(:project, group: public_group) }
context 'with path updates' do
it 'does not allow the update' do
expect(subject).to be false
expect(public_group.errors[:base].first).to match(/Docker images in their Container Registry/)
end
end
context 'with name updates' do
let(:params) { { name: 'new-name' } }
it 'allows the update' do
expect(subject).to be true
expect(public_group.reload.name).to eq('new-name')
end
end
context 'when the path does not change' do
let(:params) { { name: 'new-name', path: public_group.path } }
it 'allows the update' do
expect(subject).to be true
expect(public_group.reload.name).to eq('new-name')
end
end
end
context 'within subgroup' do
let(:subgroup) { create(:group, parent: public_group) }
let(:project) { create(:project, group: subgroup) }
it 'does not allow path updates' do
expect(subject).to be false
expect(public_group.errors[:base].first).to match(/Docker images in their Container Registry/)
end
end
end
end
context "internal group with internal project" do
let!(:service) { described_class.new(internal_group, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
before do
internal_group.add_member(user, Gitlab::Access::OWNER)
create(:project, :internal, group: internal_group)
expect(TodosDestroyer::GroupPrivateWorker).not_to receive(:perform_in)
end
it "does not change permission level" do
service.execute
expect(internal_group.errors.count).to eq(1)
end
end
context "internal group with private project" do
let!(:service) { described_class.new(internal_group, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE) }
before do
internal_group.add_member(user, Gitlab::Access::OWNER)
create(:project, :private, group: internal_group)
expect(TodosDestroyer::GroupPrivateWorker).to receive(:perform_in)
.with(Todo::WAIT_FOR_DELETE, internal_group.id)
end
it "changes permission level to private" do
service.execute
expect(internal_group.visibility_level)
.to eq(Gitlab::VisibilityLevel::PRIVATE)
end
end
end
context "with parent_id user doesn't have permissions for" do
let(:service) { described_class.new(public_group, user, parent_id: private_group.id) }
before do
service.execute
end
it 'does not update parent_id' do
updated_group = public_group.reload
expect(updated_group.parent_id).to be_nil
end
end
context 'crm_enabled param' do
context 'when no existing crm_settings' do
it 'when param not present, leave crm disabled' do
params = {}
described_class.new(public_group, user, params).execute
updated_group = public_group.reload
expect(updated_group.crm_enabled?).to be_falsey
end
it 'when param set true, enables crm' do
params = { crm_enabled: true }
described_class.new(public_group, user, params).execute
updated_group = public_group.reload
expect(updated_group.crm_enabled?).to be_truthy
end
end
context 'with existing crm_settings' do
it 'when param set true, enables crm' do
params = { crm_enabled: true }
create(:crm_settings, group: public_group)
described_class.new(public_group, user, params).execute
updated_group = public_group.reload
expect(updated_group.crm_enabled?).to be_truthy
end
it 'when param set false, disables crm' do
params = { crm_enabled: false }
create(:crm_settings, group: public_group, enabled: true)
described_class.new(public_group, user, params).execute
updated_group = public_group.reload
expect(updated_group.crm_enabled?).to be_falsy
end
it 'when param not present, crm remains disabled' do
params = {}
create(:crm_settings, group: public_group)
described_class.new(public_group, user, params).execute
updated_group = public_group.reload
expect(updated_group.crm_enabled?).to be_falsy
end
it 'when param not present, crm remains enabled' do
params = {}
create(:crm_settings, group: public_group, enabled: true)
described_class.new(public_group, user, params).execute
updated_group = public_group.reload
expect(updated_group.crm_enabled?).to be_truthy
end
end
end
end
context "unauthorized visibility_level validation" do
let!(:service) { described_class.new(internal_group, user, visibility_level: 99) }
before do
internal_group.add_member(user, Gitlab::Access::MAINTAINER)
end
it "does not change permission level" do
service.execute
expect(internal_group.errors.count).to eq(1)
end
end
context "path change validation" do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:project) { create(:project, namespace: subgroup) }
subject(:execute_update) { update_group(target_group, user, update_params) }
shared_examples 'not allowing a path update' do
let(:update_params) { { path: 'updated' } }
it 'does not allow a path update' do
target_group.add_maintainer(user)
expect(execute_update).to be false
expect(target_group.errors[:path]).to include('cannot change when group contains projects with NPM packages')
end
end
shared_examples 'allowing an update' do |on:|
let(:update_params) { { on => 'updated' } }
it "allows an update on #{on}" do
target_group.reload.add_maintainer(user)
expect(execute_update).to be true
expect(target_group.errors).to be_empty
expect(target_group[on]).to eq('updated')
end
end
context 'with namespaced npm packages' do
let_it_be(:package) { create(:npm_package, project: project, name: "@#{group.path}/test") }
context 'updating the root group' do
let_it_be_with_refind(:target_group) { group }
it_behaves_like 'not allowing a path update'
it_behaves_like 'allowing an update', on: :name
end
context 'updating the subgroup' do
let_it_be_with_refind(:target_group) { subgroup }
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
end
end
context 'with scoped npm packages' do
let_it_be(:package) { create(:npm_package, project: project, name: '@any_scope/test') }
context 'updating the root group' do
let_it_be_with_refind(:target_group) { group }
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
end
context 'updating the subgroup' do
let_it_be_with_refind(:target_group) { subgroup }
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
end
end
context 'with unscoped npm packages' do
let_it_be(:package) { create(:npm_package, project: project, name: 'test') }
context 'updating the root group' do
let_it_be_with_refind(:target_group) { group }
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
end
context 'updating the subgroup' do
let_it_be_with_refind(:target_group) { subgroup }
it_behaves_like 'allowing an update', on: :path
it_behaves_like 'allowing an update', on: :name
end
end
end
context 'when user is not group owner' do
context 'when group is private' do
before do
private_group.add_maintainer(user)
end
it 'does not update the group to public' do
result = described_class.new(private_group, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC).execute
expect(result).to eq(false)
expect(private_group.errors.count).to eq(1)
expect(private_group).to be_private
end
it 'does not update the group to public with tricky value' do
result = described_class.new(private_group, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC.to_s + 'r').execute
expect(result).to eq(false)
expect(private_group.errors.count).to eq(1)
expect(private_group).to be_private
end
end
context 'when group is public' do
before do
public_group.add_maintainer(user)
end
it 'does not update the group to private' do
result = described_class.new(public_group, user, visibility_level: Gitlab::VisibilityLevel::PRIVATE).execute
expect(result).to eq(false)
expect(public_group.errors.count).to eq(1)
expect(public_group).to be_public
end
it 'does not update the group to private with invalid string value' do
result = described_class.new(public_group, user, visibility_level: 'invalid').execute
expect(result).to eq(false)
expect(public_group.errors.count).to eq(1)
expect(public_group).to be_public
end
it 'does not update the group to private with valid string value' do
result = described_class.new(public_group, user, visibility_level: 'private').execute
expect(result).to eq(false)
expect(public_group.errors.count).to eq(1)
expect(public_group).to be_public
end
# See https://gitlab.com/gitlab-org/gitlab/-/issues/359910
it 'does not update the group to private because of Active Record typecasting' do
result = described_class.new(public_group, user, visibility_level: 'public').execute
expect(result).to eq(true)
expect(public_group.errors.count).to eq(0)
expect(public_group).to be_public
end
end
end
context 'when updating #emails_disabled' do
let(:service) { described_class.new(internal_group, user, emails_disabled: true) }
it 'updates the attribute' do
internal_group.add_member(user, Gitlab::Access::OWNER)
expect { service.execute }.to change { internal_group.emails_disabled }.to(true)
end
it 'does not update when not group owner' do
expect { service.execute }.not_to change { internal_group.emails_disabled }
end
end
context 'updating default_branch_protection' do
let(:service) do
described_class.new(internal_group, user, default_branch_protection: Gitlab::Access::PROTECTION_NONE)
end
let(:settings) { internal_group.namespace_settings }
let(:expected_settings) { Gitlab::Access::BranchProtection.protection_none.stringify_keys }
context 'for users who have the ability to update default_branch_protection' do
it 'updates default_branch_protection attribute' do
internal_group.add_owner(user)
expect { service.execute }.to change { internal_group.default_branch_protection }.from(Gitlab::Access::PROTECTION_FULL).to(Gitlab::Access::PROTECTION_NONE)
end
it 'updates default_branch_protection_defaults to match default_branch_protection' do
internal_group.add_owner(user)
expect { service.execute }.to change { settings.default_branch_protection_defaults }.from({}).to(expected_settings)
end
end
context 'for users who do not have the ability to update default_branch_protection' do
it 'does not update the attribute' do
expect { service.execute }.not_to change { internal_group.default_branch_protection }
expect { service.execute }.not_to change { internal_group.namespace_settings.default_branch_protection_defaults }
end
end
end
context 'updating default_branch_protection_defaults' do
let(:branch_protection) { ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys }
let(:service) do
described_class.new(internal_group, user, default_branch_protection_defaults: branch_protection)
end
let(:settings) { internal_group.namespace_settings }
let(:expected_settings) { branch_protection }
context 'for users who have the ability to update default_branch_protection_defaults' do
it 'updates default_branch_protection attribute' do
internal_group.add_owner(user)
expect { service.execute }.to change { internal_group.default_branch_protection_defaults }.from({}).to(expected_settings)
end
end
context 'for users who do not have the ability to update default_branch_protection_defaults' do
it 'does not update the attribute' do
expect { service.execute }.not_to change { internal_group.default_branch_protection_defaults }
expect { service.execute }.not_to change { internal_group.namespace_settings.default_branch_protection_defaults }
end
end
end
context 'EventStore' do
let(:service) { described_class.new(group, user, **params) }
let(:root_group) { create(:group, path: 'root') }
let(:group) do
create(:group, parent: root_group, path: 'old-path').tap { |g| g.add_owner(user) }
end
context 'when changing a group path' do
let(:new_path) { SecureRandom.hex }
let(:params) { { path: new_path } }
it 'publishes a GroupPathChangedEvent' do
old_path = group.full_path
expect { service.execute }
.to publish_event(Groups::GroupPathChangedEvent)
.with(
group_id: group.id,
root_namespace_id: group.root_ancestor.id,
old_path: old_path,
new_path: "root/#{new_path}"
)
end
end
context 'when not changing a group path' do
let(:params) { { name: 'very-new-name' } }
it 'does not publish a GroupPathChangedEvent' do
expect { service.execute }
.not_to publish_event(Groups::GroupPathChangedEvent)
end
end
end
context 'rename group' do
let(:new_path) { SecureRandom.hex }
let!(:service) { described_class.new(internal_group, user, path: new_path) }
before do
internal_group.add_member(user, Gitlab::Access::MAINTAINER)
create(:project, :internal, group: internal_group)
end
it 'returns true' do
expect(service.execute).to eq(true)
end
end
context 'for a subgroup' do
let(:subgroup) { create(:group, :private, parent: private_group) }
context 'when the parent group share_with_group_lock is enabled' do
before do
private_group.update_column(:share_with_group_lock, true)
end
context 'for the parent group owner' do
it 'allows disabling share_with_group_lock' do
private_group.add_owner(user)
result = described_class.new(subgroup, user, share_with_group_lock: false).execute
expect(result).to be_truthy
expect(subgroup.reload.share_with_group_lock).to be_falsey
end
end
context 'for a subgroup owner (who does not own the parent)' do
it 'does not allow disabling share_with_group_lock' do
subgroup_owner = create(:user)
subgroup.add_owner(subgroup_owner)
result = described_class.new(subgroup, subgroup_owner, share_with_group_lock: false).execute
expect(result).to be_falsey
expect(subgroup.errors.full_messages.first).to match(/cannot be disabled when the parent group "Share with group lock" is enabled, except by the owner of the parent group/)
expect(subgroup.reload.share_with_group_lock).to be_truthy
end
end
end
end
context 'change shared Runners config' do
  let(:group) { create(:group) }
  let(:project) { create(:project, shared_runners_enabled: true, group: group) }

  subject { described_class.new(group, user, shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE).execute }

  before do
    group.add_owner(user)
  end

  it 'calls the shared runners update service' do
    # Use expect_next_instance_of rather than expect_any_instance_of so the
    # stub is scoped to the instance the service actually builds — consistent
    # with the expect_next_instance_of usage elsewhere in this suite.
    expect_next_instance_of(::Groups::UpdateSharedRunnersService) do |service|
      expect(service).to receive(:execute).and_return({ status: :success })
    end

    expect(subject).to be_truthy
  end

  it 'handles errors in the shared runners update service' do
    # A non-success result from the sub-service must surface as a group error
    # and make the update return false.
    expect_next_instance_of(::Groups::UpdateSharedRunnersService) do |service|
      expect(service).to receive(:execute).and_return({ status: :error, message: 'something happened' })
    end

    expect(subject).to be_falsy
    expect(group.errors[:update_shared_runners].first).to eq('something happened')
  end
end
context 'changes allowing subgroups to establish own 2FA' do
let(:group) { create(:group) }
# Disabling allow_mfa_for_subgroups is the interesting transition: it is the
# one that triggers the async cleanup worker in the service's after_update.
let(:params) { { allow_mfa_for_subgroups: false } }
subject { described_class.new(group, user, params).execute }
it 'changes settings' do
subject
# The flag lives on namespace_settings, not on the group row itself.
expect(group.namespace_settings.reload.allow_mfa_for_subgroups).to eq(false)
end
it 'enqueues update subgroups and its members' do
# The worker is enqueued (not run inline) so member updates happen in batches.
expect(DisallowTwoFactorForSubgroupsWorker).to receive(:perform_async).with(group.id)
subject
end
end
# Convenience wrapper so examples read as `update_group(...)` instead of
# instantiating the service inline. Uses `described_class` (== Groups::UpdateService
# here) to stay consistent with the rest of this spec and survive renames.
def update_group(group, user, opts)
  described_class.new(group, user, opts).execute
end
end
|
Write an RSpec test file for the following Ruby class:
```ruby
# frozen_string_literal: true
module Groups
# Service class for counting and caching the number of open issues of a group.
class OpenIssuesCountService < Groups::CountService
PUBLIC_COUNT_KEY = 'group_public_open_issues_count'
TOTAL_COUNT_KEY = 'group_total_open_issues_count'
def clear_all_cache_keys
[cache_key(PUBLIC_COUNT_KEY), cache_key(TOTAL_COUNT_KEY)].each do |key|
Rails.cache.delete(key)
end
end
private
def cache_key_name
public_only? ? PUBLIC_COUNT_KEY : TOTAL_COUNT_KEY
end
def public_only?
!user_is_at_least_reporter?
end
def user_is_at_least_reporter?
strong_memoize(:user_is_at_least_reporter) do
group.member?(user, Gitlab::Access::REPORTER)
end
end
def relation_for_count
IssuesFinder.new(
user,
group_id: group.id,
state: 'opened',
non_archived: true,
include_subgroups: true,
public_only: public_only?
).execute
end
def issuable_key
'open_issues'
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_caching, feature_category: :groups_and_projects do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group) }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, :opened, project: project) }
let_it_be(:confidential) { create(:issue, :opened, confidential: true, project: project) }
let_it_be(:closed) { create(:issue, :closed, project: project) }
subject { described_class.new(group, user) }
describe '#relation_for_count' do
before do
allow(IssuesFinder).to receive(:new).and_call_original
end
it 'uses the IssuesFinder to scope issues' do
expect(IssuesFinder)
.to receive(:new)
.with(user, group_id: group.id, state: 'opened', non_archived: true, include_subgroups: true, public_only: true)
subject.count
end
end
describe '#count' do
context 'when user is nil' do
it 'does not include confidential issues in the issue count' do
expect(described_class.new(group).count).to eq(1)
end
end
context 'when user is provided' do
context 'when user can read confidential issues' do
before do
group.add_reporter(user)
end
it 'returns the right count with confidential issues' do
expect(subject.count).to eq(2)
end
end
context 'when user cannot read confidential issues' do
before do
group.add_guest(user)
end
it 'does not include confidential issues' do
expect(subject.count).to eq(1)
end
end
it_behaves_like 'a counter caching service with threshold'
end
end
describe '#clear_all_cache_keys' do
it 'calls `Rails.cache.delete` with the correct keys' do
expect(Rails.cache).to receive(:delete)
.with(['groups', 'open_issues_count_service', 1, group.id, described_class::PUBLIC_COUNT_KEY])
expect(Rails.cache).to receive(:delete)
.with(['groups', 'open_issues_count_service', 1, group.id, described_class::TOTAL_COUNT_KEY])
subject.clear_all_cache_keys
end
end
end
|
Write an RSpec test file for the following Ruby class:
```ruby
# frozen_string_literal: true
module Groups
class UpdateStatisticsService
attr_reader :group, :statistics
def initialize(group, statistics: [])
@group = group
@statistics = statistics
end
def execute
unless group
return ServiceResponse.error(message: 'Invalid group', http_status: 400)
end
namespace_statistics.refresh!(only: statistics.map(&:to_sym))
ServiceResponse.success(message: 'Group statistics successfully updated.')
end
private
def namespace_statistics
@namespace_statistics ||= group.namespace_statistics || group.build_namespace_statistics
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Groups::UpdateStatisticsService, feature_category: :groups_and_projects do
let_it_be(:group, reload: true) { create(:group) }
let(:statistics) { %w[wiki_size] }
subject(:service) { described_class.new(group, statistics: statistics) }
describe '#execute', :aggregate_failures do
context 'when group is nil' do
let(:group) { nil }
it 'does nothing' do
expect(NamespaceStatistics).not_to receive(:new)
result = service.execute
expect(result).to be_error
end
end
context 'with an existing group' do
context 'when namespace statistics exists for the group' do
it 'uses the existing statistics and refreshes them' do
namespace_statistics = create(:namespace_statistics, namespace: group)
expect(namespace_statistics).to receive(:refresh!).with(only: statistics.map(&:to_sym)).and_call_original
result = service.execute
expect(result).to be_success
end
end
context 'when namespace statistics does not exist for the group' do
it 'creates the statistics and refreshes them' do
expect_next_instance_of(NamespaceStatistics) do |instance|
expect(instance).to receive(:refresh!).with(only: statistics.map(&:to_sym)).and_call_original
end
result = nil
expect do
result = service.execute
end.to change { NamespaceStatistics.count }.by(1)
expect(result).to be_success
end
end
end
end
end
|