Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Members # This class serves as more of an app-wide way we add/create members # All roads to add members should take this path. class CreatorService class << self def parsed_access_level(access_level) access_levels.fetch(access_level) { access_level.to_i } end def access_levels Gitlab::Access.sym_options_with_owner end # Add members to sources with passed access option # # access can be an integer representing a access code # or symbol like :maintainer representing role # # Ex. # add_members( # sources, # user_ids, # Member::MAINTAINER # ) # # add_members( # sources, # user_ids, # :maintainer # ) # # @param sources [Group, Project, Array<Group>, Array<Project>, Group::ActiveRecord_Relation, # Project::ActiveRecord_Relation] - Can't be an array of source ids because we don't know the type of source. # @return Array<Member> def add_members(sources, invitees, access_level, **args) return [] unless invitees.present? sources = Array.wrap(sources) if sources.is_a?(ApplicationRecord) # For single source Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification.temporary_ignore_tables_in_transaction( %w[users user_preferences user_details emails identities], url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/424276' ) do Member.transaction do sources.flat_map do |source| # If this user is attempting to manage Owner members and doesn't have permission, do not allow current_user = args[:current_user] next [] if managing_owners?(current_user, access_level) && cannot_manage_owners?(source, current_user) emails, users, existing_members = parse_users_list(source, invitees) common_arguments = { source: source, access_level: access_level, existing_members: existing_members }.merge(parsed_args(args)) members = emails.map do |email| new(invitee: email, builder: InviteMemberBuilder, **common_arguments).execute end members += users.map do |user| new(invitee: user, **common_arguments).execute end members end end end end def add_member(source, invitee, access_level, **args) add_members(source, [invitee], access_level, **args).first end private def parsed_args(args) { current_user: args[:current_user], expires_at: args[:expires_at], ldap: args[:ldap] } end def managing_owners?(current_user, access_level) current_user && Gitlab::Access.sym_options_with_owner[access_level] == Gitlab::Access::OWNER end def parse_users_list(source, list) emails = [] user_ids = [] users = [] existing_members = {} list.each do |item| case item when User users << item when Integer user_ids << item when /\A\d+\Z/ user_ids << item.to_i when Devise.email_regexp emails << item end end # the below will automatically discard invalid user_ids users.concat(User.id_in(user_ids)) if user_ids.present? # de-duplicate just in case as there is no controlling if user records and ids are sent multiple times users.uniq! users_by_emails = source.users_by_emails(emails) # preloads our request store for all emails # in case emails belong to a user that is being invited by user or user_id, remove them from # emails and let users/user_ids handle it. parsed_emails = emails.select do |email| user = users_by_emails[email] !user || (users.exclude?(user) && user_ids.exclude?(user.id)) end if users.present? || users_by_emails.present? 
# helps not have to perform another query per user id to see if the member exists later on when fetching existing_members = source.members_and_requesters.with_user(users + users_by_emails.values).index_by(&:user_id) end [parsed_emails, users, existing_members] end end def initialize(invitee:, builder: StandardMemberBuilder, **args) @invitee = invitee @builder = builder @args = args @access_level = self.class.parsed_access_level(args[:access_level]) end private_class_method :new def execute find_or_build_member commit_member after_commit_tasks member end private delegate :new_record?, to: :member attr_reader :invitee, :access_level, :member, :args, :builder def assign_member_attributes member.attributes = member_attributes end def commit_member return add_commit_error unless can_commit_member? assign_member_attributes return add_member_role_error if member_role_too_high? commit_changes end def can_commit_member? # There is no current user for bulk actions, in which case anything is allowed return true if skip_authorization? if new_record? can_create_new_member? else can_update_existing_member? end end # overridden in Members::Groups::CreatorService def member_role_too_high? false end def can_create_new_member? raise NotImplementedError end def can_update_existing_member? raise NotImplementedError end # Populates the attributes of a member. # # This logic resides in a separate method so that EE can extend this logic, # without having to patch the `add_members` method directly. def member_attributes { created_by: member.created_by || current_user, access_level: access_level, expires_at: args[:expires_at] } end def commit_changes if member.request? approve_request elsif member.changed? # Calling #save triggers callbacks even if there is no change on object. # This previously caused an incident due to the hard to predict # behaviour caused by the large number of callbacks. # See https://gitlab.com/gitlab-com/gl-infra/production/-/issues/6351 # and https://gitlab.com/gitlab-org/gitlab/-/merge_requests/80920#note_911569038 # for details. member.save end end def after_commit_tasks # hook for overriding in other uses end def approve_request ::Members::ApproveAccessRequestService.new(current_user, access_level: access_level) .execute( member, skip_authorization: ldap || skip_authorization?, skip_log_audit_event: ldap ) end def current_user args[:current_user] end def skip_authorization? !current_user end def add_commit_error msg = if new_record? _('not authorized to create member') else _('not authorized to update member') end member.errors.add(:base, msg) end def add_member_role_error msg = _("the member access level can't be higher than the current user's one") member.errors.add(:base, msg) end def find_or_build_member @member = builder.new(source, invitee, existing_members).execute end def ldap args[:ldap] || false end def source args[:source] end def existing_members args[:existing_members] || {} end end end Members::CreatorService.prepend_mod_with('Members::CreatorService') ```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Members::CreatorService, feature_category: :groups_and_projects do
  let_it_be(:source, reload: true) { create(:group, :public) }
  let_it_be(:member_type) { GroupMember }
  let_it_be(:user) { create(:user) }
  let_it_be(:current_user) { create(:user) }

  describe '#execute' do
    it 'raises error for new member on authorization check implementation' do
      expect do
        described_class.add_member(source, user, :maintainer, current_user: current_user)
      end.to raise_error(NotImplementedError)
    end

    it 'raises error for an existing member on authorization check implementation' do
      source.add_developer(user)

      expect do
        described_class.add_member(source, user, :maintainer, current_user: current_user)
      end.to raise_error(NotImplementedError)
    end
  end
end
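The response above only exercises `#execute`. A minimal sketch of additional examples for the class-level helpers, assuming the standard `Gitlab::Access` constants (these examples are not part of the original response):

```ruby
# Sketch only: extra examples for Members::CreatorService class-level helpers,
# assuming standard Gitlab::Access constants.
describe '.parsed_access_level' do
  it 'maps a role symbol to its integer access level' do
    expect(described_class.parsed_access_level(:maintainer)).to eq(Gitlab::Access::MAINTAINER)
  end

  it 'falls back to #to_i for values missing from the access level map' do
    expect(described_class.parsed_access_level('40')).to eq(40)
  end
end

describe '.access_levels' do
  it 'returns the symbol-to-integer map including owner' do
    expect(described_class.access_levels).to eq(Gitlab::Access.sym_options_with_owner)
  end
end
```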
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module Members
  class InviteService < Members::CreateService
    extend ::Gitlab::Utils::Override

    def initialize(*args)
      super

      @invites += parsed_emails

      @errors = {}
    end

    private

    alias_method :formatted_errors, :errors

    def parsed_emails
      # can't put this in the initializer since `invites_from_params` is called in super class
      # and needs it
      @parsed_emails ||= (formatted_param(params[:email]) || [])
    end

    def formatted_param(parameter)
      parameter&.split(',')&.uniq&.flatten
    end

    def validate_invitable!
      super

      return if params[:email].blank?

      # we need the below due to add_member hitting Members::CreatorService.parse_users_list and ignoring invalid emails
      # ideally we wouldn't need this, but we can't really change the add_members method
      invalid_emails.each { |email| errors[email] = s_('AddMember|Invite email is invalid') }
    end

    def invalid_emails
      parsed_emails.each_with_object([]) do |email, invalid|
        next if Member.valid_email?(email)

        invalid << email
        @invites.delete(email)
      end
    end

    override :blank_invites_message
    def blank_invites_message
      s_('AddMember|Invites cannot be blank')
    end

    override :add_error_for_member
    def add_error_for_member(member, existing_errors)
      errors[invited_object(member)] = all_member_errors(member, existing_errors).to_sentence
    end

    def invited_object(member)
      return member.invite_email if member.invite_email

      # There is a case where someone was invited by email, but the `user` record exists.
      # The member record returned will not have an invite_email attribute defined since
      # the CreatorService finds `user` record sometimes by email.
      # At that point we lose the info of whether this invite was done by `user` or by email.
      # Here we will give preference to check invites by user_id first.
      # There is also a case where a user could be invited by their email and
      # at the same time via the API in the same request.
      # This would mean the same user is invited as user_id and email.
      # However, that isn't as likely from the UI at least since the token generator checks
      # for that case and doesn't allow email being used if the user exists as a record already.
      if member.user_id.to_s.in?(invites)
        member.user.username
      else
        member.user.all_emails.detect { |email| email.in?(invites) }
      end
    end
  end
end
```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_shared_state, :sidekiq_inline, feature_category: :groups_and_projects do let_it_be(:project, reload: true) { create(:project) } let_it_be(:user) { project.first_owner } let_it_be(:project_user) { create(:user) } let_it_be(:user_invited_by_id) { create(:user) } let_it_be(:namespace) { project.namespace } let(:params) { {} } let(:base_params) { { access_level: Gitlab::Access::GUEST, source: project, invite_source: '_invite_source_' } } subject(:result) { described_class.new(user, base_params.merge(params)).execute } context 'when there is a valid member invited' do let(:params) { { email: '[email protected]' } } it 'successfully creates a member' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) end it_behaves_like 'records an onboarding progress action', :user_added end context 'when email belongs to an existing user as a confirmed secondary email' do let(:secondary_email) { create(:email, :confirmed, email: '[email protected]', user: project_user) } let(:params) { { email: secondary_email.email } } it 'adds an existing user to members', :aggregate_failures do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) expect(project.users).to include project_user expect(project.members.last).not_to be_invite end end context 'when email belongs to an existing user as an unconfirmed secondary email' do let(:unconfirmed_secondary_email) { create(:email, email: '[email protected]', user: project_user) } let(:params) { { email: unconfirmed_secondary_email.email } } it 'does not link the email with any user and successfully creates a member as an invite for that email' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) expect(project.users).not_to include project_user expect(project.members.last).to be_invite end end context 'when invites are passed as array' do context 'with emails' do let(:params) { { email: %w[[email protected] [email protected]] } } it 'successfully creates members' do expect_to_create_members(count: 2) expect(result[:status]).to eq(:success) end end context 'with user_id as integers' do let(:params) { { user_id: [project_user.id, user_invited_by_id.id] } } it 'successfully creates members' do expect_to_create_members(count: 2) expect(result[:status]).to eq(:success) end end context 'with user_id as strings' do let(:params) { { user_id: [project_user.id.to_s, user_invited_by_id.id.to_s] } } it 'successfully creates members' do expect_to_create_members(count: 2) expect(result[:status]).to eq(:success) end end context 'with a mixture of emails and user_id' do let(:params) do { user_id: [project_user.id, user_invited_by_id.id], email: %w[[email protected] [email protected]] } end it 'successfully creates members' do expect_to_create_members(count: 4) expect(result[:status]).to eq(:success) end end end context 'with multiple invites passed as strings' do context 'with emails' do let(:params) { { email: '[email protected],[email protected]' } } it 'successfully creates members' do expect_to_create_members(count: 2) expect(result[:status]).to eq(:success) end end context 'with user_id' do let(:params) { { user_id: "#{project_user.id},#{user_invited_by_id.id}" } } it 'successfully creates members' do expect_to_create_members(count: 2) expect(result[:status]).to eq(:success) end end context 'with a mixture of emails and user_id' do let(:params) do { user_id: 
"#{project_user.id},#{user_invited_by_id.id}", email: '[email protected],[email protected]' } end it 'successfully creates members' do expect_to_create_members(count: 4) expect(result[:status]).to eq(:success) end end end context 'when invites formats are mixed' do context 'when user_id is an array and emails is a string' do let(:params) do { user_id: [project_user.id, user_invited_by_id.id], email: '[email protected],[email protected]' } end it 'successfully creates members' do expect_to_create_members(count: 4) expect(result[:status]).to eq(:success) end end context 'when user_id is a string and emails is an array' do let(:params) do { user_id: "#{project_user.id},#{user_invited_by_id.id}", email: %w[[email protected] [email protected]] } end it 'successfully creates members' do expect_to_create_members(count: 4) expect(result[:status]).to eq(:success) end end end context 'when invites are passed in different formats' do context 'when emails are passed as an empty string' do let(:params) { { email: '' } } it 'returns an error' do expect_not_to_create_members expect(result[:message]).to eq('Invites cannot be blank') end end context 'when user_id are passed as an empty string' do let(:params) { { user_id: '' } } it 'returns an error' do expect_not_to_create_members expect(result[:message]).to eq('Invites cannot be blank') end end context 'when user_id and emails are both passed as empty strings' do let(:params) { { user_id: '', email: '' } } it 'returns an error' do expect_not_to_create_members expect(result[:message]).to eq('Invites cannot be blank') end end context 'when user_id is passed as an integer' do let(:params) { { user_id: project_user.id } } it 'successfully creates member' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) end end context 'when invite params are not included' do it 'returns an error' do expect_not_to_create_members expect(result[:message]).to eq('Invites cannot be blank') end end end context 'when email is not a valid email format' do context 'with singular email' do let(:params) { { email: '_bogus_' } } it 'returns an error' do expect_not_to_create_members expect(result[:status]).to eq(:error) expect(result[:message][params[:email]]).to eq("Invite email is invalid") end it_behaves_like 'does not record an onboarding progress action' end context 'with user_id and singular invalid email' do let(:params) { { user_id: project_user.id, email: '_bogus_' } } it 'has partial success' do expect_to_create_members(count: 1) expect(project.users).to include project_user expect(result[:status]).to eq(:error) expect(result[:message][params[:email]]).to eq("Invite email is invalid") end end context 'with email that has trailing spaces' do let(:params) { { email: ' [email protected] ' } } it 'returns an error' do expect_not_to_create_members expect(result[:status]).to eq(:error) expect(result[:message][params[:email]]).to eq("Invite email is invalid") end it_behaves_like 'does not record an onboarding progress action' end end context 'with duplicate invites' do context 'with duplicate emails' do let(:params) { { email: '[email protected],[email protected]' } } it 'only creates one member per unique invite' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) end end context 'with duplicate user ids' do let(:params) { { user_id: "#{project_user.id},#{project_user.id}" } } it 'only creates one member per unique invite' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) end end context 'with 
duplicate member by adding as user id and email' do let(:params) { { user_id: project_user.id, email: project_user.email } } it 'only creates one member per unique invite' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) end end end context 'when observing invite limits' do context 'with emails and in general' do let_it_be(:emails) { Array(1..101).map { |n| "email#{n}@example.com" } } context 'when over the allowed default limit of emails' do let(:params) { { email: emails } } it 'limits the number of emails to 100' do expect_not_to_create_members expect(result[:message]).to eq('Too many users specified (limit is 100)') end end context 'when over the allowed custom limit of emails' do let(:params) { { email: '[email protected],[email protected]', limit: 1 } } it 'limits the number of emails to the limit supplied' do expect_not_to_create_members expect(result[:message]).to eq('Too many users specified (limit is 1)') end end context 'when limit allowed is disabled via limit param' do let(:params) { { email: emails, limit: -1 } } it 'does not limit number of emails' do expect_to_create_members(count: 101) expect(result[:status]).to eq(:success) end end end context 'with user_id' do let(:user_id) { 1.upto(101).to_a.join(',') } let(:params) { { user_id: user_id } } it 'limits the number of users to 100' do expect_not_to_create_members expect(result[:message]).to eq('Too many users specified (limit is 100)') end end end context 'with an existing user' do context 'with email' do let(:params) { { email: project_user.email } } it 'adds an existing user to members' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) expect(project.users).to include project_user end end context 'with unconfirmed primary email' do let_it_be(:unconfirmed_user) { create(:user, :unconfirmed) } let(:params) { { email: unconfirmed_user.email } } it 'adds a new member as an invite for unconfirmed primary email' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) expect(project.users).not_to include unconfirmed_user expect(project.members.last).to be_invite end end context 'with user_id' do let(:params) { { user_id: project_user.id } } it_behaves_like 'records an onboarding progress action', :user_added it 'adds an existing user to members' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) expect(project.users).to include project_user end end end context 'when access level is not valid' do context 'with email' do let(:params) { { email: project_user.email, access_level: -1 } } it 'returns an error' do expect_not_to_create_members expect(result[:message][project_user.email]).to eq("Access level is not included in the list") end end context 'with user_id' do let(:params) { { user_id: user_invited_by_id.id, access_level: -1 } } it 'returns an error' do expect_not_to_create_members expect(result[:message][user_invited_by_id.username]).to eq("Access level is not included in the list") end end context 'with a mix of user_id and email' do let(:params) { { user_id: user_invited_by_id.id, email: project_user.email, access_level: -1 } } it 'returns errors' do expect_not_to_create_members expect(result[:message][project_user.email]).to eq("Access level is not included in the list") expect(result[:message][user_invited_by_id.username]).to eq("Access level is not included in the list") end end end context 'when member already exists' do context 'with email' do let!(:invited_member) { create(:project_member, :guest, :invited, 
project: project) } let(:params) do { email: "#{invited_member.invite_email},#{project_user.email}", access_level: ProjectMember::MAINTAINER } end it 'adds new email and allows already invited members to be re-invited by email and updates the member access' do expect_to_create_members(count: 1) expect(result[:status]).to eq(:success) expect(project.users).to include project_user expect(invited_member.reset.access_level).to eq ProjectMember::MAINTAINER end end context 'when email is already a member with a user on the project' do let!(:existing_member) { create(:project_member, :guest, project: project) } let(:params) { { email: existing_member.user.email.to_s, access_level: ProjectMember::MAINTAINER } } it 'allows re-invite of an already invited email and updates the access_level' do expect { result }.not_to change(ProjectMember, :count) expect(result[:status]).to eq(:success) expect(existing_member.reset.access_level).to eq ProjectMember::MAINTAINER end context 'when email belongs to an existing user as a confirmed secondary email' do let(:secondary_email) { create(:email, :confirmed, email: '[email protected]', user: existing_member.user) } let(:params) { { email: secondary_email.email.to_s } } it 'allows re-invite to an already invited email' do expect_to_create_members(count: 0) expect(result[:status]).to eq(:success) end end end context 'with user_id that already exists' do let!(:existing_member) { create(:project_member, project: project, user: project_user) } let(:params) { { user_id: existing_member.user_id } } it 'does not add the member again and is successful' do expect_to_create_members(count: 0) expect(project.users).to include project_user end end context 'with user_id that already exists with a lower access_level' do let!(:existing_member) { create(:project_member, :developer, project: project, user: project_user) } let(:params) { { user_id: existing_member.user_id, access_level: ProjectMember::MAINTAINER } } it 'does not add the member again and updates the access_level' do expect_to_create_members(count: 0) expect(project.users).to include project_user expect(existing_member.reset.access_level).to eq ProjectMember::MAINTAINER end end context 'with user_id that already exists with a higher access_level' do let!(:existing_member) { create(:project_member, :developer, project: project, user: project_user) } let(:params) { { user_id: existing_member.user_id, access_level: ProjectMember::GUEST } } it 'does not add the member again and updates the access_level' do expect_to_create_members(count: 0) expect(project.users).to include project_user expect(existing_member.reset.access_level).to eq ProjectMember::GUEST end end context 'with user_id that already exists in a parent group' do let_it_be(:group) { create(:group) } let_it_be(:group_member) { create(:group_member, :developer, source: group, user: project_user) } let_it_be(:project, reload: true) { create(:project, group: group) } let_it_be(:user) { project.creator } before_all do project.add_maintainer(user) end context 'when access_level is lower than inheriting member' do let(:params) { { user_id: group_member.user_id, access_level: ProjectMember::GUEST } } it 'does not add the member and returns an error' do msg = "Access level should be greater than or equal " \ "to Developer inherited membership from group #{group.name}" expect_not_to_create_members expect(result[:message][project_user.username]).to eq msg end end context 'when access_level is the same as the inheriting member' do let(:params) { { user_id: 
group_member.user_id, access_level: ProjectMember::DEVELOPER } } it 'adds the member with correct access_level' do expect_to_create_members(count: 1) expect(project.users).to include project_user expect(project.project_members.last.access_level).to eq ProjectMember::DEVELOPER end end context 'when access_level is greater than the inheriting member' do let(:params) { { user_id: group_member.user_id, access_level: ProjectMember::MAINTAINER } } it 'adds the member with correct access_level' do expect_to_create_members(count: 1) expect(project.users).to include project_user expect(project.project_members.last.access_level).to eq ProjectMember::MAINTAINER end end end end def expect_to_create_members(count:) expect { result }.to change(ProjectMember, :count).by(count) end def expect_not_to_create_members expect { result }.not_to change(ProjectMember, :count) expect(result[:status]).to eq(:error) end end
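For context, a hypothetical invocation of the service under test, mirroring `base_params` from the spec above; the email addresses are placeholder values and are not taken from the original spec:

```ruby
# Hypothetical usage sketch of Members::InviteService, mirroring base_params
# above. Email addresses are placeholders.
Members::InviteService.new(
  project.first_owner,
  { access_level: Gitlab::Access::GUEST,
    source: project,
    invite_source: '_invite_source_',
    email: '[email protected],[email protected]' }
).execute
```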
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Members class DestroyService < Members::BaseService include Gitlab::ExclusiveLeaseHelpers def execute( member, skip_authorization: false, skip_subresources: false, unassign_issuables: false, destroy_bot: false, skip_saml_identity: false ) unless skip_authorization raise Gitlab::Access::AccessDeniedError unless authorized?(member, destroy_bot) raise Gitlab::Access::AccessDeniedError if destroying_member_with_owner_access_level?(member) && cannot_revoke_owner_responsibilities_from_member_in_project?(member) end @skip_auth = skip_authorization if a_group_owner?(member) process_destroy_of_group_owner_member(member, skip_subresources, skip_saml_identity) else destroy_member(member) destroy_data_related_to_member(member, skip_subresources, skip_saml_identity) end enqueue_jobs_that_needs_to_be_run_only_once_per_hierarchy(member, unassign_issuables) member end # We use this to mark recursive calls made to this service from within the same service. # We do this so as to help us run some tasks that needs to be run only once per hierarchy, and not recursively. def mark_as_recursive_call @recursive_call = true end private # These actions need to be executed only once per hierarchy because the underlying services # apply these actions to the entire hierarchy anyway, so there is no need to execute them recursively. def enqueue_jobs_that_needs_to_be_run_only_once_per_hierarchy(member, unassign_issuables) return if recursive_call? enqueue_cleanup_jobs_once_per_heirarchy(member, unassign_issuables) end def enqueue_cleanup_jobs_once_per_heirarchy(member, unassign_issuables) enqueue_delete_todos(member) enqueue_unassign_issuables(member) if unassign_issuables end def recursive_call? @recursive_call == true end def process_destroy_of_group_owner_member(member, skip_subresources, skip_saml_identity) # Deleting 2 different group owners via the API in quick succession could lead to # wrong results for the `last_owner?` check due to race conditions. To prevent this # we wrap both the last_owner? check and the deletes of owners within a lock. last_group_owner = true in_lock("delete_members:#{member.source.class}:#{member.source.id}", sleep_sec: 0.1.seconds) do break if member.source.last_owner?(member.user) last_group_owner = false destroy_member(member) end # deletion of related data does not have to be within the lock. destroy_data_related_to_member(member, skip_subresources, skip_saml_identity) unless last_group_owner end def destroy_member(member) member.destroy end def destroy_data_related_to_member(member, skip_subresources, skip_saml_identity) member.user&.invalidate_cache_counts delete_member_associations(member, skip_subresources, skip_saml_identity) end def a_group_owner?(member) member.is_a?(GroupMember) && member.owner? end def delete_member_associations(member, skip_subresources, skip_saml_identity) if member.request? && member.user != current_user notification_service.decline_access_request(member) end delete_subresources(member) unless skip_subresources delete_project_invitations_by(member) unless skip_subresources resolve_access_request_todos(member) after_execute(member: member, skip_saml_identity: skip_saml_identity) end def authorized?(member, destroy_bot) return can_destroy_bot_member?(member) if destroy_bot if member.request? 
return can_destroy_member_access_request?(member) || can_withdraw_member_access_request?(member) end can_destroy_member?(member) end def delete_subresources(member) return unless member.is_a?(GroupMember) && member.user && member.group delete_project_members(member) delete_subgroup_members(member) delete_invited_members(member) end def delete_project_members(member) groups = member.group.self_and_descendants destroy_project_members(ProjectMember.in_namespaces(groups).with_user(member.user)) end def delete_subgroup_members(member) groups = member.group.descendants destroy_group_members(GroupMember.of_groups(groups).with_user(member.user)) end def delete_invited_members(member) groups = member.group.self_and_descendants destroy_group_members(GroupMember.of_groups(groups).not_accepted_invitations_by_user(member.user)) destroy_project_members(ProjectMember.in_namespaces(groups).not_accepted_invitations_by_user(member.user)) end def destroy_project_members(members) members.each do |project_member| service = self.class.new(current_user) service.mark_as_recursive_call service.execute(project_member, skip_authorization: @skip_auth) end end def destroy_group_members(members) members.each do |group_member| service = self.class.new(current_user) service.mark_as_recursive_call service.execute(group_member, skip_authorization: @skip_auth, skip_subresources: true) end end def delete_project_invitations_by(member) return unless member.is_a?(ProjectMember) && member.user && member.project members_to_delete = member.project.members.not_accepted_invitations_by_user(member.user) destroy_project_members(members_to_delete) end def can_destroy_member?(member) can?(current_user, destroy_member_permission(member), member) end def can_destroy_bot_member?(member) can?(current_user, destroy_bot_member_permission(member), member) end def can_destroy_member_access_request?(member) can?(current_user, :admin_member_access_request, member.source) end def can_withdraw_member_access_request?(member) can?(current_user, :withdraw_member_access_request, member) end def destroying_member_with_owner_access_level?(member) member.owner? end def destroy_member_permission(member) case member when GroupMember :destroy_group_member when ProjectMember :destroy_project_member else raise "Unknown member type: #{member}!" end end def destroy_bot_member_permission(member) raise "Unsupported bot member type: #{member}" unless member.is_a?(ProjectMember) :destroy_project_bot_member end def enqueue_unassign_issuables(member) source_type = member.is_a?(GroupMember) ? 'Group' : 'Project' member.run_after_commit_or_now do MembersDestroyer::UnassignIssuablesWorker.perform_async(member.user_id, member.source_id, source_type) end end end end Members::DestroyService.prepend_mod_with('Members::DestroyService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Members::DestroyService, feature_category: :groups_and_projects do let(:current_user) { create(:user) } let(:member_user) { create(:user) } let(:group) { create(:group, :public) } let(:group_project) { create(:project, :public, group: group) } let(:opts) { {} } shared_examples 'a service raising ActiveRecord::RecordNotFound' do it 'raises ActiveRecord::RecordNotFound' do expect { described_class.new(current_user).execute(member) }.to raise_error(ActiveRecord::RecordNotFound) end end shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do it 'raises Gitlab::Access::AccessDeniedError' do expect { described_class.new(current_user).execute(member) }.to raise_error(Gitlab::Access::AccessDeniedError) end end shared_examples 'a service destroying a member' do before do type = member.is_a?(GroupMember) ? 'Group' : 'Project' expect(TodosDestroyer::EntityLeaveWorker).to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, member.user_id, member.source_id, type) expect(MembersDestroyer::UnassignIssuablesWorker).to receive(:perform_async).with(member.user_id, member.source_id, type) if opts[:unassign_issuables] end it 'destroys the member' do expect { described_class.new(current_user).execute(member, **opts) }.to change { member.source.members_and_requesters.count }.by(-1) end it 'destroys member notification_settings' do if member_user.notification_settings.any? expect { described_class.new(current_user).execute(member, **opts) } .to change { member_user.notification_settings.count }.by(-1) else expect { described_class.new(current_user).execute(member, **opts) } .not_to change { member_user.notification_settings.count } end end it 'resolves the access request todos for the owner' do expect_next_instance_of(described_class) do |instance| expect(instance).to receive(:resolve_access_request_todos).with(member) end described_class.new(current_user).execute(member, **opts) end end shared_examples 'a service destroying a member with access' do it_behaves_like 'a service destroying a member' it 'invalidates cached counts for assigned issues and merge requests', :aggregate_failures, :sidekiq_might_not_need_inline do create(:issue, project: group_project, assignees: [member_user]) create(:merge_request, source_project: group_project, assignees: [member_user]) create(:todo, :pending, project: group_project, user: member_user) create(:todo, :done, project: group_project, user: member_user) expect(member_user.assigned_open_merge_requests_count).to be(1) expect(member_user.assigned_open_issues_count).to be(1) expect(member_user.todos_pending_count).to be(1) expect(member_user.todos_done_count).to be(1) service = described_class.new(current_user) if opts[:unassign_issuables] expect(service).to receive(:enqueue_unassign_issuables).with(member) end service.execute(member, **opts) expect(member_user.assigned_open_merge_requests_count).to be(0) expect(member_user.assigned_open_issues_count).to be(0) expect(member_user.todos_pending_count).to be(0) expect(member_user.todos_done_count).to be(0) unless opts[:unassign_issuables] expect(member_user.assigned_merge_requests.opened.count).to be(1) expect(member_user.assigned_issues.opened.count).to be(1) end end end shared_examples 'a service destroying an access request of another user' do it_behaves_like 'a service destroying a member' it 'calls Member#after_decline_request' do expect_any_instance_of(NotificationService).to receive(:decline_access_request).with(member) 
described_class.new(current_user).execute(member, **opts) end end shared_examples 'a service destroying an access request of self' do it_behaves_like 'a service destroying a member' context 'when current user is the member' do it 'does not call Member#after_decline_request' do expect_any_instance_of(NotificationService).not_to receive(:decline_access_request).with(member) described_class.new(current_user).execute(member, **opts) end end end context 'With ExclusiveLeaseHelpers' do include ExclusiveLeaseHelpers let(:lock_key) do "delete_members:#{member_to_delete.source.class}:#{member_to_delete.source.id}" end let(:timeout) { 1.minute } let(:service_object) { described_class.new(current_user) } subject(:destroy_member) { service_object.execute(member_to_delete, **opts) } context 'for group members' do before do group.add_owner(current_user) end context 'deleting group owners' do let!(:member_to_delete) { group.add_owner(member_user) } context 'locking to avoid race conditions' do it 'tries to perform the delete within a lock' do expect_to_obtain_exclusive_lease(lock_key, timeout: timeout) destroy_member end context 'based on status of the lock' do context 'when lock is obtained' do it 'destroys the membership' do expect_to_obtain_exclusive_lease(lock_key, timeout: timeout) expect { destroy_member }.to change { group.members.count }.by(-1) end end context 'when the lock cannot be obtained' do before do stub_exclusive_lease_taken(lock_key, timeout: timeout) end it 'raises error' do expect { destroy_member }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError) end end end end end context 'deleting group members that are not owners' do let!(:member_to_delete) { group.add_developer(member_user) } it 'does not try to perform the deletion of the member within a lock' do # We need to account for other places involved in the Member deletion process that # uses ExclusiveLease. # 1. `UpdateHighestRole` concern uses locks to peform work # whenever a Member is committed, so that needs to be accounted for. lock_key_for_update_highest_role = "update_highest_role:#{member_to_delete.user_id}" expect(Gitlab::ExclusiveLease) .to receive(:new).with(lock_key_for_update_highest_role, timeout: 10.minutes.to_i).and_call_original # 2. `Users::RefreshAuthorizedProjectsService` also uses locks to perform work, # whenever a user's authorizations has to be refreshed, so that needs to be accounted for as well. lock_key_for_authorizations_refresh = "refresh_authorized_projects:#{member_to_delete.user_id}" expect(Gitlab::ExclusiveLease) .to receive(:new).with(lock_key_for_authorizations_refresh, timeout: 1.minute.to_i).and_call_original # We do not use any locks for the member deletion process, from within this service. expect(Gitlab::ExclusiveLease) .not_to receive(:new).with(lock_key, timeout: timeout) destroy_member end it 'destroys the membership' do expect { destroy_member }.to change { group.members.count }.by(-1) end end end context 'for project members' do shared_examples_for 'deletes the project member without using a lock' do it 'does not try to perform the deletion of a project member within a lock' do # We need to account for other places involved in the Member deletion process that # uses ExclusiveLease. # 1. `UpdateHighestRole` concern uses locks to peform work # whenever a Member is committed, so that needs to be accounted for. 
lock_key_for_update_highest_role = "update_highest_role:#{member_to_delete.user_id}" expect(Gitlab::ExclusiveLease) .to receive(:new).with(lock_key_for_update_highest_role, timeout: 10.minutes.to_i).and_call_original # 2. `AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker` also uses locks to perform work, # whenever a user's authorizations has to be refreshed, so that needs to be accounted for as well. lock_key_for_authorizations_refresh = "authorized_project_update/project_recalculate_worker/projects/#{member_to_delete.project.id}" expect(Gitlab::ExclusiveLease) .to receive(:new).with(lock_key_for_authorizations_refresh, timeout: 10.seconds).and_call_original # We do not use any locks for the member deletion process, from within this service. expect(Gitlab::ExclusiveLease) .not_to receive(:new).with(lock_key, timeout: timeout) destroy_member end it 'destroys the membership' do expect { destroy_member }.to change { entity.members.count }.by(-1) end end before do group_project.add_owner(current_user) end context 'deleting project owners' do context 'deleting project owners' do let!(:member_to_delete) { entity.add_owner(member_user) } it_behaves_like 'deletes the project member without using a lock' do let(:entity) { group_project } end end end context 'deleting project members that are not owners' do let!(:member_to_delete) { group_project.add_developer(member_user) } it_behaves_like 'deletes the project member without using a lock' do let(:entity) { group_project } end end end end context 'with a member with access' do before do group_project.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PRIVATE) group.update_attribute(:visibility_level, Gitlab::VisibilityLevel::PRIVATE) end context 'when current user cannot destroy the given member' do context 'with a project member' do let(:member) { group_project.members.find_by(user_id: member_user.id) } context 'when current user does not have any membership management permissions' do before do group_project.add_developer(member_user) end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' context 'when skipping authorisation' do it_behaves_like 'a service destroying a member with access' do let(:opts) { { skip_authorization: true, unassign_issuables: true } } end end end context 'when a project maintainer tries to destroy a project owner' do before do group_project.add_owner(member_user) end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' context 'when skipping authorisation' do it_behaves_like 'a service destroying a member with access' do let(:opts) { { skip_authorization: true, unassign_issuables: true } } end end end end end context 'with a group member' do let(:member) { group.members.find_by(user_id: member_user.id) } before do group.add_developer(member_user) end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' context 'when skipping authorisation' do it_behaves_like 'a service destroying a member with access' do let(:opts) { { skip_authorization: true, unassign_issuables: true } } end end end context 'when current user can destroy the given member' do before do group_project.add_maintainer(current_user) group.add_owner(current_user) end context 'with a project member' do let(:member) { group_project.members.find_by(user_id: member_user.id) } before do group_project.add_developer(member_user) end it_behaves_like 'a service destroying a member with access' context 'unassign issuables' do it_behaves_like 'a service destroying a member with access' do let(:opts) { 
{ unassign_issuables: true } } end end end context 'with a project bot member' do let(:member) { group_project.members.find_by(user_id: member_user.id) } let(:member_user) { create(:user, :project_bot) } before do group_project.add_maintainer(member_user) end context 'when the destroy_bot flag is true' do it_behaves_like 'a service destroying a member with access' do let(:opts) { { destroy_bot: true } } end end context 'when the destroy_bot flag is not specified' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' end end context 'with a group member' do let(:member) { group.members.find_by(user_id: member_user.id) } before do group.add_developer(member_user) end it_behaves_like 'a service destroying a member with access' context 'unassign issuables' do it_behaves_like 'a service destroying a member with access' do let(:opts) { { unassign_issuables: true } } end end end end end context 'with an access requester' do before do group_project.update!(request_access_enabled: true) group.update!(request_access_enabled: true) group_project.request_access(member_user) group.request_access(member_user) end context 'when current user cannot destroy the given access requester' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:member) { group_project.requesters.find_by(user_id: member_user.id) } end it_behaves_like 'a service destroying a member' do let(:opts) { { skip_authorization: true, skip_subresources: true } } let(:member) { group_project.requesters.find_by(user_id: member_user.id) } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:member) { group.requesters.find_by(user_id: member_user.id) } end it_behaves_like 'a service destroying a member' do let(:opts) { { skip_authorization: true, skip_subresources: true } } let(:member) { group.requesters.find_by(user_id: member_user.id) } end end context 'when current user can destroy the given access requester' do let(:opts) { { skip_subresources: true } } before do group_project.add_maintainer(current_user) group.add_owner(current_user) end it_behaves_like 'a service destroying an access request of another user' do let(:member) { group_project.requesters.find_by(user_id: member_user.id) } end it_behaves_like 'a service destroying an access request of another user' do let(:member) { group.requesters.find_by(user_id: member_user.id) } end end context 'on withdrawing their own access request' do let(:opts) { { skip_subresources: true } } let(:current_user) { member_user } it_behaves_like 'a service destroying an access request of self' do let(:member) { group_project.requesters.find_by(user_id: member_user.id) } end it_behaves_like 'a service destroying an access request of self' do let(:member) { group.requesters.find_by(user_id: member_user.id) } end end end context 'with an invited user' do let(:project_invited_member) { create(:project_member, :invited, project: group_project) } let(:group_invited_member) { create(:group_member, :invited, group: group) } context 'when current user cannot destroy the given invited user' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:member) { project_invited_member } end it_behaves_like 'a service destroying a member' do let(:opts) { { skip_authorization: true } } let(:member) { project_invited_member } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:member) { group_invited_member } end it_behaves_like 'a service destroying a member' do let(:opts) { { 
skip_authorization: true } } let(:member) { group_invited_member } end end context 'when current user can destroy the given invited user' do before do group_project.add_maintainer(current_user) group.add_owner(current_user) end # Regression spec for issue: https://gitlab.com/gitlab-org/gitlab-foss/issues/32504 it_behaves_like 'a service destroying a member' do let(:member) { project_invited_member } end it_behaves_like 'a service destroying a member' do let(:member) { group_invited_member } end end end context 'subresources' do let_it_be_with_reload(:user) { create(:user) } let_it_be_with_reload(:member_user) { create(:user) } let_it_be_with_reload(:group) { create(:group, :public) } let_it_be_with_reload(:subgroup) { create(:group, parent: group) } let_it_be(:private_subgroup) { create(:group, :private, parent: group, name: 'private_subgroup') } let_it_be(:private_subgroup_with_direct_membership) { create(:group, :private, parent: group) } let_it_be_with_reload(:subsubgroup) { create(:group, parent: subgroup) } let_it_be_with_reload(:group_project) { create(:project, :public, group: group) } let_it_be_with_reload(:control_project) { create(:project, :private, group: subsubgroup) } let_it_be_with_reload(:subsubproject) { create(:project, :public, group: subsubgroup) } let_it_be(:private_subgroup_project) do create(:project, :private, group: private_subgroup, name: 'private_subgroup_project') end let_it_be(:private_subgroup_with_direct_membership_project) do create(:project, :private, group: private_subgroup_with_direct_membership, name: 'private_subgroup_project') end context 'with memberships' do before do subgroup.add_developer(member_user) subsubgroup.add_developer(member_user) subsubproject.add_developer(member_user) group_project.add_developer(member_user) control_project.add_maintainer(user) private_subgroup_with_direct_membership.add_developer(member_user) group.add_owner(user) @group_member = create(:group_member, :developer, group: group, user: member_user) end let_it_be(:todo_in_public_group_project) do create(:todo, :pending, project: group_project, user: member_user, target: create(:issue, project: group_project) ) end let_it_be(:mr_in_public_group_project) do create(:merge_request, source_project: group_project, assignees: [member_user]) end let_it_be(:todo_in_private_subgroup_project) do create(:todo, :pending, project: private_subgroup_project, user: member_user, target: create(:issue, project: private_subgroup_project) ) end let_it_be(:mr_in_private_subgroup_project) do create(:merge_request, source_project: private_subgroup_project, assignees: [member_user]) end let_it_be(:todo_in_public_subsubgroup_project) do create(:todo, :pending, project: subsubproject, user: member_user, target: create(:issue, project: subsubproject) ) end let_it_be(:mr_in_public_subsubgroup_project) do create(:merge_request, source_project: subsubproject, assignees: [member_user]) end let_it_be(:todo_in_private_subgroup_with_direct_membership_project) do create(:todo, :pending, project: private_subgroup_with_direct_membership_project, user: member_user, target: create(:issue, project: private_subgroup_with_direct_membership_project) ) end let_it_be(:mr_in_private_subgroup_with_direct_membership_project) do create(:merge_request, source_project: private_subgroup_with_direct_membership_project, assignees: [member_user] ) end context 'with skipping of subresources' do subject(:execute_service) { described_class.new(user).execute(@group_member, skip_subresources: true) } before do execute_service end it 
'removes the group membership' do expect(group.members.map(&:user)).not_to include(member_user) end it 'does not remove the project membership' do expect(group_project.members.map(&:user)).to include(member_user) end it 'does not remove the subgroup membership' do expect(subgroup.members.map(&:user)).to include(member_user) end it 'does not remove the subsubgroup membership' do expect(subsubgroup.members.map(&:user)).to include(member_user) end it 'does not remove the subsubproject membership' do expect(subsubproject.members.map(&:user)).to include(member_user) end it 'does not remove the user from the control project' do expect(control_project.members.map(&:user)).to include(user) end context 'todos', :sidekiq_inline do it 'removes todos for which the user no longer has access' do expect(member_user.todos).to include( todo_in_public_group_project, todo_in_public_subsubgroup_project, todo_in_private_subgroup_with_direct_membership_project ) expect(member_user.todos).not_to include(todo_in_private_subgroup_project) end end context 'issuables', :sidekiq_inline do subject(:execute_service) do described_class.new(user).execute(@group_member, skip_subresources: true, unassign_issuables: true) end it 'removes assigned issuables, even in subresources' do expect(member_user.assigned_merge_requests).to be_empty end end end context 'without skipping of subresources' do subject(:execute_service) { described_class.new(user).execute(@group_member, skip_subresources: false) } before do execute_service end it 'removes the project membership' do expect(group_project.members.map(&:user)).not_to include(member_user) end it 'removes the group membership' do expect(group.members.map(&:user)).not_to include(member_user) end it 'removes the subgroup membership' do expect(subgroup.members.map(&:user)).not_to include(member_user) end it 'removes the subsubgroup membership' do expect(subsubgroup.members.map(&:user)).not_to include(member_user) end it 'removes the subsubproject membership' do expect(subsubproject.members.map(&:user)).not_to include(member_user) end it 'does not remove the user from the control project' do expect(control_project.members.map(&:user)).to include(user) end context 'todos', :sidekiq_inline do it 'removes todos for which the user no longer has access' do expect(member_user.todos).to include( todo_in_public_group_project, todo_in_public_subsubgroup_project ) expect(member_user.todos).not_to include( todo_in_private_subgroup_project, todo_in_private_subgroup_with_direct_membership_project ) end end context 'issuables', :sidekiq_inline do subject(:execute_service) do described_class.new(user).execute(@group_member, skip_subresources: false, unassign_issuables: true) end it 'removes assigned issuables' do expect(member_user.assigned_merge_requests).to be_empty end end end end context 'with invites' do before do create(:group_member, :developer, group: subsubgroup, user: member_user) create(:project_member, :invited, project: group_project, created_by: member_user) create(:group_member, :invited, group: group, created_by: member_user) create(:project_member, :invited, project: subsubproject, created_by: member_user) create(:group_member, :invited, group: subgroup, created_by: member_user) subsubproject.add_developer(member_user) control_project.add_maintainer(user) group.add_owner(user) @group_member = create(:group_member, :developer, group: group, user: member_user) end context 'with skipping of subresources' do before do described_class.new(user).execute(@group_member, skip_subresources: 
true) end it 'does not remove group members invited by deleted user' do expect(group.members.not_accepted_invitations_by_user(member_user)).not_to be_empty end it 'does not remove project members invited by deleted user' do expect(group_project.members.not_accepted_invitations_by_user(member_user)).not_to be_empty end it 'does not remove subgroup members invited by deleted user' do expect(subgroup.members.not_accepted_invitations_by_user(member_user)).not_to be_empty end it 'does not remove subproject members invited by deleted user' do expect(subsubproject.members.not_accepted_invitations_by_user(member_user)).not_to be_empty end end context 'without skipping of subresources' do before do described_class.new(user).execute(@group_member, skip_subresources: false) end it 'removes group members invited by deleted user' do expect(group.members.not_accepted_invitations_by_user(member_user)).to be_empty end it 'removes project members invited by deleted user' do expect(group_project.members.not_accepted_invitations_by_user(member_user)).to be_empty end it 'removes subgroup members invited by deleted user' do expect(subgroup.members.not_accepted_invitations_by_user(member_user)).to be_empty end it 'removes subproject members invited by deleted user' do expect(subsubproject.members.not_accepted_invitations_by_user(member_user)).to be_empty end end end end context 'deletion of invitations created by deleted project member' do let(:user) { project.first_owner } let(:member_user) { create(:user) } let(:project) { create(:project) } before do create(:project_member, :invited, project: project, created_by: member_user) project_member = create(:project_member, :maintainer, user: member_user, project: project) described_class.new(user).execute(project_member) end it 'removes project members invited by deleted user' do expect(project.members.not_accepted_invitations_by_user(member_user)).to be_empty end end describe '#mark_as_recursive_call' do it 'marks the instance as recursive' do service = described_class.new(current_user) service.mark_as_recursive_call expect(service.send(:recursive_call?)).to eq(true) end end end
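One behaviour of the class that the suite above does not pin down directly is the early return in `enqueue_jobs_that_needs_to_be_run_only_once_per_hierarchy` for recursive calls. A sketch of such an example, reusing the `group`, `member_user`, and `current_user` lets from the spec (an assumption, not part of the original response):

```ruby
# Sketch only: a service marked as a recursive call should skip the
# once-per-hierarchy cleanup jobs (method name spelling as in the class).
describe 'recursive calls' do
  it 'skips the once-per-hierarchy cleanup jobs' do
    group.add_owner(current_user)
    member = group.add_developer(member_user)

    service = described_class.new(current_user)
    service.mark_as_recursive_call

    expect(service).not_to receive(:enqueue_cleanup_jobs_once_per_heirarchy)

    service.execute(member)
  end
end
```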
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module Members
  class RequestAccessService < Members::BaseService
    def execute(source)
      raise Gitlab::Access::AccessDeniedError unless can_request_access?(source)

      source.members.create(
        access_level: Gitlab::Access::DEVELOPER,
        user: current_user,
        requested_at: Time.current.utc)
    end

    private

    def can_request_access?(source)
      can?(current_user, :request_access, source)
    end
  end
end
```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Members::RequestAccessService, feature_category: :groups_and_projects do
  let(:user) { create(:user) }

  shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do
    it 'raises Gitlab::Access::AccessDeniedError' do
      expect { described_class.new(user).execute(source) }.to raise_error(Gitlab::Access::AccessDeniedError)
    end
  end

  shared_examples 'a service creating an access request' do
    it 'succeeds' do
      expect { described_class.new(user).execute(source) }.to change { source.requesters.count }.by(1)
    end

    it 'returns a <Source>Member' do
      member = described_class.new(user).execute(source)

      expect(member).to be_a "#{source.class}Member".constantize
      expect(member.requested_at).to be_present
    end
  end

  context 'when source is nil' do
    it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do
      let(:source) { nil }
    end
  end

  context 'when current user cannot request access to the project' do
    %i[project group].each do |source_type|
      it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do
        let(:source) { create(source_type, :private) }
      end
    end
  end

  context 'when access requests are disabled' do
    %i[project group].each do |source_type|
      it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do
        let(:source) { create(source_type, :public, :request_access_disabled) }
      end
    end
  end

  context 'when current user can request access to the project' do
    %i[project group].each do |source_type|
      it_behaves_like 'a service creating an access request' do
        let(:source) { create(source_type, :public) }
      end
    end
  end
end
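A sketch of one more example (assuming the same factories as above) pinning down the attributes the service assigns to the created access request; this is not part of the original response:

```ruby
# Sketch only: the service hard-codes Gitlab::Access::DEVELOPER and the
# current user when creating the access request.
describe 'attributes of the created access request' do
  let(:source) { create(:project, :public) }

  it 'assigns the developer access level and the requesting user' do
    member = described_class.new(user).execute(source)

    expect(member.access_level).to eq(Gitlab::Access::DEVELOPER)
    expect(member.user).to eq(user)
  end
end
```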
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module Members
  class UpdateService < Members::BaseService
    # @param members [Member, Array<Member>]
    # returns the updated member(s)
    def execute(members, permission: :update)
      members = Array.wrap(members)

      old_access_level_expiry_map = members.to_h do |member|
        [member.id, { human_access: member.human_access, expires_at: member.expires_at }]
      end

      updated_members = update_members(members, permission)

      Member.transaction do
        updated_members.each { |member| post_update(member, permission, old_access_level_expiry_map) }
      end

      prepare_response(members)
    end

    private

    def update_members(members, permission)
      # `filter_map` avoids the `post_update` call for the member that resulted in no change
      Member.transaction do
        members.filter_map { |member| update_member(member, permission) }
      end
    rescue ActiveRecord::RecordInvalid
      []
    end

    def update_member(member, permission)
      raise Gitlab::Access::AccessDeniedError unless has_update_permissions?(member, permission)

      member.attributes = params
      return unless member.changed?

      member.expiry_notified_at = nil if member.expires_at_changed?

      member.tap(&:save!)
    end

    def post_update(member, permission, old_access_level_expiry_map)
      old_access_level = old_access_level_expiry_map[member.id][:human_access]
      old_expiry = old_access_level_expiry_map[member.id][:expires_at]

      after_execute(action: permission, old_access_level: old_access_level, old_expiry: old_expiry, member: member)
      enqueue_delete_todos(member) if downgrading_to_guest? # Deletes only confidential issues todos for guests
    end

    def prepare_response(members)
      errored_members = members.select { |member| member.errors.any? }
      if errored_members.present?
        error_message = errored_members.flat_map { |member| member.errors.full_messages }.uniq.to_sentence

        return error(error_message, pass_back: { members: errored_members })
      end

      success(members: members)
    end

    def has_update_permissions?(member, permission)
      can?(current_user, action_member_permission(permission, member), member) &&
        !prevent_upgrade_to_owner?(member) &&
        !prevent_downgrade_from_owner?(member)
    end

    def downgrading_to_guest?
      params[:access_level] == Gitlab::Access::GUEST
    end

    def upgrading_to_owner?
      params[:access_level] == Gitlab::Access::OWNER
    end

    def downgrading_from_owner?(member)
      member.owner?
    end

    def prevent_upgrade_to_owner?(member)
      upgrading_to_owner? && cannot_assign_owner_responsibilities_to_member_in_project?(member)
    end

    def prevent_downgrade_from_owner?(member)
      downgrading_from_owner?(member) && cannot_revoke_owner_responsibilities_from_member_in_project?(member)
    end
  end
end

Members::UpdateService.prepend_mod_with('Members::UpdateService')
```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Members::UpdateService, feature_category: :groups_and_projects do let_it_be(:project) { create(:project, :public) } let_it_be(:group) { create(:group, :public) } let_it_be(:current_user) { create(:user) } let_it_be(:member_user1) { create(:user) } let_it_be(:member_user2) { create(:user) } let_it_be(:member_users) { [member_user1, member_user2] } let_it_be(:permission) { :update } let_it_be(:access_level) { Gitlab::Access::MAINTAINER } let(:members) { source.members_and_requesters.where(user_id: member_users).to_a } let(:update_service) { described_class.new(current_user, params) } let(:params) { { access_level: access_level } } let(:updated_members) { subject[:members] } before do member_users.first.tap do |member_user| expires_at = 10.days.from_now project.add_member(member_user, Gitlab::Access::DEVELOPER, expires_at: expires_at) group.add_member(member_user, Gitlab::Access::DEVELOPER, expires_at: expires_at) end member_users[1..].each do |member_user| project.add_developer(member_user) group.add_developer(member_user) end end shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do it 'raises Gitlab::Access::AccessDeniedError' do expect { subject } .to raise_error(Gitlab::Access::AccessDeniedError) end end shared_examples 'current user cannot update the given members' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let_it_be(:source) { project } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let_it_be(:source) { group } end end shared_examples 'returns error status when params are invalid' do let_it_be(:params) { { expires_at: 2.days.ago } } specify do expect(subject[:status]).to eq(:error) end end shared_examples 'a service updating members' do it 'updates the members' do new_access_levels = updated_members.map(&:access_level) expect(updated_members).not_to be_empty expect(updated_members).to all(be_valid) expect(new_access_levels).to all(be access_level) end it 'returns success status' do expect(subject.fetch(:status)).to eq(:success) end it 'invokes after_execute with correct args' do members.each do |member| expect(update_service).to receive(:after_execute).with( action: permission, old_access_level: member.human_access, old_expiry: member.expires_at, member: member ) end subject end it 'authorization update callback is triggered' do expect(members).to all(receive(:refresh_member_authorized_projects).once) subject end it 'does not enqueues todos for deletion' do members.each do |member| expect(TodosDestroyer::EntityLeaveWorker) .not_to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, member.user_id, member.source_id, source.class.name) end subject end context 'when members are downgraded to guest' do shared_examples 'schedules to delete confidential todos' do it do members.each do |member| expect(TodosDestroyer::EntityLeaveWorker) .to receive(:perform_in) .with(Todo::WAIT_FOR_DELETE, member.user_id, member.source_id, source.class.name).once end new_access_levels = updated_members.map(&:access_level) expect(updated_members).to all(be_valid) expect(new_access_levels).to all(be Gitlab::Access::GUEST) end end context 'with Gitlab::Access::GUEST level as a string' do let_it_be(:params) { { access_level: Gitlab::Access::GUEST.to_s } } it_behaves_like 'schedules to delete confidential todos' end context 'with Gitlab::Access::GUEST level as an integer' do let_it_be(:params) { { access_level: Gitlab::Access::GUEST } } it_behaves_like 'schedules to 
delete confidential todos' end end context 'when access_level is invalid' do let_it_be(:params) { { access_level: 'invalid' } } it 'raises an error' do expect { described_class.new(current_user, params) } .to raise_error(ArgumentError, 'invalid value for Integer(): "invalid"') end end context 'when members update results in no change' do let(:params) { { access_level: members.first.access_level } } it 'does not invoke update! and post_update' do expect(update_service).not_to receive(:save!) expect(update_service).not_to receive(:post_update) expect(subject[:status]).to eq(:success) end it 'authorization update callback is not triggered' do members.each { |member| expect(member).not_to receive(:refresh_member_authorized_projects) } subject end end end shared_examples 'updating a project' do let_it_be(:group_project) { create(:project, group: create(:group)) } let_it_be(:source) { group_project } before do member_users.each { |member_user| group_project.add_developer(member_user) } end context 'as a project maintainer' do before do group_project.add_maintainer(current_user) end it_behaves_like 'a service updating members' context 'when member update results in an error' do it_behaves_like 'a service returning an error' end context 'and updating members to OWNER' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let_it_be(:access_level) { Gitlab::Access::OWNER } end end context 'and updating themselves to OWNER' do let(:members) { source.members_and_requesters.find_by!(user_id: current_user.id) } it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let_it_be(:access_level) { Gitlab::Access::OWNER } end end context 'and downgrading members from OWNER' do before do member_users.each { |member_user| group_project.add_owner(member_user) } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let_it_be(:access_level) { Gitlab::Access::MAINTAINER } end end end context 'when current_user is considered an owner in the project via inheritance' do before do group_project.group.add_owner(current_user) end context 'and can update members to OWNER' do before do member_users.each { |member_user| group_project.add_developer(member_user) } end it_behaves_like 'a service updating members' do let_it_be(:access_level) { Gitlab::Access::OWNER } end end context 'and can downgrade members from OWNER' do before do member_users.each { |member_user| group_project.add_owner(member_user) } end it_behaves_like 'a service updating members' do let_it_be(:access_level) { Gitlab::Access::MAINTAINER } end end end context 'when project members expiration date is updated with expiry_notified_at' do let_it_be(:params) { { expires_at: 20.days.from_now } } before do group_project.group.add_owner(current_user) members.each do |member| member.update!(expiry_notified_at: Date.today) end end it "clear expiry_notified_at" do subject members.each do |member| expect(member.reload.expiry_notified_at).to be_nil end end end end shared_examples 'updating a group' do let_it_be(:source) { group } before do group.add_owner(current_user) end it_behaves_like 'a service updating members' context 'when member update results in an error' do it_behaves_like 'a service returning an error' end context 'when group members expiration date is updated' do let_it_be(:params) { { expires_at: 20.days.from_now } } let(:notification_service) { instance_double(NotificationService) } before do allow(NotificationService).to receive(:new).and_return(notification_service) end it 'emails the users 
that their group membership expiry has changed' do members.each do |member| expect(notification_service).to receive(:updated_group_member_expiration).with(member) end subject end end context 'when group members expiration date is updated with expiry_notified_at' do let_it_be(:params) { { expires_at: 20.days.from_now } } before do members.each do |member| member.update!(expiry_notified_at: Date.today) end end it "clear expiry_notified_at" do subject members.each do |member| expect(member.reload.expiry_notified_at).to be_nil end end end end subject { update_service.execute(members, permission: permission) } shared_examples 'a service returning an error' do it_behaves_like 'returns error status when params are invalid' context 'when a member update results in invalid record' do let(:member2) { members.second } before do allow(member2).to receive(:save!) do member2.errors.add(:user_id) member2.errors.add(:access_level) end.and_raise(ActiveRecord::RecordInvalid) end it 'returns the error' do response = subject expect(response[:status]).to eq(:error) expect(response[:message]).to eq('User is invalid and Access level is invalid') end it 'rollbacks back the entire update' do old_access_levels = members.pluck(:access_level) subject expect(members.each(&:reset).pluck(:access_level)).to eq(old_access_levels) end end end it_behaves_like 'current user cannot update the given members' it_behaves_like 'updating a project' it_behaves_like 'updating a group' context 'with a single member' do let(:members) { create(:group_member, group: group) } before do group.add_owner(current_user) end it 'returns the correct response' do expect(subject[:members]).to contain_exactly(members) end end context 'when current user is an admin', :enable_admin_mode do let_it_be(:current_user) { create(:admin) } let_it_be(:source) { group } context 'when all owners are being downgraded' do before do member_users.each { |member_user| group.add_owner(member_user) } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' end context 'when all blocked owners are being downgraded' do before do member_users.each do |member_user| group.add_owner(member_user) member_user.block end end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' end end end
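For reference alongside the spec above, a minimal usage sketch of the service's return shape, assuming an owner `current_user` and the group membership set up by the same factories; the `puts` output is illustrative only.

```ruby
member = group.members.find_by(user_id: member_user1.id)

result = Members::UpdateService
  .new(current_user, { access_level: Gitlab::Access::MAINTAINER })
  .execute(member, permission: :update)

if result[:status] == :success
  # the service returns the (possibly unchanged) members it was given
  result[:members].each { |m| puts "#{m.user.username} => #{m.human_access}" }
else
  # validation failures are aggregated into one sentence, and the errored
  # members are passed back under result[:members]
  puts result[:message]
end
```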
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Members class CreateService < Members::BaseService BlankInvitesError = Class.new(StandardError) TooManyInvitesError = Class.new(StandardError) MembershipLockedError = Class.new(StandardError) DEFAULT_INVITE_LIMIT = 100 attr_reader :membership_locked def initialize(*args) super @errors = [] @invites = invites_from_params @source = params[:source] end def execute raise Gitlab::Access::AccessDeniedError unless can?(current_user, create_member_permission(source), source) if adding_at_least_one_owner && cannot_assign_owner_responsibilities_to_member_in_project? raise Gitlab::Access::AccessDeniedError end validate_invite_source! validate_invitable! add_members after_add_hooks enqueue_onboarding_progress_action publish_event! result rescue BlankInvitesError, TooManyInvitesError, MembershipLockedError => e Gitlab::ErrorTracking.log_exception(e, class: self.class.to_s, user_id: current_user.id) error(e.message) end def single_member members.last end private attr_reader :source, :errors, :invites, :member_created_namespace_id, :members def adding_at_least_one_owner params[:access_level] == Gitlab::Access::OWNER end def cannot_assign_owner_responsibilities_to_member_in_project? source.is_a?(Project) && !current_user.can?(:manage_owners, source) end def invites_from_params # String, Nil, Array, Integer return params[:user_id] if params[:user_id].is_a?(Array) return [] unless params[:user_id] params[:user_id].to_s.split(',').uniq end def validate_invite_source! raise ArgumentError, s_('AddMember|No invite source provided.') unless invite_source.present? end def validate_invitable! raise BlankInvitesError, blank_invites_message if invites.blank? return unless user_limit && invites.size > user_limit message = format(s_("AddMember|Too many users specified (limit is %{user_limit})"), user_limit: user_limit) raise TooManyInvitesError, message end def blank_invites_message s_('AddMember|No users specified.') end def add_members @members = creator_service.add_members( source, invites, params[:access_level], **create_params ) members.each { |member| process_result(member) } end def creator_service "Members::#{source.class.to_s.pluralize}::CreatorService".constantize end def create_params { expires_at: params[:expires_at], current_user: current_user } end def process_result(member) existing_errors = member.errors.full_messages # calling invalid? clears any errors that were added outside of the # rails validation process if member.invalid? || existing_errors.present? add_error_for_member(member, existing_errors) else after_execute(member: member) @member_created_namespace_id ||= member.namespace_id end end # overridden def add_error_for_member(member, existing_errors) prefix = "#{member.user.username}: " if member.user.present? 
errors << "#{prefix}#{all_member_errors(member, existing_errors).to_sentence}" end def all_member_errors(member, existing_errors) existing_errors.concat(member.errors.full_messages).uniq end def after_add_hooks # overridden in subclasses/ee end def after_execute(member:) super track_invite_source(member) end def track_invite_source(member) Gitlab::Tracking.event( self.class.name, 'create_member', label: invite_source, property: tracking_property(member), user: current_user ) end def invite_source params[:invite_source] end def tracking_property(member) # ideally invites go down the invite service class instead, but there is nothing that limits an invite # from being used in this class and if you send emails as a comma separated list to the api/members # endpoint, it will support invites member.invite? ? 'net_new_user' : 'existing_user' end def user_limit limit = params.fetch(:limit, DEFAULT_INVITE_LIMIT) limit && limit < 0 ? nil : limit end def enqueue_onboarding_progress_action return unless at_least_one_member_created? Onboarding::UserAddedWorker.perform_async(member_created_namespace_id) end def at_least_one_member_created? member_created_namespace_id.present? end def result if errors.any? error(formatted_errors) else success end end def formatted_errors errors.to_sentence end def publish_event! return unless at_least_one_member_created? Gitlab::EventStore.publish( Members::MembersAddedEvent.new(data: { source_id: source.id, source_type: source.class.name }) ) end def create_member_permission(source) case source when Group :admin_group_member when Project :admin_project_member else raise "Unknown source type: #{source.class}!" end end end end Members::CreateService.prepend_mod_with('Members::CreateService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_cache, :clean_gitlab_redis_shared_state, :sidekiq_inline, feature_category: :groups_and_projects do let_it_be(:source, reload: true) { create(:project) } let_it_be(:user) { create(:user) } let_it_be(:member) { create(:user) } let_it_be(:user_invited_by_id) { create(:user) } let_it_be(:user_id) { member.id.to_s } let_it_be(:access_level) { Gitlab::Access::GUEST } let(:additional_params) { { invite_source: '_invite_source_' } } let(:params) { { user_id: user_id, access_level: access_level }.merge(additional_params) } let(:current_user) { user } subject(:execute_service) { described_class.new(current_user, params.merge({ source: source })).execute } before do case source when Project source.add_maintainer(user) Onboarding::Progress.onboard(source.namespace) when Group source.add_owner(user) Onboarding::Progress.onboard(source) end end context 'when the current user does not have permission to create members' do let(:current_user) { create(:user) } it 'raises a Gitlab::Access::AccessDeniedError' do expect { execute_service }.to raise_error(Gitlab::Access::AccessDeniedError) end context 'when a project maintainer attempts to add owners' do let(:access_level) { Gitlab::Access::OWNER } before do source.add_maintainer(current_user) end it 'raises a Gitlab::Access::AccessDeniedError' do expect { execute_service }.to raise_error(Gitlab::Access::AccessDeniedError) end end end context 'when passing an invalid source' do let_it_be(:source) { Object.new } it 'raises a RuntimeError' do expect { execute_service }.to raise_error(RuntimeError, 'Unknown source type: Object!') end end context 'when passing valid parameters' do it 'adds a user to members' do expect(execute_service[:status]).to eq(:success) expect(source.users).to include member expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true) end context 'when user_id is passed as an integer' do let(:user_id) { member.id } it 'successfully creates member' do expect(execute_service[:status]).to eq(:success) expect(source.users).to include member expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true) end end context 'with user_id as an array of integers' do let(:user_id) { [member.id, user_invited_by_id.id] } it 'successfully creates members' do expect(execute_service[:status]).to eq(:success) expect(source.users).to include(member, user_invited_by_id) expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true) end end context 'with user_id as an array of strings' do let(:user_id) { [member.id.to_s, user_invited_by_id.id.to_s] } it 'successfully creates members' do expect(execute_service[:status]).to eq(:success) expect(source.users).to include(member, user_invited_by_id) expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true) end end context 'when executing on a group' do let_it_be(:source) { create(:group) } it 'adds a user to members' do expect(execute_service[:status]).to eq(:success) expect(source.users).to include member expect(Onboarding::Progress.completed?(source, :user_added)).to be(true) end it 'triggers a members added event' do expect(Gitlab::EventStore) .to receive(:publish) .with(an_instance_of(Members::MembersAddedEvent)) .and_call_original expect(execute_service[:status]).to eq(:success) end end context 'when only one user fails validations' do let_it_be(:source) { create(:project, group: create(:group)) } 
let(:user_id) { [member.id, user_invited_by_id.id] } before do # validations will fail because we try to invite them to the project as a guest source.group.add_developer(member) end it 'triggers the members added and authorizations changed events' do expect(Gitlab::EventStore) .to receive(:publish) .with(an_instance_of(ProjectAuthorizations::AuthorizationsChangedEvent)) .and_call_original expect(Gitlab::EventStore) .to receive(:publish) .with(an_instance_of(Members::MembersAddedEvent)) .and_call_original expect(execute_service[:status]).to eq(:error) expect(execute_service[:message]) .to include 'Access level should be greater than or equal to Developer inherited membership from group' expect(source.users).not_to include(member) expect(source.users).to include(user_invited_by_id) end end context 'when all users fail validations' do let_it_be(:source) { create(:project, group: create(:group)) } let(:user_id) { [member.id, user_invited_by_id.id] } before do # validations will fail because we try to invite them to the project as a guest source.group.add_developer(member) source.group.add_developer(user_invited_by_id) end it 'does not trigger the members added event' do expect(Gitlab::EventStore) .not_to receive(:publish) .with(an_instance_of(Members::MembersAddedEvent)) expect(execute_service[:status]).to eq(:error) expect(execute_service[:message]) .to include 'Access level should be greater than or equal to Developer inherited membership from group' expect(source.users).not_to include(member, user_invited_by_id) end end end context 'when passing no user ids' do let(:user_id) { '' } it 'does not add a member' do expect(Gitlab::ErrorTracking) .to receive(:log_exception) .with(an_instance_of(described_class::BlankInvitesError), class: described_class.to_s, user_id: user.id) expect(Gitlab::EventStore) .not_to receive(:publish) .with(an_instance_of(Members::MembersAddedEvent)) expect(execute_service[:status]).to eq(:error) expect(execute_service[:message]).to eq(s_('AddMember|No users specified.')) expect(source.users).not_to include member expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false) end end context 'when passing many user ids' do let(:user_id) { 1.upto(101).to_a.join(',') } it 'limits the number of users to 100' do expect(Gitlab::ErrorTracking) .to receive(:log_exception) .with(an_instance_of(described_class::TooManyInvitesError), class: described_class.to_s, user_id: user.id) expect(execute_service[:status]).to eq(:error) expect(execute_service[:message]).to be_present expect(source.users).not_to include member expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false) end end context 'when passing an invalid access level' do let(:access_level) { -1 } it 'does not add a member' do expect(execute_service[:status]).to eq(:error) expect(execute_service[:message]).to include("#{member.username}: Access level is not included in the list") expect(source.users).not_to include member expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false) end end context 'when passing an existing invite user id' do let(:invited_member) { create(:project_member, :guest, :invited, project: source) } let(:user_id) { invited_member.invite_email } let(:access_level) { ProjectMember::MAINTAINER } it 'allows already invited members to be re-invited by email and updates the member access' do expect(execute_service[:status]).to eq(:success) expect(invited_member.reset.access_level).to eq ProjectMember::MAINTAINER 
expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true) end end context 'when adding a project_bot' do let_it_be(:project_bot) { create(:user, :project_bot) } let(:user_id) { project_bot.id } context 'when project_bot is already a member' do before do source.add_developer(project_bot) end it 'does not update the member' do expect(execute_service[:status]).to eq(:error) expect(execute_service[:message]).to eq("#{project_bot.username}: not authorized to update member") expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(false) end end context 'when project_bot is not already a member' do it 'adds the member' do expect(execute_service[:status]).to eq(:success) expect(source.users).to include project_bot expect(Onboarding::Progress.completed?(source.namespace, :user_added)).to be(true) end end end context 'when tracking the invite source', :snowplow do context 'when invite_source is not passed' do let(:additional_params) { {} } it 'raises an error' do expect { execute_service }.to raise_error(ArgumentError, 'No invite source provided.') expect_no_snowplow_event end end context 'when invite_source is passed' do it 'tracks the invite source from params' do execute_service expect_snowplow_event( category: described_class.name, action: 'create_member', label: '_invite_source_', property: 'existing_user', user: user ) end context 'with an already existing member' do before do source.add_developer(member) end it 'tracks the invite source from params' do execute_service expect_snowplow_event( category: described_class.name, action: 'create_member', label: '_invite_source_', property: 'existing_user', user: user ) end end end context 'when it is a net_new_user' do let(:additional_params) { { invite_source: '_invite_source_', user_id: '[email protected]' } } it 'tracks the invite source from params' do execute_service expect_snowplow_event( category: described_class.name, action: 'create_member', label: '_invite_source_', property: 'net_new_user', user: user ) end end end end
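A short usage sketch of the service exercised above, assuming `project`, a maintainer `current_user`, and the invited users already exist; the `invite_source` label is a hypothetical value, since only its presence is validated.

```ruby
params = {
  source: project,
  user_id: [member.id, user_invited_by_id.id].join(','), # also accepts an Array or a single id
  access_level: Gitlab::Access::DEVELOPER,
  invite_source: 'members-docs-example' # hypothetical label; only presence is validated
}

result = Members::CreateService.new(current_user, params).execute

result[:status] # => :success, or :error with result[:message] listing per-user failures
```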
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Members class ImportProjectTeamService < BaseService ImportProjectTeamForbiddenError = Class.new(StandardError) def initialize(*args) super @errors = {} end def execute check_target_and_source_projects_exist! check_user_permissions! import_project_team process_import_result result rescue ArgumentError, ImportProjectTeamForbiddenError => e ServiceResponse.error(message: e.message, reason: :unprocessable_entity) end private attr_reader :members, :params, :current_user, :errors, :result def import_project_team @members = target_project.team.import(source_project, current_user) if members.is_a?(Array) members.each { |member| check_member_validity(member) } else @result = ServiceResponse.error(message: 'Import failed', reason: :unprocessable_entity) end end def check_target_and_source_projects_exist! if target_project.blank? raise ArgumentError, 'Target project does not exist' elsif source_project.blank? raise ArgumentError, 'Source project does not exist' end end def check_user_permissions! return if can?(current_user, :read_project_member, source_project) && can?(current_user, :import_project_members_from_another_project, target_project) raise ImportProjectTeamForbiddenError, 'Forbidden' end def check_member_validity(member) return if member.valid? errors[member.user.username] = member.errors.full_messages.to_sentence end def process_import_result @result ||= if errors.any? ServiceResponse.error(message: errors, payload: { total_members_count: members.size }) else ServiceResponse.success(message: 'Successfully imported') end end def target_project_id params[:id] end def source_project_id params[:project_id] end def target_project @target_project ||= Project.find_by_id(target_project_id) end def source_project @source_project ||= Project.find_by_id(source_project_id) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Members::ImportProjectTeamService, feature_category: :groups_and_projects do describe '#execute' do let_it_be(:source_project) { create(:project) } let_it_be(:target_project) { create(:project) } let_it_be(:user) { create(:user) } let(:source_project_id) { source_project.id } let(:target_project_id) { target_project.id } subject(:import) { described_class.new(user, { id: target_project_id, project_id: source_project_id }) } before_all do source_project.add_guest(user) target_project.add_maintainer(user) end context 'when project team members are imported successfully' do it 'returns a successful response' do result = import.execute expect(result).to be_a(ServiceResponse) expect(result.success?).to be(true) expect(result.message).to eq('Successfully imported') end end context 'when the project team import fails' do context 'when the target project cannot be found' do let(:target_project_id) { non_existing_record_id } it 'returns unsuccessful response' do result = import.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq('Target project does not exist') expect(result.reason).to eq(:unprocessable_entity) end end context 'when the source project cannot be found' do let(:source_project_id) { non_existing_record_id } it 'returns unsuccessful response' do result = import.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq('Source project does not exist') expect(result.reason).to eq(:unprocessable_entity) end end context 'when the user doing the import does not exist' do let(:user) { nil } it 'returns unsuccessful response' do result = import.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq('Forbidden') expect(result.reason).to eq(:unprocessable_entity) end end context 'when the user does not have permission to read the source project members' do let(:source_project_id) { create(:project, :private).id } it 'returns unsuccessful response' do result = import.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq('Forbidden') expect(result.reason).to eq(:unprocessable_entity) end end context 'when the user does not have permission to admin the target project' do let(:target_project_id) { create(:project).id } it 'returns unsuccessful response' do result = import.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq('Forbidden') expect(result.reason).to eq(:unprocessable_entity) end end context 'when the source and target project are valid but the ProjectTeam#import command fails' do before do allow_next_instance_of(ProjectTeam) do |project_team| allow(project_team).to receive(:import).and_return(false) end end it 'returns unsuccessful response' do result = import.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq('Import failed') expect(result.reason).to eq(:unprocessable_entity) end end context 'when one of the imported project members is invalid' do it 'returns unsuccessful response' do project_bot = create(:user, :project_bot) source_project.add_developer(project_bot) result = import.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) message = { project_bot.username => 'User project bots cannot be added to other groups / projects' } 
expect(result.message).to eq(message) expect(result.payload[:total_members_count]).to eq(2) end end end end end
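A minimal sketch of calling the import service directly, assuming `current_user` can read the source project's members and admin the target project, and that both projects exist.

```ruby
response = Members::ImportProjectTeamService
  .new(current_user, { id: target_project.id, project_id: source_project.id })
  .execute

if response.success?
  response.message # => 'Successfully imported'
else
  # a String for missing projects or permission failures (reason :unprocessable_entity),
  # or a Hash of { username => error sentence } when individual memberships fail validation
  [response.reason, response.message]
end
```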
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Members class ApproveAccessRequestService < Members::BaseService def execute(access_requester, skip_authorization: false, skip_log_audit_event: false) validate_access!(access_requester) unless skip_authorization access_requester.access_level = params[:access_level] if params[:access_level] access_requester.accept_request(current_user) after_execute(member: access_requester, skip_log_audit_event: skip_log_audit_event) access_requester end private def after_execute(member:, skip_log_audit_event:) super resolve_access_request_todos(member) end def validate_access!(access_requester) raise Gitlab::Access::AccessDeniedError unless can_approve_access_requester?(access_requester) if approving_member_with_owner_access_level?(access_requester) && cannot_assign_owner_responsibilities_to_member_in_project?(access_requester) raise Gitlab::Access::AccessDeniedError end end def can_approve_access_requester?(access_requester) can?(current_user, :admin_member_access_request, access_requester.source) end def approving_member_with_owner_access_level?(access_requester) access_level_value = params[:access_level] || access_requester.access_level access_level_value == Gitlab::Access::OWNER end end end Members::ApproveAccessRequestService.prepend_mod_with('Members::ApproveAccessRequestService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Members::ApproveAccessRequestService, feature_category: :groups_and_projects do let(:project) { create(:project, :public) } let(:group) { create(:group, :public) } let(:current_user) { create(:user) } let(:access_requester_user) { create(:user) } let(:access_requester) { source.requesters.find_by!(user_id: access_requester_user.id) } let(:opts) { {} } let(:params) { {} } let(:custom_access_level) { Gitlab::Access::MAINTAINER } shared_examples 'a service raising Gitlab::Access::AccessDeniedError' do it 'raises Gitlab::Access::AccessDeniedError' do expect do described_class.new(current_user, params).execute(access_requester, **opts) end.to raise_error(Gitlab::Access::AccessDeniedError) end end shared_examples 'a service approving an access request' do it 'succeeds' do expect do described_class.new(current_user, params).execute(access_requester, **opts) end.to change { source.requesters.count }.by(-1) end it 'returns a <Source>Member' do member = described_class.new(current_user, params).execute(access_requester, **opts) expect(member).to be_a "#{source.class}Member".constantize expect(member.requested_at).to be_nil end it 'calls the method to resolve access request for the approver' do expect_next_instance_of(described_class) do |instance| expect(instance).to receive(:resolve_access_request_todos).with(access_requester) end described_class.new(current_user, params).execute(access_requester, **opts) end it 'resolves the todos for the access requests' do expect_next_instance_of(TodoService) do |instance| expect(instance).to receive(:resolve_access_request_todos).with(access_requester) end described_class.new(current_user, params).execute(access_requester, **opts) end context 'with a custom access level' do let(:params) { { access_level: custom_access_level } } it 'returns a ProjectMember with the custom access level' do member = described_class.new(current_user, params).execute(access_requester, **opts) expect(member.access_level).to eq(custom_access_level) end end end context 'when an access requester is found' do before do project.request_access(access_requester_user) group.request_access(access_requester_user) end context 'when current user is nil' do let(:user) { nil } context 'and :ldap option is not given' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:source) { project } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:source) { group } end end context 'and :skip_authorization option is false' do let(:opts) { { skip_authorization: false } } it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:source) { project } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:source) { group } end end context 'and :skip_authorization option is true' do let(:opts) { { skip_authorization: true } } it_behaves_like 'a service approving an access request' do let(:source) { project } end it_behaves_like 'a service approving an access request' do let(:source) { group } end end end context 'when current user cannot approve access request to the project' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:source) { project } end it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' do let(:source) { group } end end context 'when current user can approve access request to the project' do before do project.add_maintainer(current_user) group.add_owner(current_user) end 
it_behaves_like 'a service approving an access request' do let(:source) { project } end it_behaves_like 'a service approving an access request' do let(:source) { group } end end context 'in a project' do let_it_be(:group_project) { create(:project, :public, group: create(:group, :public)) } let(:source) { group_project } let(:custom_access_level) { Gitlab::Access::OWNER } let(:params) { { access_level: custom_access_level } } before do group_project.request_access(access_requester_user) end context 'maintainers' do before do group_project.add_maintainer(current_user) end context 'cannot approve the access request of a requester to give them OWNER permissions' do it_behaves_like 'a service raising Gitlab::Access::AccessDeniedError' end end context 'owners' do before do # so that `current_user` is considered an `OWNER` in the project via inheritance. group_project.group.add_owner(current_user) end context 'can approve the access request of a requester to give them OWNER permissions' do it_behaves_like 'a service approving an access request' end end end end end
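For context, a brief sketch of approving a pending request outside the shared examples, assuming `current_user` owns the group and `requesting_user` (a hypothetical name) has already requested access.

```ruby
access_requester = group.requesters.find_by!(user_id: requesting_user.id)

member = Members::ApproveAccessRequestService
  .new(current_user, { access_level: Gitlab::Access::DEVELOPER })
  .execute(access_requester)

member.requested_at # => nil once the request has been accepted
```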
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Members module Projects class CreatorService < Members::CreatorService class << self def cannot_manage_owners?(source, current_user) !Ability.allowed?(current_user, :manage_owners, source) end end private def can_create_new_member? return false if assigning_project_member_with_owner_access_level? && cannot_assign_owner_responsibilities_to_member_in_project? # This access check(`admin_project_member`) will write to safe request store cache for the user being added. # This means any operations inside the same request will need to purge that safe request # store cache if operations are needed to be done inside the same request that checks max member access again on # that user. current_user.can?(:admin_project_member, member.project) || adding_the_creator_as_owner_in_a_personal_project? end def can_update_existing_member? # rubocop:disable Layout/EmptyLineAfterGuardClause raise ::Gitlab::Access::AccessDeniedError if assigning_project_member_with_owner_access_level? && cannot_assign_owner_responsibilities_to_member_in_project? # rubocop:enable Layout/EmptyLineAfterGuardClause current_user.can?(:update_project_member, member) end def adding_the_creator_as_owner_in_a_personal_project? # this condition is reached during testing setup a lot due to use of `.add_member` member.project.personal_namespace_holder?(member.user) end def assigning_project_member_with_owner_access_level? return true if member && member.owner? access_level == Gitlab::Access::OWNER end def cannot_assign_owner_responsibilities_to_member_in_project? member.is_a?(ProjectMember) && !current_user.can?(:manage_owners, member.source) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Members::Projects::CreatorService, feature_category: :groups_and_projects do let_it_be(:source, reload: true) { create(:project, :public) } let_it_be(:source2, reload: true) { create(:project, :public) } let_it_be(:user) { create(:user) } describe '.access_levels' do it 'returns Gitlab::Access.sym_options_with_owner' do expect(described_class.access_levels).to eq(Gitlab::Access.sym_options_with_owner) end end it_behaves_like 'owner management' describe '.add_members' do it_behaves_like 'bulk member creation' do let_it_be(:source_type) { Project } let_it_be(:member_type) { ProjectMember } end end describe '.add_member' do it_behaves_like 'member creation' do let_it_be(:member_type) { ProjectMember } end context 'authorized projects update' do it 'schedules a single project authorization update job when called multiple times' do stub_feature_flags(do_not_run_safety_net_auth_refresh_jobs: false) expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to receive(:bulk_perform_in).once 1.upto(3) do described_class.add_member(source, user, :maintainer) end end end end end
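A minimal sketch of the class-level helper the examples above rely on, assuming a project and a user from the same factories; as in the spec, no `current_user` is passed here.

```ruby
member = Members::Projects::CreatorService.add_member(project, user, :maintainer)

member.persisted?   # => true
member.access_level # => Gitlab::Access::MAINTAINER (40)
```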
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Members module Groups class CreatorService < Members::CreatorService class << self def cannot_manage_owners?(source, current_user) source.max_member_access_for_user(current_user) < Gitlab::Access::OWNER end end private def can_create_new_member? current_user.can?(:admin_group_member, member.group) end def can_update_existing_member? current_user.can?(:update_group_member, member) end end end end Members::Groups::CreatorService.prepend_mod_with('Members::Groups::CreatorService') ```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Members::Groups::CreatorService, feature_category: :groups_and_projects do
  let_it_be(:source, reload: true) { create(:group, :public) }
  let_it_be(:source2, reload: true) { create(:group, :public) }
  let_it_be(:user) { create(:user) }

  describe '.access_levels' do
    it 'returns Gitlab::Access.sym_options_with_owner' do
      expect(described_class.access_levels).to eq(Gitlab::Access.sym_options_with_owner)
    end
  end

  it_behaves_like 'owner management'

  describe '.add_members' do
    it_behaves_like 'bulk member creation' do
      let_it_be(:source_type) { Group }
      let_it_be(:member_type) { GroupMember }
    end
  end

  describe '.add_member' do
    it_behaves_like 'member creation' do
      let_it_be(:member_type) { GroupMember }
    end

    context 'authorized projects update' do
      it 'schedules a single project authorization update job when called multiple times' do
        # this is in line with the overridden behaviour in stubbed_member.rb
        worker_instance = AuthorizedProjectsWorker.new
        expect(AuthorizedProjectsWorker).to receive(:new).once.and_return(worker_instance)
        expect(worker_instance).to receive(:perform).with(user.id)

        1.upto(3) do
          described_class.add_member(source, user, :maintainer)
        end
      end
    end
  end
end
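A small sketch of the same helpers on the group side, assuming `group`, `user`, and a hypothetical `another_user`; symbolic roles and raw `Gitlab::Access` integers are interchangeable inputs.

```ruby
Members::Groups::CreatorService.access_levels
# => symbol-to-integer map from Gitlab::Access.sym_options_with_owner
#    (e.g. maintainer: 40, owner: 50)

Members::Groups::CreatorService.add_member(group, user, :maintainer)
Members::Groups::CreatorService.add_member(group, another_user, Gitlab::Access::MAINTAINER)
```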
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module AlertManagement class ProcessPrometheusAlertService extend ::Gitlab::Utils::Override include ::AlertManagement::AlertProcessing include ::AlertManagement::Responses def initialize(project, payload, integration: nil) @project = project @payload = payload @integration = integration end def execute return bad_request unless incoming_payload.has_required_attributes? process_alert return bad_request unless alert.persisted? complete_post_processing_tasks success(alert) end private attr_reader :project, :payload, :integration override :incoming_payload def incoming_payload strong_memoize(:incoming_payload) do Gitlab::AlertManagement::Payload.parse( project, payload, integration: integration, monitoring_tool: Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus] ) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe AlertManagement::ProcessPrometheusAlertService, feature_category: :incident_management do let_it_be(:project, reload: true) { create(:project, :repository) } let(:service) { described_class.new(project, payload) } describe '#execute' do include_context 'incident management settings enabled' subject(:execute) { service.execute } before do stub_licensed_features(oncall_schedules: false, generic_alert_fingerprinting: false) end context 'when alert payload is valid' do let_it_be(:starts_at) { '2020-04-27T10:10:22.265949279Z' } let_it_be(:title) { 'Alert title' } let_it_be(:fingerprint) { [starts_at, title, 'vector(1)'].join('/') } let_it_be(:source) { 'Prometheus' } let(:prometheus_status) { 'firing' } let(:payload) do { 'status' => prometheus_status, 'labels' => { 'alertname' => 'GitalyFileServerDown', 'channel' => 'gitaly', 'pager' => 'pagerduty', 'severity' => 's1' }, 'annotations' => { 'description' => 'Alert description', 'runbook' => 'troubleshooting/gitaly-down.md', 'title' => title }, 'startsAt' => starts_at, 'endsAt' => '2020-04-27T10:20:22.265949279Z', 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1' } end it_behaves_like 'processes new firing alert' include_examples 'handles race condition in alert creation' context 'with resolving payload' do let(:prometheus_status) { 'resolved' } it_behaves_like 'processes recovery alert' end context 'environment given' do let(:environment) { create(:environment, project: project) } let(:alert) { project.alert_management_alerts.last } before do payload['labels']['gitlab_environment_name'] = environment.name end it 'sets the environment' do execute expect(alert.environment).to eq(environment) end end end context 'when alert payload is invalid' do let(:payload) { {} } it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request end end end
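A condensed sketch of feeding a Prometheus payload through the service, assuming `project` exists; the keys mirror the payload stubbed in the spec above, trimmed for brevity.

```ruby
payload = {
  'status' => 'firing',
  'labels' => { 'alertname' => 'GitalyFileServerDown', 'severity' => 's1' },
  'annotations' => { 'title' => 'Alert title', 'description' => 'Alert description' },
  'startsAt' => '2020-04-27T10:10:22.265949279Z'
}

AlertManagement::ProcessPrometheusAlertService.new(project, payload).execute

# the shared examples above assert the observable effect:
project.alert_management_alerts.last # newly created (or re-fired) alert
```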
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module AlertManagement class CreateAlertIssueService include Gitlab::Utils::StrongMemoize DEFAULT_ALERT_TITLE = ::Gitlab::AlertManagement::Payload::Generic::DEFAULT_TITLE DEFAULT_INCIDENT_TITLE = 'New: Incident' # @param alert [AlertManagement::Alert] # @param user [User] def initialize(alert, user) @alert = alert @user = user end def execute return error_no_permissions unless allowed? return error_issue_already_exists if alert.issue result = create_incident return result unless result.success? issue = result[:issue] perform_after_create_tasks(issue) result end private attr_reader :alert, :user delegate :project, to: :alert def allowed? user.can?(:create_issue, project) end def create_incident ::IncidentManagement::Incidents::CreateService.new( project, user, title: alert_presenter.title, description: alert_presenter.issue_description, severity: alert.severity, alert: alert ).execute end def update_title_for(issue) return unless issue.title == DEFAULT_ALERT_TITLE issue.update!(title: "#{DEFAULT_INCIDENT_TITLE} #{issue.iid}") end def perform_after_create_tasks(issue) update_title_for(issue) SystemNoteService.new_alert_issue(alert, issue, user) end def error(message, issue = nil) ServiceResponse.error(payload: { issue: issue }, message: message) end def error_issue_already_exists error(_('An issue already exists')) end def error_no_permissions error(_('You have no permissions')) end def alert_presenter strong_memoize(:alert_presenter) do alert.present end end end end AlertManagement::CreateAlertIssueService.prepend_mod ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe AlertManagement::CreateAlertIssueService, feature_category: :incident_management do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, group: group) } let_it_be(:payload) do { 'startsAt' => '2020-04-27T10:10:22.265949279Z', 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1' } end let_it_be(:generic_alert, reload: true) { create(:alert_management_alert, :triggered, project: project, payload: payload) } let_it_be(:prometheus_alert, reload: true) { create(:alert_management_alert, :triggered, :prometheus, project: project, payload: payload) } let(:alert) { generic_alert } let(:alert_presenter) { alert.present } let(:created_issue) { Issue.last! } describe '#execute' do subject(:execute) { described_class.new(alert, user).execute } before do allow(user).to receive(:can?).and_call_original allow(user).to receive(:can?) .with(:create_issue, project) .and_return(can_create) end shared_examples 'creating an alert issue' do it 'creates an issue' do expect { execute }.to change { project.issues.count }.by(1) end it 'returns a created issue' do expect(execute.payload).to eq(issue: created_issue) end it 'has a successful status' do expect(execute).to be_success end it 'sets alert.issue_id in the same ActiveRecord query execution' do execute expect(alert.issue_id).to eq(created_issue.id) end it 'creates a system note' do expect { execute }.to change { alert.reload.notes.count }.by(1) end end shared_examples 'setting an issue attributes' do before do execute end it 'sets issue author to the current user' do expect(created_issue.author).to eq(user) end it 'sets the issue title' do expect(created_issue.title).to eq(alert.title) end it 'sets the issue description' do expect(created_issue.description).to include(alert_presenter.send(:issue_summary_markdown).strip) end end context 'when a user is allowed to create an issue' do let(:can_create) { true } before do project.add_developer(user) end it 'checks permissions' do execute expect(user).to have_received(:can?).with(:create_issue, project).exactly(2).times end context 'with alert severity' do using RSpec::Parameterized::TableSyntax where(:alert_severity, :incident_severity) do 'critical' | 'critical' 'high' | 'high' 'medium' | 'medium' 'low' | 'low' 'info' | 'unknown' 'unknown' | 'unknown' end with_them do before do alert.update!(severity: alert_severity) execute end it 'sets the correct severity level' do expect(created_issue.severity).to eq(incident_severity) end end end context 'when the alert is prometheus alert' do let(:alert) { prometheus_alert } let(:issue) { subject.payload[:issue] } it_behaves_like 'creating an alert issue' it_behaves_like 'setting an issue attributes' end context 'when the alert is generic' do let(:alert) { generic_alert } let(:issue) { subject.payload[:issue] } let(:default_alert_title) { described_class::DEFAULT_ALERT_TITLE } it_behaves_like 'creating an alert issue' it_behaves_like 'setting an issue attributes' context 'when alert title matches the default title exactly' do before do generic_alert.update!(title: default_alert_title) end it 'updates issue title with the IID' do execute expect(created_issue.title).to eq("New: Incident #{created_issue.iid}") end end context 'when the alert title contains the default title' do let(:non_default_alert_title) { "Not #{default_alert_title}" } before do generic_alert.update!(title: non_default_alert_title) end it 'does not change 
issue title' do execute expect(created_issue.title).to eq(non_default_alert_title) end end end context 'when issue cannot be created' do let(:alert) { generic_alert } before do # Invalid alert alert.update_columns(title: '') end it 'has an unsuccessful status' do expect(execute).to be_error expect(execute.errors).to contain_exactly("Title can't be blank") end end context 'when alert cannot be updated' do let(:alert) { create(:alert_management_alert, :with_validation_errors, :triggered, project: project, payload: payload) } it 'responds with error' do expect(execute).to be_error expect(execute.errors).to contain_exactly('Hosts hosts array is over 255 chars') end end context 'when alert already has an attached issue' do let!(:issue) { create(:issue, project: project) } before do alert.update!(issue_id: issue.id) end it 'does not create yet another issue' do expect { execute }.not_to change(Issue, :count) end it 'responds with error' do expect(execute).to be_error expect(execute.message).to eq(_('An issue already exists')) end end end context 'when a user is not allowed to create an issue' do let(:can_create) { false } it 'checks permissions' do execute expect(user).to have_received(:can?).with(:create_issue, project) end it 'responds with error' do expect(execute).to be_error expect(execute.message).to eq(_('You have no permissions')) end end end end
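A minimal usage sketch of promoting an alert to an incident, assuming `alert` belongs to a project where `user` can create issues.

```ruby
result = AlertManagement::CreateAlertIssueService.new(alert, user).execute

if result.success?
  issue = result.payload[:issue]
  [issue.iid, issue.severity, alert.reload.issue_id == issue.id]
else
  result.message # e.g. 'An issue already exists' or 'You have no permissions'
end
```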
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module AlertManagement module HttpIntegrations class DestroyService # @param integration [AlertManagement::HttpIntegration] # @param current_user [User] def initialize(integration, current_user) @integration = integration @current_user = current_user end def execute return error_no_permissions unless allowed? return error_legacy_prometheus unless destroy_allowed? if integration.destroy success else error(integration.errors.full_messages.to_sentence) end end private attr_reader :integration, :current_user def allowed? current_user&.can?(:admin_operations, integration) end # Prevents downtime while migrating from Integrations::Prometheus. # Remove with https://gitlab.com/gitlab-org/gitlab/-/issues/409734 def destroy_allowed? !(integration.legacy? && integration.prometheus?) end def error(message) ServiceResponse.error(message: message) end def success ServiceResponse.success(payload: { integration: integration }) end def error_no_permissions error(_('You have insufficient permissions to remove this HTTP integration')) end def error_legacy_prometheus error(_('Legacy Prometheus integrations cannot currently be removed')) end end end end ```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe AlertManagement::HttpIntegrations::DestroyService, feature_category: :incident_management do
  let_it_be(:user_with_permissions) { create(:user) }
  let_it_be(:user_without_permissions) { create(:user) }
  let_it_be(:project) { create(:project) }

  let!(:integration) { create(:alert_management_http_integration, project: project) }
  let(:current_user) { user_with_permissions }
  let(:service) { described_class.new(integration, current_user) }

  before_all do
    project.add_maintainer(user_with_permissions)
  end

  describe '#execute' do
    shared_examples 'error response' do |message|
      it 'has an informative message' do
        expect(response).to be_error
        expect(response.message).to eq(message)
      end
    end

    subject(:response) { service.execute }

    context 'when the current_user is anonymous' do
      let(:current_user) { nil }

      it_behaves_like 'error response', 'You have insufficient permissions to remove this HTTP integration'
    end

    context 'when current_user does not have permission to remove integrations' do
      let(:current_user) { user_without_permissions }

      it_behaves_like 'error response', 'You have insufficient permissions to remove this HTTP integration'
    end

    context 'when an error occurs during removal' do
      before do
        allow(integration).to receive(:destroy).and_return(false)
        integration.errors.add(:name, 'cannot be removed')
      end

      it_behaves_like 'error response', 'Name cannot be removed'
    end

    context 'when destroying a legacy Prometheus integration' do
      let_it_be(:existing_integration) { create(:alert_management_prometheus_integration, :legacy, project: project) }

      let!(:integration) { existing_integration }

      it_behaves_like 'error response', 'Legacy Prometheus integrations cannot currently be removed'
    end

    it 'successfully returns the integration' do
      expect(response).to be_success

      integration_result = response.payload[:integration]
      expect(integration_result).to be_a(::AlertManagement::HttpIntegration)
      expect(integration_result.name).to eq(integration.name)
      expect(integration_result.active).to eq(integration.active)
      expect(integration_result.token).to eq(integration.token)
      expect(integration_result.endpoint_identifier).to eq(integration.endpoint_identifier)

      expect { integration.reload }.to raise_error ActiveRecord::RecordNotFound
    end
  end
end
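For reference, the happy-path call the final example above asserts, assuming `current_user` is a project maintainer.

```ruby
response = AlertManagement::HttpIntegrations::DestroyService
  .new(integration, current_user)
  .execute

response.success?              # => true
response.payload[:integration] # the destroyed record, no longer in the database
```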
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module AlertManagement module HttpIntegrations class UpdateService < BaseService # @param integration [AlertManagement::HttpIntegration] # @param current_user [User] # @param params [Hash] def initialize(integration, current_user, params) @integration = integration super(integration.project, current_user, params) end def execute return error_no_permissions unless allowed? integration.transaction do if integration.update(permitted_params.merge(token_params)) @response = success(integration) if type_update? && too_many_integrations?(integration) @response = error_multiple_integrations raise ActiveRecord::Rollback end else @response = error_on_save(integration) end end @response end private attr_reader :integration def token_params return {} unless params[:regenerate_token] { token: nil } end def type_update? params[:type_identifier].present? end def error_no_permissions error(_('You have insufficient permissions to update this HTTP integration')) end end end end ```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe AlertManagement::HttpIntegrations::UpdateService, feature_category: :incident_management do
  let_it_be(:user_with_permissions) { create(:user) }
  let_it_be(:user_without_permissions) { create(:user) }
  let_it_be(:project) { create(:project) }
  let_it_be_with_reload(:integration) { create(:alert_management_http_integration, :inactive, project: project, name: 'Old Name') }

  let(:current_user) { user_with_permissions }
  let(:params) { {} }
  let(:service) { described_class.new(integration, current_user, params) }

  before_all do
    project.add_maintainer(user_with_permissions)
  end

  describe '#execute' do
    shared_examples 'error response' do |message|
      it 'has an informative message' do
        expect(response).to be_error
        expect(response.message).to eq(message)
      end
    end

    subject(:response) { service.execute }

    context 'when the current_user is anonymous' do
      let(:current_user) { nil }

      it_behaves_like 'error response', 'You have insufficient permissions to update this HTTP integration'
    end

    context 'when current_user does not have permission to update integrations' do
      let(:current_user) { user_without_permissions }

      it_behaves_like 'error response', 'You have insufficient permissions to update this HTTP integration'
    end

    context 'when an error occurs during update' do
      let(:params) { { name: '' } }

      it_behaves_like 'error response', "Name can't be blank"
    end

    context 'with name param' do
      let(:params) { { name: 'New Name' } }

      it 'successfully updates the integration' do
        expect(response).to be_success
        expect(response.payload[:integration].name).to eq('New Name')
      end
    end

    context 'with active param' do
      let(:params) { { active: true } }

      it 'successfully updates the integration' do
        expect(response).to be_success
        expect(response.payload[:integration]).to be_active
      end
    end

    context 'with regenerate_token flag' do
      let(:params) { { regenerate_token: true } }

      it 'successfully updates the integration' do
        previous_token = integration.token

        expect(response).to be_success
        expect(response.payload[:integration].token).not_to eq(previous_token)
      end
    end
  end
end
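A brief sketch of rotating an integration token through the update service, assuming a maintainer `current_user`; per the spec above, the returned integration carries a token different from the previous one.

```ruby
old_token = integration.token

response = AlertManagement::HttpIntegrations::UpdateService
  .new(integration, current_user, { regenerate_token: true })
  .execute

response.payload[:integration].token != old_token # => true when response.success?
```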
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module AlertManagement module HttpIntegrations class CreateService < BaseService def execute return error_no_permissions unless allowed? ::AlertManagement::HttpIntegration.transaction do integration = project.alert_management_http_integrations.build(permitted_params) if integration.save @response = success(integration) if too_many_integrations?(integration) @response = error_multiple_integrations raise ActiveRecord::Rollback end else @response = error_on_save(integration) end end @response end private def error_no_permissions error(_('You have insufficient permissions to create an HTTP integration for this project')) end end end end ```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe AlertManagement::HttpIntegrations::CreateService, feature_category: :incident_management do
  let_it_be(:user_with_permissions) { create(:user) }
  let_it_be(:user_without_permissions) { create(:user) }
  let_it_be_with_reload(:project) { create(:project) }

  let(:current_user) { user_with_permissions }
  let(:params) { {} }
  let(:service) { described_class.new(project, current_user, params) }

  before_all do
    project.add_maintainer(user_with_permissions)
  end

  describe '#execute' do
    shared_examples 'error response' do |message|
      it 'has an informative message' do
        expect(response).to be_error
        expect(response.message).to eq(message)
      end
    end

    subject(:response) { service.execute }

    context 'when the current_user is anonymous' do
      let(:current_user) { nil }

      it_behaves_like 'error response', 'You have insufficient permissions to create an HTTP integration for this project'
    end

    context 'when current_user does not have permission to create integrations' do
      let(:current_user) { user_without_permissions }

      it_behaves_like 'error response', 'You have insufficient permissions to create an HTTP integration for this project'
    end

    context 'when an error occurs during creation' do
      it_behaves_like 'error response', "Name can't be blank"
    end

    context 'with valid params' do
      let(:params) { { name: 'New HTTP Integration', active: true } }

      it 'successfully creates an integration' do
        expect(response).to be_success

        integration = response.payload[:integration]
        expect(integration).to be_a(::AlertManagement::HttpIntegration)
        expect(integration.name).to eq('New HTTP Integration')
        expect(integration).to be_active
        expect(integration.token).to be_present
        expect(integration.endpoint_identifier).to be_present
      end

      context 'with an existing HTTP integration' do
        let_it_be(:http_integration) { create(:alert_management_http_integration, project: project) }

        it_behaves_like 'error response', 'Multiple integrations of a single type are not supported for this project'

        context 'when creating a different type of integration' do
          let(:params) { { type_identifier: :prometheus, name: 'Prometheus' } }

          it 'is successful' do
            expect(response).to be_success
            expect(response.payload[:integration]).to be_a(::AlertManagement::HttpIntegration)
          end
        end
      end

      context 'with an existing Prometheus integration' do
        let_it_be(:http_integration) { create(:alert_management_prometheus_integration, project: project) }

        context 'when creating a different type of integration' do
          it 'is successful' do
            expect(response).to be_success
            expect(response.payload[:integration]).to be_a(::AlertManagement::HttpIntegration)
          end
        end

        context 'when creating the same type of integration' do
          let(:params) { { type_identifier: :prometheus, name: 'Prometheus' } }

          it_behaves_like 'error response', 'Multiple integrations of a single type are not supported for this project'
        end
      end
    end
  end
end
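A minimal creation sketch, assuming a maintainer `current_user`; the integration name is a hypothetical value, and `type_identifier` is only needed when adding the non-default (Prometheus) type alongside an existing HTTP one.

```ruby
response = AlertManagement::HttpIntegrations::CreateService
  .new(project, current_user, { name: 'Deployment alerts', active: true })
  .execute

integration = response.payload[:integration] if response.success?
integration&.token               # generated on save
integration&.endpoint_identifier # generated on save
```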
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module AlertManagement module MetricImages class UploadService < BaseService attr_reader :alert, :file, :url, :url_text, :metric def initialize(alert, current_user, params = {}) super @alert = alert @file = params.fetch(:file) @url = params.fetch(:url, nil) @url_text = params.fetch(:url_text, nil) end def execute unless can_upload_metrics? return ServiceResponse.error( message: _("You are not authorized to upload metric images"), http_status: :forbidden ) end metric = AlertManagement::MetricImage.new( alert: alert, file: file, url: url, url_text: url_text ) if metric.save ServiceResponse.success(payload: { metric: metric, alert: alert }) else ServiceResponse.error(message: metric.errors.full_messages.join(', '), http_status: :bad_request) end end private def can_upload_metrics? current_user&.can?(:upload_alert_management_metric_image, alert) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe AlertManagement::MetricImages::UploadService, feature_category: :metrics do subject(:service) { described_class.new(alert, current_user, params) } let_it_be_with_refind(:project) { create(:project) } let_it_be_with_refind(:alert) { create(:alert_management_alert, project: project) } let_it_be_with_refind(:current_user) { create(:user) } let(:params) do { file: fixture_file_upload('spec/fixtures/rails_sample.jpg', 'image/jpg'), url: 'https://www.gitlab.com' } end describe '#execute' do subject { service.execute } shared_examples 'uploads the metric' do it 'uploads the metric and returns a success' do expect { subject }.to change(AlertManagement::MetricImage, :count).by(1) expect(subject.success?).to eq(true) expect(subject.payload).to match({ metric: instance_of(AlertManagement::MetricImage), alert: alert }) end end shared_examples 'no metric saved, an error given' do |message| it 'returns an error and does not upload', :aggregate_failures do expect(subject.success?).to eq(false) expect(subject.message).to match(a_string_matching(message)) expect(AlertManagement::MetricImage.count).to eq(0) end end context 'user does not have permissions' do it_behaves_like 'no metric saved, an error given', 'You are not authorized to upload metric images' end context 'user has permissions' do before_all do project.add_developer(current_user) end it_behaves_like 'uploads the metric' context 'no url given' do let(:params) do { file: fixture_file_upload('spec/fixtures/rails_sample.jpg', 'image/jpg') } end it_behaves_like 'uploads the metric' end context 'record invalid' do let(:params) do { file: fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain'), url: 'https://www.gitlab.com' } end it_behaves_like 'no metric saved, an error given', /File does not have a supported extension. Only png, jpg, jpeg, gif, bmp, tiff, ico, and webp are supported/ # rubocop: disable Layout/LineLength end context 'user is guest' do before_all do project.add_guest(current_user) end it_behaves_like 'no metric saved, an error given', 'You are not authorized to upload metric images' end end end end
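A minimal usage sketch of the upload service above, based only on its shown interface; `alert`, `current_user`, and `upload` (any uploaded-file object, such as the fixture the spec uses) are assumptions, not part of the original record.

```ruby
# Hypothetical invocation; `alert`, `current_user` (a developer on the alert's
# project) and `upload` (an uploaded-file object) are assumed to exist.
response = AlertManagement::MetricImages::UploadService
  .new(alert, current_user, { file: upload, url: 'https://www.gitlab.com' })
  .execute

response.success?          # false with :forbidden or :bad_request on failure
response.payload[:metric]  # the persisted AlertManagement::MetricImage on success
```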
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module AlertManagement module Alerts class UpdateService < ::BaseProjectService include Gitlab::Utils::StrongMemoize # @param alert [AlertManagement::Alert] # @param current_user [User] # @param params [Hash] Attributes of the alert def initialize(alert, current_user, params) @alert = alert @param_errors = [] @status = params.delete(:status) super(project: alert.project, current_user: current_user, params: params) end def execute return error_no_permissions unless allowed? filter_params return error_invalid_params if param_errors.any? # Save old assignees for system notes old_assignees = alert.assignees.to_a if alert.update(params) handle_changes(old_assignees: old_assignees) success else error(alert.errors.full_messages.to_sentence) end end private attr_reader :alert, :param_errors, :status def allowed? current_user&.can?(:update_alert_management_alert, alert) end def todo_service strong_memoize(:todo_service) do TodoService.new end end def success ServiceResponse.success(payload: { alert: alert }) end def error(message) ServiceResponse.error(payload: { alert: alert }, message: message) end def error_no_permissions error(_('You have no permissions')) end def error_invalid_params error(param_errors.to_sentence) end def add_param_error(message) param_errors << message end def param_errors? params.empty? && status.blank? end def filter_params param_errors << _('Please provide attributes to update') if param_errors? filter_status filter_assignees filter_duplicate end def handle_changes(old_assignees:) handle_assignement(old_assignees) if params[:assignees] handle_status_change if params[:status_event] end # ----- Assignee-related behavior ------ def filter_assignees return if params[:assignees].nil? # Always take first assignee while multiple are not currently supported params[:assignees] = Array(params[:assignees].first) param_errors << _('Assignee has no permissions') if unauthorized_assignees? end def unauthorized_assignees? params[:assignees]&.any? { |user| !user.can?(:read_alert_management_alert, alert) } end def handle_assignement(old_assignees) assign_todo(old_assignees) add_assignee_system_note(old_assignees) end def assign_todo(old_assignees) todo_service.reassigned_assignable(alert, current_user, old_assignees) end def add_assignee_system_note(old_assignees) SystemNoteService.change_issuable_assignees(alert, project, current_user, old_assignees) end # ------ Status-related behavior ------- def filter_status return unless status status_event = alert.status_event_for(status) unless status_event param_errors << _('Invalid status') return end params[:status_event] = status_event end def handle_status_change add_status_change_system_note resolve_todos if alert.resolved? end def add_status_change_system_note SystemNoteService.change_alert_status(alert, current_user) end def resolve_todos todo_service.resolve_todos_for_target(alert, current_user) end def filter_duplicate # Only need to check if changing to a not-resolved status return if params[:status_event].blank? || params[:status_event] == :resolve return unless alert.resolved? param_errors << unresolved_alert_error if duplicate_alert? end def duplicate_alert? return if alert.fingerprint.blank? unresolved_alert.present? 
end def unresolved_alert strong_memoize(:unresolved_alert) do AlertManagement::Alert.find_unresolved_alert(project, alert.fingerprint) end end def unresolved_alert_error _('An %{link_start}alert%{link_end} with the same fingerprint is already open. ' \ 'To change the status of this alert, resolve the linked alert.' ) % unresolved_alert_url_params end def unresolved_alert_url_params alert_path = Gitlab::Routing.url_helpers.details_project_alert_management_path(project, unresolved_alert) { link_start: '<a href="%{url}">'.html_safe % { url: alert_path }, link_end: '</a>'.html_safe } end end end end AlertManagement::Alerts::UpdateService.prepend_mod ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe AlertManagement::Alerts::UpdateService, feature_category: :incident_management do let_it_be(:user_with_permissions) { create(:user) } let_it_be(:other_user_with_permissions) { create(:user) } let_it_be(:user_without_permissions) { create(:user) } let_it_be(:project) { create(:project) } let_it_be(:alert, reload: true) { create(:alert_management_alert, :triggered, project: project) } let(:current_user) { user_with_permissions } let(:params) { {} } let(:service) { described_class.new(alert, current_user, params) } before_all do project.add_developer(user_with_permissions) project.add_developer(other_user_with_permissions) end describe '#execute' do shared_examples 'does not add a todo' do specify { expect { response }.not_to change(Todo, :count) } end shared_examples 'does not add a system note' do specify { expect { response }.not_to change(Note, :count) } end shared_examples 'adds a system note' do |note_matcher = nil| specify do expect { response }.to change { alert.reload.notes.count }.by(1) expect(alert.notes.last.note).to match(note_matcher) if note_matcher end end shared_examples 'error response' do |message| it_behaves_like 'does not add a todo' it_behaves_like 'does not add a system note' it 'has an informative message' do expect(response).to be_error expect(response.message).to eq(message) end end subject(:response) { service.execute } context 'when the current_user is nil' do let(:current_user) { nil } it_behaves_like 'error response', 'You have no permissions' end context 'when current_user does not have permission to update alerts' do let(:current_user) { user_without_permissions } it_behaves_like 'error response', 'You have no permissions' end context 'when no parameters are included' do it_behaves_like 'error response', 'Please provide attributes to update' end context 'when an error occurs during update' do let(:params) { { title: nil } } it_behaves_like 'error response', "Title can't be blank" end shared_examples 'title update' do it_behaves_like 'does not add a todo' it_behaves_like 'does not add a system note' it 'updates the attribute' do original_title = alert.title expect { response }.to change { alert.title }.from(original_title).to(expected_title) expect(response).to be_success end end context 'when a model attribute is included without assignees' do let(:params) { { title: 'This is an updated alert.' } } let(:expected_title) { params[:title] } it_behaves_like 'title update' end context 'when alert is resolved and another existing unresolved alert' do let!(:alert) { create(:alert_management_alert, :resolved, project: project) } let!(:existing_alert) { create(:alert_management_alert, :triggered, project: project) } let(:params) { { title: 'This is an updated alert.' 
} } let(:expected_title) { params[:title] } it_behaves_like 'title update' end context 'when assignees are included' do shared_examples 'adds a todo' do let(:assignee) { expected_assignees.first } specify do expect { response }.to change { assignee.reload.todos.count }.by(1) expect(assignee.todos.last.author).to eq(current_user) end end shared_examples 'successful assignment' do it_behaves_like 'adds a system note' it_behaves_like 'adds a todo' after do alert.assignees = [] end specify do expect { response }.to change { alert.reload.assignees }.from([]).to(expected_assignees) expect(response).to be_success end end let(:expected_assignees) { params[:assignees] } context 'when the assignee is the current user' do let(:params) { { assignees: [current_user] } } it_behaves_like 'successful assignment' end context 'when the assignee has read permissions' do let(:params) { { assignees: [other_user_with_permissions] } } it_behaves_like 'successful assignment' end context 'when the assignee does not have read permissions' do let(:params) { { assignees: [user_without_permissions] } } it_behaves_like 'error response', 'Assignee has no permissions' end context 'when user is already assigned' do let(:params) { { assignees: [user_with_permissions] } } before do alert.assignees << user_with_permissions end it_behaves_like 'does not add a system note' it_behaves_like 'does not add a todo' end context 'with multiple users included' do let(:params) { { assignees: [user_with_permissions, user_without_permissions] } } let(:expected_assignees) { [user_with_permissions] } it_behaves_like 'successful assignment' end end context 'when a status is included' do let(:params) { { status: new_status } } let(:new_status) { :acknowledged } it 'successfully changes the status' do expect { response }.to change { alert.acknowledged? }.to(true) expect(response).to be_success expect(response.payload[:alert]).to eq(alert) end it_behaves_like 'adds a system note' context 'with unknown status' do let(:new_status) { :unknown_status } it_behaves_like 'error response', 'Invalid status' end context 'with resolving status' do let(:new_status) { :resolved } it 'changes the status' do expect { response }.to change { alert.resolved? }.to(true) end it "resolves the current user's related todos" do todo = create(:todo, :pending, target: alert, user: current_user, project: alert.project) expect { response }.to change { todo.reload.state }.from('pending').to('done') end end context 'with existing unresolved alert' do context 'with fingerprints' do let_it_be(:existing_alert) { create(:alert_management_alert, :triggered, fingerprint: alert.fingerprint, project: project) } it 'does not query for existing alerts' do expect(::AlertManagement::Alert).not_to receive(:find_unresolved_alert) response end context 'when status was resolved' do let_it_be(:alert) { create(:alert_management_alert, :resolved, :with_fingerprint, project: project) } let_it_be(:existing_alert) { create(:alert_management_alert, :triggered, fingerprint: alert.fingerprint, project: project) } let(:url) { Gitlab::Routing.url_helpers.details_project_alert_management_path(project, existing_alert) } let(:link) { ActionController::Base.helpers.link_to(_('alert'), url) } let(:message) do "An #{link} with the same fingerprint is already open. " \ 'To change the status of this alert, resolve the linked alert.' 
end it_behaves_like 'does not add a todo' it_behaves_like 'does not add a system note' it 'has an informative message' do expect(response).to be_error expect(response.message).to eq(message) end end end context 'without fingerprints' do let_it_be(:existing_alert) { create(:alert_management_alert, :triggered, fingerprint: alert.fingerprint, project: project) } it 'successfully changes the status' do expect { response }.to change { alert.acknowledged? }.to(true) expect(response).to be_success expect(response.payload[:alert]).to eq(alert) end it_behaves_like 'adds a system note' end end context 'two existing closed alerts' do let_it_be(:alert) { create(:alert_management_alert, :resolved, :with_fingerprint, project: project) } let_it_be(:existing_alert) { create(:alert_management_alert, :resolved, fingerprint: alert.fingerprint, project: project) } it 'successfully changes the status' do expect { response }.to change { alert.acknowledged? }.to(true) expect(response).to be_success expect(response.payload[:alert]).to eq(alert) end it_behaves_like 'adds a system note' end end end end
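A minimal usage sketch of the update service tested above, using only parameters the spec itself exercises; `alert`, `current_user`, and `assignee` are hypothetical records.

```ruby
# Hypothetical invocation; `alert`, `current_user` and `assignee` are assumed,
# and the current user can update the alert.
response = AlertManagement::Alerts::UpdateService
  .new(alert, current_user, { status: :acknowledged, assignees: [assignee] })
  .execute

response.success?         # false for empty params, unknown statuses, or missing permissions
response.payload[:alert]  # the alert is returned in the payload either way
```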
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module AlertManagement module Alerts module Todo class CreateService # @param alert [AlertManagement::Alert] # @param current_user [User] def initialize(alert, current_user) @alert = alert @current_user = current_user end def execute return error_no_permissions unless allowed? todos = TodoService.new.mark_todo(alert, current_user) todo = todos&.first return error_existing_todo unless todo success(todo) end private attr_reader :alert, :current_user def allowed? current_user&.can?(:update_alert_management_alert, alert) end def error(message) ServiceResponse.error(payload: { alert: alert, todo: nil }, message: message) end def success(todo) ServiceResponse.success(payload: { alert: alert, todo: todo }) end def error_no_permissions error(_('You have insufficient permissions to create a Todo for this alert')) end def error_existing_todo error(_('You already have pending todo for this alert')) end end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe AlertManagement::Alerts::Todo::CreateService, feature_category: :incident_management do let_it_be(:user) { create(:user) } let_it_be(:alert) { create(:alert_management_alert) } let(:current_user) { user } describe '#execute' do subject(:result) { described_class.new(alert, current_user).execute } shared_examples 'permissions error' do it 'returns an error', :aggregate_failures do expect(result.error?).to be(true) expect(result.message).to eq('You have insufficient permissions to create a Todo for this alert') expect(result.payload[:todo]).to be(nil) expect(result.payload[:alert]).to be(alert) end end context 'when the user is anonymous' do let(:current_user) { nil } it_behaves_like 'permissions error' end context 'when the user does not have permission' do it_behaves_like 'permissions error' end context 'when user has permission' do before do alert.project.add_developer(user) end it 'creates a todo' do expect { result }.to change { Todo.count }.by(1) end it 'returns the alert and todo in the payload', :aggregate_failures do expect(result.success?).to be(true) expect(result.payload[:alert][:id]).to be(alert.id) expect(result.payload[:todo][:id]).to be(Todo.last.id) end context 'when the user has a marked todo for the alert' do let_it_be(:todo_params) do { project: alert.project, target: alert, user: user, action: Todo::MARKED } end context 'when todo is pending' do before_all do create(:todo, :pending, **todo_params) end before do stub_feature_flags(multiple_todos: false) end it 'does not create a todo' do expect { result }.not_to change { Todo.count } end it 'returns an error', :aggregate_failures do expect(result.error?).to be(true) expect(result.message).to be('You already have pending todo for this alert') expect(result.payload[:todo]).to be(nil) expect(result.payload[:alert]).to be(alert) end end context 'when todo is done' do before do create(:todo, :done, **todo_params) end it { expect(result.success?).to be(true) } it { expect { result }.to change { Todo.count }.by(1) } end end end end end
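A minimal usage sketch of the todo service above; `alert` and `current_user` are hypothetical placeholders.

```ruby
# Hypothetical invocation; assumes `alert` and `current_user` with update access.
response = AlertManagement::Alerts::Todo::CreateService.new(alert, current_user).execute

if response.success?
  response.payload[:todo]  # the newly created Todo
else
  response.message         # permission error or the pending-todo error
end
```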
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Suggestions class OutdateService def execute(merge_request) # rubocop: disable CodeReuse/ActiveRecord suggestions = merge_request.suggestions.active.includes(:note) suggestions.find_in_batches(batch_size: 100) do |group| outdatable_suggestion_ids = group.select do |suggestion| suggestion.outdated?(cached: false) end.map(&:id) Suggestion.where(id: outdatable_suggestion_ids).update_all(outdated: true) end # rubocop: enable CodeReuse/ActiveRecord end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Suggestions::OutdateService, feature_category: :code_suggestions do describe '#execute' do let(:merge_request) { create(:merge_request) } let(:project) { merge_request.target_project } let(:user) { merge_request.author } let(:file_path) { 'files/ruby/popen.rb' } let(:branch_name) { project.default_branch } let(:diff_file) { suggestion.diff_file } let(:position) { build_position(file_path, comment_line) } let(:note) do create( :diff_note_on_merge_request, noteable: merge_request, position: position, project: project ) end def build_position(path, line) Gitlab::Diff::Position.new( old_path: path, new_path: path, old_line: nil, new_line: line, diff_refs: merge_request.diff_refs ) end def commit_changes(file_path, new_content) params = { file_path: file_path, commit_message: "Update File", file_content: new_content, start_project: project, start_branch: project.default_branch, branch_name: branch_name } Files::UpdateService.new(project, user, params).execute end def update_file_line(diff_file, change_line, content) new_lines = diff_file.new_blob.data.lines new_lines[change_line..change_line] = content result = commit_changes(diff_file.file_path, new_lines.join) newrev = result[:result] expect(result[:status]).to eq(:success) expect(newrev).to be_present # Ensure all memoized data is cleared in order # to generate the new merge_request_diff. MergeRequest.find(merge_request.id).reload_diff(user) note.reload end before do project.add_maintainer(user) end subject { described_class.new.execute(merge_request) } context 'when there is a change within multi-line suggestion range' do let(:comment_line) { 9 } let(:lines_above) { 8 } # suggesting to change lines 1..9 let(:change_line) { 2 } # line 2 is within the range let!(:suggestion) do create(:suggestion, :content_from_repo, note: note, lines_above: lines_above) end it 'updates the outdatable suggestion record' do update_file_line(diff_file, change_line, "# foo\nbar\n") # Make sure note is still active expect(note.active?).to be(true) expect { subject }.to change { suggestion.reload.outdated } .from(false).to(true) end end context 'when there is no change within multi-line suggestion range' do let(:comment_line) { 9 } let(:lines_above) { 3 } # suggesting to change lines 6..9 let(:change_line) { 2 } # line 2 is not within the range let!(:suggestion) do create(:suggestion, :content_from_repo, note: note, lines_above: lines_above) end subject { described_class.new.execute(merge_request) } it 'does not outdates suggestion record' do update_file_line(diff_file, change_line, "# foo\nbar\n") # Make sure note is still active expect(note.active?).to be(true) expect { subject }.not_to change { suggestion.reload.outdated }.from(false) end end end end
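A minimal usage sketch of the outdate service above; `merge_request` and `suggestion` (one of its suggestions whose diff lines changed) are hypothetical.

```ruby
# Hypothetical invocation; `merge_request` and `suggestion` are assumed to exist.
Suggestions::OutdateService.new.execute(merge_request)

suggestion.reload.outdated # => true once the change overlaps the suggested range
```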
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Suggestions class ApplyService < ::BaseService def initialize(current_user, *suggestions, message: nil) @current_user = current_user @message = message @suggestion_set = Gitlab::Suggestions::SuggestionSet.new(suggestions) end def execute if suggestion_set.valid? result else error(suggestion_set.error_message) end end private attr_reader :current_user, :suggestion_set def result multi_service.execute.tap do |result| update_suggestions(result) end end def update_suggestions(result) return unless result[:status] == :success Suggestion.id_in(suggestion_set.suggestions) .update_all(commit_id: result[:result], applied: true) Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter .track_apply_suggestion_action(user: current_user, suggestions: suggestion_set.suggestions) end def author authors = suggestion_set.authors return unless authors.one? Gitlab::Git::User.from_gitlab(authors.first) end def multi_service params = { commit_message: commit_message, branch_name: suggestion_set.branch, start_branch: suggestion_set.branch, actions: suggestion_set.actions, author_name: author&.name, author_email: author&.email } ::Files::MultiService.new(suggestion_set.source_project, current_user, params) end def commit_message Gitlab::Suggestions::CommitMessage.new(current_user, suggestion_set, @message).message end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Suggestions::ApplyService, feature_category: :code_suggestions do include ProjectForksHelper def build_position(**optional_args) args = { old_path: "files/ruby/popen.rb", new_path: "files/ruby/popen.rb", old_line: nil, new_line: 9, diff_refs: merge_request.diff_refs, **optional_args } Gitlab::Diff::Position.new(args) end def create_suggestion(args) position_args = args.slice(:old_path, :new_path, :old_line, :new_line) content_args = args.slice(:from_content, :to_content) position = build_position(**position_args) diff_note = create( :diff_note_on_merge_request, noteable: merge_request, position: position, project: project ) suggestion_args = { note: diff_note }.merge(content_args) create(:suggestion, :content_from_repo, suggestion_args) end def apply(suggestions, custom_message = nil) result = apply_service.new(user, *suggestions, message: custom_message).execute suggestions.map(&:reload) expect(result[:status]).to eq(:success) end shared_examples 'successfully creates commit and updates suggestions' do it 'updates the files with the new content' do apply(suggestions) suggestions.each do |suggestion| path = suggestion.diff_file.file_path blob = project.repository.blob_at_branch(merge_request.source_branch, path) expect(blob.data).to eq(expected_content_by_path[path.to_sym]) end end it 'updates suggestion applied and commit_id columns' do expect(suggestions.map(&:applied)).to all(be false) expect(suggestions.map(&:commit_id)).to all(be nil) apply(suggestions) expect(suggestions.map(&:applied)).to all(be true) expect(suggestions.map(&:commit_id)).to all(be_present) end it 'created commit has users email and name' do apply(suggestions) commit = project.repository.commit author = suggestions.first.note.author expect(user.commit_email).not_to eq(user.email) expect(commit.author_email).to eq(author.commit_email_or_default) expect(commit.committer_email).to eq(user.commit_email) expect(commit.author_name).to eq(author.name) expect(commit.committer_name).to eq(user.name) end it 'tracks apply suggestion event' do expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter) .to receive(:track_apply_suggestion_action) .with(user: user, suggestions: suggestions) apply(suggestions) end context 'when a custom suggestion commit message' do before do project.update!(suggestion_commit_message: message) apply(suggestions) end context 'is not specified' do let(:message) { '' } it 'uses the default commit message' do expect(project.repository.commit.message).to( match(/\AApply #{suggestions.size} suggestion\(s\) to \d+ file\(s\)\z/) ) end end context 'is specified' do let(:message) do 'refactor: %{project_name} %{branch_name} %{username}' end it 'generates a custom commit message' do expect(project.repository.commit.message).to( eq("refactor: Project_1 master test.user") ) end end end context 'with a user suggested commit message' do let(:message) { "i'm a custom commit message!" 
} it "uses the user's commit message" do apply(suggestions, message) expect(project.repository.commit.message).to(eq(message)) end end end subject(:apply_service) { described_class } let_it_be(:user) do create(:user, :commit_email, name: 'Test User', username: 'test.user') end let(:project) do create(:project, :repository, path: 'project-1', name: 'Project_1') end let(:merge_request) do create(:merge_request, source_project: project, target_project: project, source_branch: 'master') end let(:position) { build_position } let(:diff_note) do create(:diff_note_on_merge_request, noteable: merge_request, position: position, project: project) end let(:suggestion) do create(:suggestion, :content_from_repo, note: diff_note, to_content: " raise RuntimeError, 'Explosion'\n # explosion?\n") end let(:suggestion2) do create_suggestion( to_content: " *** SUGGESTION CHANGE ***\n", new_line: 15) end let(:suggestion3) do create_suggestion( to_content: " *** ANOTHER SUGGESTION CHANGE ***\n", old_path: "files/ruby/regex.rb", new_path: "files/ruby/regex.rb", new_line: 22) end let(:suggestions) { [suggestion, suggestion2, suggestion3] } context 'patch is appliable' do let(:popen_content) do <<-CONTENT.strip_heredoc require 'fileutils' require 'open3' module Popen extend self def popen(cmd, path=nil) unless cmd.is_a?(Array) raise RuntimeError, 'Explosion' # explosion? end path ||= Dir.pwd vars = { *** SUGGESTION CHANGE *** } options = { chdir: path } unless File.directory?(path) FileUtils.mkdir_p(path) end @cmd_output = "" @cmd_status = 0 Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr| @cmd_output << stdout.read @cmd_output << stderr.read @cmd_status = wait_thr.value.exitstatus end return @cmd_output, @cmd_status end end CONTENT end let(:regex_content) do <<-CONTENT.strip_heredoc module Gitlab module Regex extend self def username_regex default_regex end def project_name_regex /\\A[a-zA-Z0-9][a-zA-Z0-9_\\-\\. ]*\\z/ end def name_regex /\\A[a-zA-Z0-9_\\-\\. ]*\\z/ end def path_regex default_regex end def archive_formats_regex *** ANOTHER SUGGESTION CHANGE *** end def git_reference_regex # Valid git ref regex, see: # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html %r{ (?! (?# doesn't begins with) \\/| (?# rule #6) (?# doesn't contain) .*(?: [\\\/.]\\\.| (?# rule #1,3) \\/\\/| (?# rule #6) @\\{| (?# rule #8) \\\\ (?# rule #9) ) ) [^\\000-\\040\\177~^:?*\\[]+ (?# rule #4-5) (?# doesn't end with) (?<!\\.lock) (?# rule #1) (?<![\\/.]) (?# rule #6-7) }x end protected def default_regex /\\A[.?]?[a-zA-Z0-9][a-zA-Z0-9_\\-\\.]*(?<!\\.git)\\z/ end end end CONTENT end let(:expected_content_by_path) do { "files/ruby/popen.rb": popen_content, "files/ruby/regex.rb": regex_content } end context 'non-fork project' do before do project.add_maintainer(user) end it_behaves_like 'successfully creates commit and updates suggestions' context 'when it fails to apply because a file was changed' do before do params = { file_path: suggestion3.diff_file.file_path, start_branch: suggestion3.branch, branch_name: suggestion3.branch, commit_message: 'Update file', file_content: 'New content' } # Reload the suggestion so it's memoized values get reset after the # file was changed. 
suggestion3.reload Files::UpdateService.new(project, user, params).execute end it 'returns error message' do result = apply_service.new(user, suggestion, suggestion3, suggestion2).execute expect(result).to eq(message: 'A file has been changed.', status: :error) end end context 'when HEAD from position is different from source branch HEAD on repo' do it 'returns error message' do allow(suggestion).to receive(:appliable?).and_return(true) allow(suggestion.position).to receive(:head_sha).and_return('old-sha') allow(suggestion.noteable).to receive(:source_branch_sha).and_return('new-sha') result = apply_service.new(user, suggestion).execute expect(result).to eq(message: 'A file has been changed.', status: :error) end end context 'single suggestion' do let(:author) { suggestions.first.note.author } let(:commit) { project.repository.commit } context 'author of suggestion applies suggestion' do before do suggestion.note.update!(author_id: user.id) apply(suggestions) end it 'created commit by same author and committer' do expect(user.commit_email).to eq(author.commit_email_or_default) expect(author).to eq(user) expect(commit.author_email).to eq(author.commit_email_or_default) expect(commit.committer_email).to eq(user.commit_email) expect(commit.author_name).to eq(author.name) expect(commit.committer_name).to eq(user.name) end end context 'another user applies suggestion' do before do apply(suggestions) end it 'created commit has authors info and commiters info' do expect(user.commit_email).not_to eq(user.email) expect(author).not_to eq(user) expect(commit.author_email).to eq(author.commit_email_or_default) expect(commit.committer_email).to eq(user.commit_email) expect(commit.author_name).to eq(author.name) expect(commit.committer_name).to eq(user.name) end end end context 'multiple suggestions' do let(:author_emails) { suggestions.map { |s| s.note.author.commit_email_or_default } } let(:first_author) { suggestion.note.author } let(:commit) { project.repository.commit } context 'when all the same author' do before do apply(suggestions) end it 'uses first authors information' do expect(author_emails).to include(first_author.commit_email_or_default).exactly(3) expect(commit.author_email).to eq(first_author.commit_email_or_default) end end context 'when all different authors' do before do suggestion2.note.update!(author_id: create(:user).id) suggestion3.note.update!(author_id: create(:user).id) apply(suggestions) end it 'uses committers information' do expect(commit.author_email).to eq(user.commit_email) expect(commit.committer_email).to eq(user.commit_email) end end end context 'multiple suggestions applied sequentially' do def apply_suggestion(suggestion) suggestion.reload merge_request.reload merge_request.clear_memoized_shas result = apply_service.new(user, suggestion).execute suggestion.reload expect(result[:status]).to eq(:success) refresh = MergeRequests::RefreshService.new(project: project, current_user: user) refresh.execute( merge_request.diff_head_sha, suggestion.commit_id, merge_request.source_branch_ref ) result end def fetch_raw_diff(suggestion) project.reload.commit(suggestion.commit_id) .diffs.diff_files.first.diff.diff end it 'applies multiple suggestions in subsequent versions correctly' do suggestion1 = create_suggestion( from_content: "\n", to_content: "# v1 change\n", old_line: nil, new_line: 13) suggestion2 = create_suggestion( from_content: " @cmd_output << stderr.read\n", to_content: "# v2 change\n", old_line: 24, new_line: 31) apply_suggestion(suggestion1) 
apply_suggestion(suggestion2) suggestion1_diff = fetch_raw_diff(suggestion1) suggestion2_diff = fetch_raw_diff(suggestion2) expected_suggestion1_diff = <<-CONTENT.strip_heredoc @@ -10,7 +10,7 @@ module Popen end path ||= Dir.pwd - +# v1 change vars = { "PWD" => path } CONTENT expected_suggestion2_diff = <<-CONTENT.strip_heredoc @@ -28,7 +28,7 @@ module Popen Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr| @cmd_output << stdout.read - @cmd_output << stderr.read +# v2 change @cmd_status = wait_thr.value.exitstatus end CONTENT expect(suggestion1_diff.strip).to eq(expected_suggestion1_diff.strip) expect(suggestion2_diff.strip).to eq(expected_suggestion2_diff.strip) end end context 'multi-line suggestion' do let(:popen_content) do <<~CONTENT.strip_heredoc require 'fileutils' require 'open3' module Popen extend self # multi # line vars = { "PWD" => path } options = { chdir: path } unless File.directory?(path) FileUtils.mkdir_p(path) end @cmd_output = "" @cmd_status = 0 Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr| @cmd_output << stdout.read @cmd_output << stderr.read @cmd_status = wait_thr.value.exitstatus end return @cmd_output, @cmd_status end end CONTENT end let(:expected_content_by_path) do { "files/ruby/popen.rb": popen_content } end let(:suggestion) do create(:suggestion, :content_from_repo, note: diff_note, lines_above: 2, lines_below: 3, to_content: "# multi\n# line\n") end let(:suggestions) { [suggestion] } it_behaves_like 'successfully creates commit and updates suggestions' end context 'remove an empty line suggestion' do let(:popen_content) do <<~CONTENT.strip_heredoc require 'fileutils' require 'open3' module Popen extend self def popen(cmd, path=nil) unless cmd.is_a?(Array) raise RuntimeError, "System commands must be given as an array of strings" end path ||= Dir.pwd vars = { "PWD" => path } options = { chdir: path } unless File.directory?(path) FileUtils.mkdir_p(path) end @cmd_output = "" @cmd_status = 0 Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr| @cmd_output << stdout.read @cmd_output << stderr.read @cmd_status = wait_thr.value.exitstatus end return @cmd_output, @cmd_status end end CONTENT end let(:expected_content_by_path) do { "files/ruby/popen.rb": popen_content } end let(:suggestion) do create_suggestion(to_content: "", new_line: 13) end let(:suggestions) { [suggestion] } it_behaves_like 'successfully creates commit and updates suggestions' end end context 'fork-project' do let(:project) { create(:project, :public, :repository) } let(:forked_project) do fork_project_with_submodules(project, user) end let(:merge_request) do create( :merge_request, source_branch: 'conflict-resolvable-fork', source_project: forked_project, target_branch: 'conflict-start', target_project: project ) end let!(:diff_note) do create( :diff_note_on_merge_request, noteable: merge_request, position: position, project: project ) end before do project.add_maintainer(user) end it 'updates file in the source project' do expect(Files::MultiService).to receive(:new) .with(merge_request.source_project, user, anything) .and_call_original apply_service.new(user, suggestion).execute end end end context 'no permission' do let(:merge_request) do create(:merge_request, source_project: project, target_project: project) end let(:diff_note) do create(:diff_note_on_merge_request, noteable: merge_request, position: position, project: project) end context 'user cannot write in project repo' do before do project.add_reporter(user) end it 'returns 
error' do result = apply_service.new(user, suggestion).execute expect(result).to eq( message: "You are not allowed to push into this branch", status: :error ) end end end context 'patch is not appliable' do let(:merge_request) do create(:merge_request, source_project: project, target_project: project) end let(:diff_note) do create(:diff_note_on_merge_request, noteable: merge_request, position: position, project: project) end before do project.add_maintainer(user) end shared_examples_for 'service not tracking apply suggestion event' do it 'does not track apply suggestion event' do expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter) .not_to receive(:track_apply_suggestion_action) result end end context 'diff file was not found' do let(:result) { apply_service.new(user, suggestion).execute } before do expect(suggestion.note).to receive(:latest_diff_file).and_return(nil) end it 'returns error message' do expect(result).to eq(message: 'A file was not found.', status: :error) end it_behaves_like 'service not tracking apply suggestion event' end context 'when not all suggestions belong to the same branch' do let(:merge_request2) do create( :merge_request, :conflict, source_project: project, target_project: project ) end let(:position2) do Gitlab::Diff::Position.new( old_path: "files/ruby/popen.rb", new_path: "files/ruby/popen.rb", old_line: nil, new_line: 15, diff_refs: merge_request2.diff_refs ) end let(:diff_note2) do create( :diff_note_on_merge_request, noteable: merge_request2, position: position2, project: project ) end let(:other_branch_suggestion) { create(:suggestion, note: diff_note2) } let(:result) { apply_service.new(user, suggestion, other_branch_suggestion).execute } it 'renders error message' do expect(result).to eq( message: 'Suggestions must all be on the same branch.', status: :error ) end it_behaves_like 'service not tracking apply suggestion event' end context 'suggestion is not appliable' do let(:inapplicable_reason) { "Can't apply this suggestion." } let(:result) { apply_service.new(user, suggestion).execute } before do expect(suggestion).to receive(:appliable?).and_return(false) expect(suggestion).to receive(:inapplicable_reason).and_return(inapplicable_reason) end it 'returns error message' do expect(result).to eq(message: inapplicable_reason, status: :error) end it_behaves_like 'service not tracking apply suggestion event' end context 'lines of suggestions overlap' do let(:suggestion) do create_suggestion( to_content: " raise RuntimeError, 'Explosion'\n # explosion?\n") end let(:overlapping_suggestion) do create_suggestion(to_content: "I Overlap!") end let(:result) { apply_service.new(user, suggestion, overlapping_suggestion).execute } it 'returns error message' do expect(result).to eq( message: 'Suggestions are not applicable as their lines cannot overlap.', status: :error ) end it_behaves_like 'service not tracking apply suggestion event' end end end
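A minimal usage sketch of the apply service exercised above; `current_user` and `suggestions` (persisted suggestions on the same merge request branch) are hypothetical.

```ruby
# Hypothetical invocation; `current_user` and `suggestions` are assumed, all
# belonging to the same branch.
result = Suggestions::ApplyService
  .new(current_user, *suggestions, message: 'Apply review suggestions')
  .execute

result[:status]  # :success or :error (a plain hash, as the spec asserts)
# On success each suggestion is marked applied and records the new commit_id.
```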
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Suggestions class CreateService def initialize(note) @note = note end def execute return unless @note.supports_suggestion? suggestions = Gitlab::Diff::SuggestionsParser.parse( @note.note, project: @note.project, position: @note.position ) rows = suggestions.map.with_index do |suggestion, index| creation_params = suggestion.to_hash.slice( :from_content, :to_content, :lines_above, :lines_below ) creation_params.merge!(note_id: @note.id, relative_order: index) end rows.in_groups_of(100, false) do |rows| ApplicationRecord.legacy_bulk_insert('suggestions', rows) # rubocop:disable Gitlab/BulkInsert end Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter.track_add_suggestion_action(note: @note) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Suggestions::CreateService, feature_category: :code_suggestions do let(:project_with_repo) { create(:project, :repository) } let(:merge_request) do create( :merge_request, source_project: project_with_repo, target_project: project_with_repo ) end def build_position(args = {}) default_args = { old_path: "files/ruby/popen.rb", new_path: "files/ruby/popen.rb", old_line: nil, new_line: 14, diff_refs: merge_request.diff_refs } Gitlab::Diff::Position.new(default_args.merge(args)) end let(:position) { build_position } let(:markdown) do <<-MARKDOWN.strip_heredoc ```suggestion foo bar ``` ``` nothing ``` ```suggestion xpto baz ``` ```thing this is not a suggestion, it's a thing ``` ```suggestion:-3+2 # multi-line suggestion 1 ``` ```suggestion:-5 # multi-line suggestion 1 ``` MARKDOWN end subject { described_class.new(note) } shared_examples_for 'service not tracking add suggestion event' do it 'does not track add suggestion event' do expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter) .not_to receive(:track_add_suggestion_action) subject.execute end end describe '#execute' do context 'should not try to parse suggestions' do context 'when not a diff note for merge requests' do let(:note) do create(:diff_note_on_commit, project: project_with_repo, note: markdown) end it 'does not try to parse suggestions' do expect(Gitlab::Diff::SuggestionsParser).not_to receive(:parse) subject.execute end it_behaves_like 'service not tracking add suggestion event' end context 'when diff note is not for text' do let(:note) do create( :diff_note_on_merge_request, project: project_with_repo, noteable: merge_request, position: position, note: markdown ) end before do allow(note).to receive(:on_text?) 
{ false } end it 'does not try to parse suggestions' do expect(Gitlab::Diff::SuggestionsParser).not_to receive(:parse) subject.execute end it_behaves_like 'service not tracking add suggestion event' end end context 'when diff file is not found' do let(:note) do create( :diff_note_on_merge_request, project: project_with_repo, noteable: merge_request, position: position, note: markdown ) end before do expect_next_instance_of(DiffNote) do |diff_note| expect(diff_note).to receive(:latest_diff_file).once { nil } end end it 'creates no suggestion' do expect { subject.execute }.not_to change(Suggestion, :count) end it_behaves_like 'service not tracking add suggestion event' end context 'should create suggestions' do let(:note) do create( :diff_note_on_merge_request, project: project_with_repo, noteable: merge_request, position: position, note: markdown ) end let(:expected_suggestions) do Gitlab::Diff::SuggestionsParser.parse( markdown, project: note.project, position: note.position ) end it 'persists suggestion records' do expect { subject.execute }.to change { note.suggestions.count } .from(0).to(expected_suggestions.size) end it 'persists suggestions data correctly' do subject.execute suggestions = note.suggestions.order(:relative_order) suggestions.zip(expected_suggestions) do |suggestion, expected_suggestion| expected_data = expected_suggestion.to_hash expect(suggestion.from_content).to eq(expected_data[:from_content]) expect(suggestion.to_content).to eq(expected_data[:to_content]) expect(suggestion.lines_above).to eq(expected_data[:lines_above]) expect(suggestion.lines_below).to eq(expected_data[:lines_below]) end end it 'tracks add suggestion event' do expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter) .to receive(:track_add_suggestion_action) .with(note: note) subject.execute end context 'outdated position note' do let!(:outdated_diff) { merge_request.merge_request_diff } let!(:latest_diff) { merge_request.create_merge_request_diff } let(:outdated_position) { build_position(diff_refs: outdated_diff.diff_refs) } let(:position) { build_position(diff_refs: latest_diff.diff_refs) } it 'uses the correct position when creating the suggestion' do expect(Gitlab::Diff::SuggestionsParser).to receive(:parse) .with(note.note, project: note.project, position: note.position) .and_call_original subject.execute end end context 'when a patch removes an empty line' do let(:markdown) do <<-MARKDOWN.strip_heredoc ```suggestion ``` MARKDOWN end let(:position) { build_position(new_line: 13) } it 'creates an appliable suggestion' do subject.execute suggestion = note.suggestions.last expect(suggestion).to be_appliable expect(suggestion.from_content).to eq("\n") expect(suggestion.to_content).to eq("") end end end end end
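A minimal usage sketch of the create service above; `note` is a hypothetical DiffNote on a merge request whose body contains suggestion blocks.

```ruby
# Hypothetical invocation; `note` is assumed to be a DiffNote that supports suggestions.
Suggestions::CreateService.new(note).execute

note.suggestions.order(:relative_order) # one row per parsed suggestion block
```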
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Mattermost class CreateTeamService < ::BaseService def initialize(group, current_user) @group = group @current_user = current_user end def execute # The user that creates the team will be Team Admin ::Mattermost::Team.new(current_user).create(**@group.mattermost_team_params) rescue ::Mattermost::ClientError => e @group.errors.add(:mattermost_team, e.message) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Mattermost::CreateTeamService, feature_category: :integrations do let(:user) { create(:user) } let(:group) { create(:group) } subject { described_class.new(group, user) } it 'creates a team' do expect_next_instance_of(::Mattermost::Team) do |instance| expect(instance).to receive(:create).with(name: anything, display_name: anything, type: anything) end subject.execute end it 'adds an error if a team could not be created' do expect_next_instance_of(::Mattermost::Team) do |instance| expect(instance).to receive(:create).and_raise(::Mattermost::ClientError, 'client error') end subject.execute expect(group.errors).to be_present end end
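A minimal usage sketch of the Mattermost team service above; `group` and `current_user` are hypothetical, and a reachable Mattermost instance is assumed.

```ruby
# Hypothetical invocation; `group` and `current_user` are assumed, with
# Mattermost configured for the instance.
Mattermost::CreateTeamService.new(group, current_user).execute

group.errors[:mattermost_team] # populated instead of raising when the client errors
```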
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Branches class ValidateNewService < BaseService def initialize(project) @project = project end def execute(branch_name, force: false) return error('Branch name is invalid') unless valid_name?(branch_name) if branch_exist?(branch_name) && !force return error('Branch already exists') end success rescue Gitlab::Git::PreReceiveError => ex error(ex.message) end private def valid_name?(branch_name) Gitlab::GitRefValidator.validate(branch_name) end def branch_exist?(branch_name) project.repository.branch_exists?(branch_name) end end end Branches::ValidateNewService.prepend_mod ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Branches::ValidateNewService, feature_category: :source_code_management do let(:project) { create(:project, :repository) } subject(:service) { described_class.new(project) } describe '#execute' do context 'validation' do it 'returns error with an invalid branch name' do result = service.execute('refs/heads/invalid_branch') expect(result[:status]).to eq(:error) expect(result[:message]).to eq('Branch name is invalid') end it 'returns success with a valid branch name' do result = service.execute('valid_branch_name') expect(result[:status]).to eq(:success) end end context 'branch existence' do it 'returns error when branch exists' do result = service.execute('master') expect(result[:status]).to eq(:error) expect(result[:message]).to eq('Branch already exists') end it 'returns success when branch name is available' do result = service.execute('valid_branch_name') expect(result[:status]).to eq(:success) end end end end
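A minimal usage sketch of the branch-name validation service above; `project` is a hypothetical project with a repository.

```ruby
# Hypothetical invocation; `project` is assumed to have a repository.
result = Branches::ValidateNewService.new(project).execute('new-feature')
result[:status]  # :success, or :error with result[:message]

# force: true skips only the "Branch already exists" check, not name validation:
Branches::ValidateNewService.new(project).execute('master', force: true)
```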
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Branches class DeleteMergedService < BaseService def async_execute DeleteMergedBranchesWorker.perform_async(project.id, current_user.id) end def execute raise Gitlab::Access::AccessDeniedError unless can?(current_user, :push_code, project) branches = project.repository.merged_branch_names # Prevent deletion of branches relevant to open merge requests branches -= merge_request_branch_names # Prevent deletion of protected branches branches = branches.reject { |branch| ProtectedBranch.protected?(project, branch) } branches.each do |branch| ::Branches::DeleteService.new(project, current_user).execute(branch) end end private # rubocop: disable CodeReuse/ActiveRecord def merge_request_branch_names # reorder(nil) is necessary for SELECT DISTINCT because default scope adds an ORDER BY source_names = project.origin_merge_requests.opened.reorder(nil).distinct.pluck(:source_branch) target_names = project.merge_requests.opened.reorder(nil).distinct.pluck(:target_branch) (source_names + target_names).uniq end # rubocop: enable CodeReuse/ActiveRecord end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Branches::DeleteMergedService, feature_category: :source_code_management do include ProjectForksHelper subject(:service) { described_class.new(project, project.first_owner) } let(:project) { create(:project, :repository) } describe '#execute' do it 'deletes a branch that was merged' do service.execute expect(project.repository.branch_names).not_to include('improve/awesome') end it 'keeps branch that is unmerged' do service.execute expect(project.repository.branch_names).to include('feature') end it 'keeps "master"' do service.execute expect(project.repository.branch_names).to include('master') end it 'keeps protected branches' do create(:protected_branch, project: project, name: 'improve/awesome') service.execute expect(project.repository.branch_names).to include('improve/awesome') end it 'keeps wildcard protected branches' do create(:protected_branch, project: project, name: 'improve/*') service.execute expect(project.repository.branch_names).to include('improve/awesome') end it 'ignores protected tags' do create(:protected_tag, project: project, name: 'improve/*') service.execute expect(project.repository.branch_names).not_to include('improve/awesome') end context 'user without rights' do let(:user) { create(:user) } it 'cannot execute' do expect { described_class.new(project, user).execute }.to raise_error(Gitlab::Access::AccessDeniedError) end end context 'open merge requests' do it 'does not delete branches from open merge requests' do forked_project = fork_project(project) create(:merge_request, :opened, source_project: project, target_project: project, source_branch: 'branch-merged', target_branch: 'master') create(:merge_request, :opened, source_project: forked_project, target_project: project, target_branch: 'improve/awesome', source_branch: 'master') service.execute expect(project.repository.branch_names).to include('branch-merged') expect(project.repository.branch_names).to include('improve/awesome') end end end describe '#async_execute' do it 'calls DeleteMergedBranchesWorker async' do expect(DeleteMergedBranchesWorker).to receive(:perform_async) service.async_execute end end end
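A minimal usage sketch of the merged-branch cleanup service above; `project` and `current_user` (with push access) are hypothetical.

```ruby
# Hypothetical invocation; `project` and `current_user` with :push_code are assumed.
Branches::DeleteMergedService.new(project, current_user).execute        # runs inline
Branches::DeleteMergedService.new(project, current_user).async_execute  # enqueues DeleteMergedBranchesWorker
```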
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Branches class DivergingCommitCountsService def initialize(repository) @repository = repository @cache = Gitlab::RepositoryCache.new(repository) end def call(branch) diverging_commit_counts(branch) end private attr_reader :repository, :cache delegate :raw_repository, to: :repository def diverging_commit_counts(branch) @root_ref_hash ||= raw_repository.commit(repository.root_ref).id cache.fetch(:"diverging_commit_counts_#{branch.name}") do number_commits_behind, number_commits_ahead = raw_repository.diverging_commit_count( @root_ref_hash, branch.dereferenced_target.sha) { behind: number_commits_behind, ahead: number_commits_ahead } end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Branches::DivergingCommitCountsService, feature_category: :source_code_management do let(:project) { create(:project, :repository) } let(:repository) { project.repository } describe '#call' do let(:diverged_branch) { repository.find_branch('fix') } let(:root_ref_sha) { repository.raw_repository.commit(repository.root_ref).id } let(:diverged_branch_sha) { diverged_branch.dereferenced_target.sha } let(:service) { described_class.new(repository) } it 'returns the commit counts behind and ahead of the default branch' do result = service.call(diverged_branch) expect(result).to eq(behind: 29, ahead: 2) end it 'calls diverging_commit_count without a max count' do expect(repository.raw_repository) .to receive(:diverging_commit_count) .with(root_ref_sha, diverged_branch_sha) .and_return([29, 2]) service.call(diverged_branch) end end end
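A minimal usage sketch of the diverging-commit-count service above; `repository` is a hypothetical project repository.

```ruby
# Hypothetical invocation; `repository` is assumed to be a project repository.
branch = repository.find_branch('fix')
Branches::DivergingCommitCountsService.new(repository).call(branch)
# => { behind: ..., ahead: ... }, counted against the root ref and cached per branch
```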
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Branches class CreateService < BaseService def initialize(project, user = nil, params = {}) super(project, user, params) @errors = [] end def execute(branch_name, ref, create_default_branch_if_empty: true) create_default_branch if create_default_branch_if_empty && project.empty_repo? result = branch_validation_service.execute(branch_name) return result if result[:status] == :error create_branch(branch_name, ref) end def bulk_create(branches) reset_errors created_branches = branches .then { |branches| only_valid_branches(branches) } .then { |branches| create_branches(branches) } .then { |branches| expire_branches_cache(branches) } return error(errors) if errors.present? success(branches: created_branches) end private attr_reader :errors def reset_errors @errors = [] end def only_valid_branches(branches) branches.select do |branch_name, _ref| result = branch_validation_service.execute(branch_name) if result[:status] == :error errors << result[:message] next end true end end def create_branches(branches) branches.filter_map do |branch_name, ref| result = create_branch(branch_name, ref, expire_cache: false) if result[:status] == :error errors << result[:message] next end result[:branch] end end def expire_branches_cache(branches) repository.expire_branches_cache if branches.present? branches end def create_branch(branch_name, ref, expire_cache: true) new_branch = repository.add_branch(current_user, branch_name, ref, expire_cache: expire_cache) if new_branch success(branch: new_branch) else error("Failed to create branch '#{branch_name}': invalid reference name '#{ref}'") end rescue Gitlab::Git::CommandError => e error("Failed to create branch '#{branch_name}': #{e}") rescue Gitlab::Git::PreReceiveError => e Gitlab::ErrorTracking.log_exception(e, pre_receive_message: e.raw_message, branch_name: branch_name, ref: ref) error(e.message) end def create_default_branch project.repository.create_file( current_user, '/README.md', '', message: 'Add README.md', branch_name: project.default_branch_or_main ) end def branch_validation_service @branch_validation_service ||= ::Branches::ValidateNewService.new(project) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Branches::CreateService, :use_clean_rails_redis_caching, feature_category: :source_code_management do subject(:service) { described_class.new(project, user) } let_it_be(:project) { create(:project_empty_repo) } let_it_be(:user) { create(:user) } describe '#bulk_create' do subject { service.bulk_create(branches) } let_it_be(:project) { create(:project, :custom_repo, files: { 'foo/a.txt' => 'foo' }) } let(:branches) { { 'branch' => 'master', 'another_branch' => 'master' } } it 'creates two branches' do expect(subject[:status]).to eq(:success) expect(subject[:branches].map(&:name)).to match_array(%w[branch another_branch]) expect(project.repository.branch_exists?('branch')).to be_truthy expect(project.repository.branch_exists?('another_branch')).to be_truthy end context 'when branches are empty' do let(:branches) { {} } it 'is successful' do expect(subject[:status]).to eq(:success) expect(subject[:branches]).to eq([]) end end context 'when incorrect reference is provided' do let(:branches) { { 'new-feature' => 'unknown' } } before do allow(project.repository).to receive(:add_branch).and_return(false) end it 'returns an error with a reference name' do err_msg = 'Failed to create branch \'new-feature\': invalid reference name \'unknown\'' expect(subject[:status]).to eq(:error) expect(subject[:message]).to match_array([err_msg]) end end context 'when branch already exists' do let(:branches) { { 'master' => 'master' } } it 'returns an error' do expect(subject[:status]).to eq(:error) expect(subject[:message]).to match_array(['Branch already exists']) end end context 'when PreReceiveError exception' do let(:branches) { { 'error' => 'master' } } it 'logs and returns an error if there is a PreReceiveError exception' do error_message = 'pre receive error' raw_message = "GitLab: #{error_message}" pre_receive_error = Gitlab::Git::PreReceiveError.new(raw_message) allow(project.repository).to receive(:add_branch).and_raise(pre_receive_error) expect(Gitlab::ErrorTracking).to receive(:log_exception).with( pre_receive_error, pre_receive_message: raw_message, branch_name: 'error', ref: 'master' ) expect(subject[:status]).to eq(:error) expect(subject[:message]).to match_array([error_message]) end end context 'when multiple errors occur' do let(:branches) { { 'master' => 'master', '' => 'master', 'failed_branch' => 'master' } } it 'returns all errors' do allow(project.repository).to receive(:add_branch).with( user, 'failed_branch', 'master', expire_cache: false ).and_return(false) expect(subject[:status]).to eq(:error) expect(subject[:message]).to match_array( [ 'Branch already exists', 'Branch name is invalid', "Failed to create branch 'failed_branch': invalid reference name 'master'" ] ) end end context 'without N+1 for Redis cache' do let(:branches) { { 'branch1' => 'master', 'branch2' => 'master', 'branch3' => 'master' } } it 'does not trigger Redis recreation' do project.repository.expire_branches_cache control = RedisCommands::Recorder.new(pattern: ':branch_names:') { subject } expect(control).not_to exceed_redis_command_calls_limit(:sadd, 1) end end context 'without N+1 branch cache expiration' do let(:branches) { { 'branch_1' => 'master', 'branch_2' => 'master', 'branch_3' => 'master' } } it 'triggers branch cache expiration only once' do expect(project.repository).to receive(:expire_branches_cache).once subject end context 'when branches were not added' do let(:branches) { { 'master' => 'master' } } it 'does not trigger branch 
expiration' do expect(project.repository).not_to receive(:expire_branches_cache) subject end end end end describe '#execute' do context 'when repository is empty' do it 'creates master branch' do result = service.execute('my-feature', 'master') expect(result[:status]).to eq(:success) expect(result[:branch].name).to eq('my-feature') expect(project.repository.branch_exists?('master')).to be_truthy end it 'creates another-feature branch' do service.execute('another-feature', 'master') expect(project.repository.branch_exists?('another-feature')).to be_truthy end end context 'when branch already exists' do it 'returns an error' do result = service.execute('master', 'master') expect(result[:status]).to eq(:error) expect(result[:message]).to eq('Branch already exists') end end context 'when incorrect reference is provided' do before do allow(project.repository).to receive(:add_branch).and_return(false) end it 'returns an error with a reference name' do err_msg = 'Failed to create branch \'new-feature\': invalid reference name \'unknown\'' result = service.execute('new-feature', 'unknown') expect(result[:status]).to eq(:error) expect(result[:message]).to eq(err_msg) end end it 'logs and returns an error if there is a PreReceiveError exception' do error_message = 'pre receive error' raw_message = "GitLab: #{error_message}" pre_receive_error = Gitlab::Git::PreReceiveError.new(raw_message) allow(project.repository).to receive(:add_branch).and_raise(pre_receive_error) expect(Gitlab::ErrorTracking).to receive(:log_exception).with( pre_receive_error, pre_receive_message: raw_message, branch_name: 'new-feature', ref: 'unknown' ) result = service.execute('new-feature', 'unknown') expect(result[:status]).to eq(:error) expect(result[:message]).to eq(error_message) end end end
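Aside: a minimal usage sketch of `Branches::CreateService`, the class under test above. The `project` and `user` variables are illustrative assumptions from the surrounding application context, not part of the instruction/response pair.

```ruby
# Illustrative only: assumes a `project` and `user` from the application context.
service = Branches::CreateService.new(project, user)

# Single branch: returns a result hash with :status plus :branch or :message.
result = service.execute('feature-1', 'master')
result[:status] # => :success or :error

# Bulk creation: a hash of branch names to starting refs; the branch cache is
# expired once at the end rather than per branch.
bulk = service.bulk_create({ 'feature-2' => 'master', 'feature-3' => 'master' })
bulk[:branches].map(&:name) if bulk[:status] == :success
```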
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Branches class DeleteService < BaseService def execute(branch_name) repository = project.repository branch = repository.find_branch(branch_name) unless current_user.can?(:push_code, project) return ServiceResponse.error( message: 'You dont have push access to repo', http_status: 405) end unless branch return ServiceResponse.error( message: 'No such branch', http_status: 404) end if repository.rm_branch(current_user, branch_name) unlock_artifacts(branch_name) ServiceResponse.success(message: 'Branch was deleted') else ServiceResponse.error( message: 'Failed to remove branch', http_status: 400) end rescue Gitlab::Git::PreReceiveError, Gitlab::Git::CommandError => ex ServiceResponse.error(message: ex.message, http_status: 400) end private def unlock_artifacts(branch_name) Ci::RefDeleteUnlockArtifactsWorker.perform_async(project.id, current_user.id, "#{::Gitlab::Git::BRANCH_REF_PREFIX}#{branch_name}") end end end Branches::DeleteService.prepend_mod ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Branches::DeleteService, feature_category: :source_code_management do let(:project) { create(:project, :repository) } let(:repository) { project.repository } let(:user) { create(:user) } subject(:service) { described_class.new(project, user) } shared_examples 'a deleted branch' do |branch_name| before do allow(Ci::RefDeleteUnlockArtifactsWorker).to receive(:perform_async) end it 'removes the branch' do expect(branch_exists?(branch_name)).to be true result = service.execute(branch_name) expect(result.status).to eq :success expect(branch_exists?(branch_name)).to be false end it 'calls the RefDeleteUnlockArtifactsWorker' do expect(Ci::RefDeleteUnlockArtifactsWorker).to receive(:perform_async).with(project.id, user.id, "refs/heads/#{branch_name}") service.execute(branch_name) end end describe '#execute' do context 'when user has access to push to repository' do before do project.add_developer(user) end it_behaves_like 'a deleted branch', 'feature' context 'when Gitlab::Git::CommandError is raised' do before do allow(repository).to receive(:rm_branch) do raise Gitlab::Git::CommandError, 'Could not update patch' end end it 'handles and returns error' do result = service.execute('feature') expect(result.status).to eq(:error) expect(result.message).to eq('Could not update patch') end end end context 'when user does not have access to push to repository' do it 'does not remove branch' do expect(branch_exists?('feature')).to be true result = service.execute('feature') expect(result.status).to eq :error expect(result.message).to eq 'You dont have push access to repo' expect(branch_exists?('feature')).to be true end end end def branch_exists?(branch_name) repository.ref_exists?("refs/heads/#{branch_name}") end end
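Aside: an illustrative call to `Branches::DeleteService`, hedged on `project` and `user` being available in the caller.

```ruby
# Illustrative only; the return value is a ServiceResponse, as exercised in the spec.
response = Branches::DeleteService.new(project, user).execute('feature')

response.status  # => :success or :error
response.message # => 'Branch was deleted', 'No such branch',
                 #    'You dont have push access to repo' or 'Failed to remove branch'
```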
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module NamespaceSettings class UpdateService include ::Gitlab::Allowable attr_reader :current_user, :group, :settings_params def initialize(current_user, group, settings) @current_user = current_user @group = group @settings_params = settings end def execute validate_resource_access_token_creation_allowed_param validate_settings_param_for_root_group( param_key: :prevent_sharing_groups_outside_hierarchy, user_policy: :change_prevent_sharing_groups_outside_hierarchy ) validate_settings_param_for_root_group( param_key: :new_user_signups_cap, user_policy: :change_new_user_signups_cap ) validate_settings_param_for_root_group( param_key: :default_branch_protection, user_policy: :update_default_branch_protection ) validate_settings_param_for_root_group( param_key: :default_branch_protection_defaults, user_policy: :update_default_branch_protection ) handle_default_branch_protection unless settings_params[:default_branch_protection].blank? if group.namespace_settings group.namespace_settings.attributes = settings_params else group.build_namespace_settings(settings_params) end end private def handle_default_branch_protection # We are migrating default_branch_protection from an integer # column to a jsonb column. While completing the rest of the # work, we want to start translating the updates sent to the # existing column into the json. Eventually, we will be updating # the jsonb column directly and deprecating the original update # path. Until then, we want to sync up both columns. protection = Gitlab::Access::BranchProtection.new(settings_params.delete(:default_branch_protection).to_i) settings_params[:default_branch_protection_defaults] = protection.to_hash end def validate_resource_access_token_creation_allowed_param return if settings_params[:resource_access_token_creation_allowed].nil? unless can?(current_user, :admin_group, group) settings_params.delete(:resource_access_token_creation_allowed) group.namespace_settings.errors.add(:resource_access_token_creation_allowed, _('can only be changed by a group admin.')) end end def validate_settings_param_for_root_group(param_key:, user_policy:) return if settings_params[param_key].nil? unless can?(current_user, user_policy, group) settings_params.delete(param_key) group.namespace_settings.errors.add(param_key, _('can only be changed by a group admin.')) end unless group.root? settings_params.delete(param_key) group.namespace_settings.errors.add(param_key, _('only available on top-level groups.')) end end end end NamespaceSettings::UpdateService.prepend_mod_with('NamespaceSettings::UpdateService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe NamespaceSettings::UpdateService, feature_category: :groups_and_projects do let(:user) { create(:user) } let(:group) { create(:group) } let(:settings) { {} } subject(:service) { described_class.new(user, group, settings) } describe "#execute" do context "group has no namespace_settings" do before do group.namespace_settings.destroy! end it "builds out a new namespace_settings record" do expect do service.execute end.to change { NamespaceSetting.count }.by(1) end end context "group has a namespace_settings" do before do service.execute end it "doesn't create a new namespace_setting record" do expect do service.execute end.not_to change { NamespaceSetting.count } end end context "updating :default_branch_name" do let(:example_branch_name) { "example_branch_name" } let(:settings) { { default_branch_name: example_branch_name } } it "changes settings" do expect { service.execute } .to change { group.namespace_settings.default_branch_name } .from(nil).to(example_branch_name) end end context 'when default_branch_protection is updated' do let(:namespace_settings) { group.namespace_settings } let(:expected) { ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys } let(:settings) { { default_branch_protection: ::Gitlab::Access::PROTECTION_DEV_CAN_MERGE } } before do group.add_owner(user) end it "updates default_branch_protection_defaults from the default_branch_protection param" do expect { service.execute } .to change { namespace_settings.default_branch_protection_defaults } .from({}).to(expected) end end context 'when default_branch_protection_defaults is updated' do let(:namespace_settings) { group.namespace_settings } let(:branch_protection) { ::Gitlab::Access::BranchProtection.protected_against_developer_pushes.stringify_keys } let(:expected) { branch_protection } let(:settings) { { default_branch_protection_defaults: branch_protection } } context 'when the user has the ability to update' do before do allow(Ability).to receive(:allowed?).with(user, :update_default_branch_protection, group).and_return(true) end context 'when group is root' do before do allow(group).to receive(:root?).and_return(true) end it "updates default_branch_protection_defaults from the default_branch_protection param" do expect { service.execute } .to change { namespace_settings.default_branch_protection_defaults } .from({}).to(expected) end end context 'when group is not root' do before do allow(group).to receive(:root?).and_return(false) end it "does not update default_branch_protection_defaults and adds an error to the namespace_settings", :aggregate_failures do expect { service.execute }.not_to change { namespace_settings.default_branch_protection_defaults } expect(group.namespace_settings.errors[:default_branch_protection_defaults]).to include('only available on top-level groups.') end end end context 'when the user does not have the ability to update' do before do allow(Ability).to receive(:allowed?).with(user, :update_default_branch_protection, group).and_return(false) end it "does not update default_branch_protection_defaults and adds an error to the namespace_settings", :aggregate_failures do expect { service.execute }.not_to change { namespace_settings.default_branch_protection_defaults } expect(group.namespace_settings.errors[:default_branch_protection_defaults]).to include('can only be changed by a group admin.') end end end context "updating :resource_access_token_creation_allowed" do let(:settings) { { 
resource_access_token_creation_allowed: false } } context 'when user is a group owner' do before do group.add_owner(user) end it "changes settings" do expect { service.execute } .to change { group.namespace_settings.resource_access_token_creation_allowed } .from(true).to(false) end end context 'when user is not a group owner' do before do group.add_developer(user) end it "does not change settings" do expect { service.execute }.not_to change { group.namespace_settings.resource_access_token_creation_allowed } end it 'returns the group owner error' do service.execute expect(group.namespace_settings.errors.messages[:resource_access_token_creation_allowed]).to include('can only be changed by a group admin.') end end end describe 'validating settings param for root group' do using RSpec::Parameterized::TableSyntax where(:setting_key, :setting_changes_from, :setting_changes_to) do :prevent_sharing_groups_outside_hierarchy | false | true :new_user_signups_cap | nil | 100 end with_them do let(:settings) do { setting_key => setting_changes_to } end context 'when user is not a group owner' do before do group.add_maintainer(user) end it 'does not change settings' do expect { service.execute }.not_to change { group.namespace_settings.public_send(setting_key) } end it 'returns the group owner error' do service.execute expect(group.namespace_settings.errors.messages[setting_key]).to include('can only be changed by a group admin.') end end context 'with a subgroup' do let(:subgroup) { create(:group, parent: group) } before do group.add_owner(user) end it 'does not change settings' do service = described_class.new(user, subgroup, settings) expect { service.execute }.not_to change { group.namespace_settings.public_send(setting_key) } expect(subgroup.namespace_settings.errors.messages[setting_key]).to include('only available on top-level groups.') end end context 'when user is a group owner' do before do group.add_owner(user) end it 'changes settings' do expect { service.execute } .to change { group.namespace_settings.public_send(setting_key) } .from(setting_changes_from).to(setting_changes_to) end end end end end end
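Aside: a small usage sketch of `NamespaceSettings::UpdateService`. As the class shows, `execute` only assigns or builds the settings record; persistence is left to the caller, which is why the sketch ends with an explicit save. `current_user` and `group` are assumed from the caller.

```ruby
# Illustrative only: assigns settings on group.namespace_settings without saving them.
NamespaceSettings::UpdateService
  .new(current_user, group, { default_branch_name: 'main' })
  .execute

group.namespace_settings.save # one way for the caller to persist the assigned attributes
```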
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module ObjectStorage class DeleteStaleDirectUploadsService < BaseService MAX_EXEC_DURATION = 250.seconds.freeze def initialize; end def execute total_pending_entries = ObjectStorage::PendingDirectUpload.count total_deleted_stale_entries = 0 timeout = false start = Time.current ObjectStorage::PendingDirectUpload.each do |pending_upload| if pending_upload.stale? pending_upload.delete total_deleted_stale_entries += 1 end if (Time.current - start) > MAX_EXEC_DURATION timeout = true break end end success( total_pending_entries: total_pending_entries, total_deleted_stale_entries: total_deleted_stale_entries, execution_timeout: timeout ) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ObjectStorage::DeleteStaleDirectUploadsService, :direct_uploads, :clean_gitlab_redis_shared_state, feature_category: :shared do let(:service) { described_class.new } describe '#execute', :aggregate_failures do subject(:execute_result) { service.execute } let(:location_identifier) { JobArtifactUploader.storage_location_identifier } let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) } let(:stale_path_1) { 'stale/path/123' } let!(:stale_object_1) do fog_connection.directories .new(key: location_identifier.to_s) .files .create( # rubocop:disable Rails/SaveBang key: stale_path_1, body: 'something' ) end let(:stale_path_2) { 'stale/path/456' } let!(:stale_object_2) do fog_connection.directories .new(key: location_identifier.to_s) .files .create( # rubocop:disable Rails/SaveBang key: stale_path_2, body: 'something' ) end let(:non_stale_path) { 'nonstale/path/123' } let!(:non_stale_object) do fog_connection.directories .new(key: location_identifier.to_s) .files .create( # rubocop:disable Rails/SaveBang key: non_stale_path, body: 'something' ) end it 'only deletes stale entries', :aggregate_failures do prepare_pending_direct_upload(stale_path_1, 5.hours.ago) prepare_pending_direct_upload(stale_path_2, 4.hours.ago) prepare_pending_direct_upload(non_stale_path, 3.minutes.ago) expect(execute_result).to eq( status: :success, total_pending_entries: 3, total_deleted_stale_entries: 2, execution_timeout: false ) expect_not_to_have_pending_direct_upload(stale_path_1) expect_pending_uploaded_object_not_to_exist(stale_path_1) expect_not_to_have_pending_direct_upload(stale_path_2) expect_pending_uploaded_object_not_to_exist(stale_path_2) expect_to_have_pending_direct_upload(non_stale_path) expect_pending_uploaded_object_to_exist(non_stale_path) end context 'when a stale entry does not have a matching object in the storage' do it 'does not fail and still remove the stale entry' do stale_no_object_path = 'some/other/path' prepare_pending_direct_upload(stale_path_1, 5.hours.ago) prepare_pending_direct_upload(stale_no_object_path, 5.hours.ago) expect(execute_result[:status]).to eq(:success) expect_not_to_have_pending_direct_upload(stale_path_1) expect_pending_uploaded_object_not_to_exist(stale_path_1) expect_not_to_have_pending_direct_upload(stale_no_object_path) end end context 'when timeout happens' do before do stub_const("#{described_class}::MAX_EXEC_DURATION", 0.seconds) prepare_pending_direct_upload(stale_path_1, 5.hours.ago) prepare_pending_direct_upload(stale_path_2, 4.hours.ago) end it 'completes the current iteration and reports information about total entries' do expect(execute_result).to eq( status: :success, total_pending_entries: 2, total_deleted_stale_entries: 1, execution_timeout: true ) expect_not_to_have_pending_direct_upload(stale_path_1) expect_pending_uploaded_object_not_to_exist(stale_path_1) expect_to_have_pending_direct_upload(stale_path_2) expect_pending_uploaded_object_to_exist(stale_path_2) end end end end
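Aside: an illustrative invocation of `ObjectStorage::DeleteStaleDirectUploadsService`; it takes no arguments and reports counters in its result hash.

```ruby
# Illustrative only, e.g. from a scheduled job.
result = ObjectStorage::DeleteStaleDirectUploadsService.new.execute

result[:status]                      # => :success
result[:total_pending_entries]       # pending direct uploads seen
result[:total_deleted_stale_entries] # stale entries actually removed
result[:execution_timeout]           # true when MAX_EXEC_DURATION was reached
```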
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Todos class AllowedTargetFilterService include Gitlab::Allowable def initialize(todos, current_user) @todos = todos @current_user = current_user end def execute Preloaders::UserMaxAccessLevelInProjectsPreloader.new(@todos.map(&:project).compact, @current_user).execute @todos.select { |todo| can?(@current_user, :read_todo, todo) } end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Todos::AllowedTargetFilterService, feature_category: :team_planning do include DesignManagementTestHelpers let_it_be(:authorized_group) { create(:group, :private) } let_it_be(:authorized_project) { create(:project, group: authorized_group) } let_it_be(:unauthorized_group) { create(:group, :private) } let_it_be(:unauthorized_project) { create(:project, group: unauthorized_group) } let_it_be(:user) { create(:user) } let_it_be(:authorized_issue) { create(:issue, project: authorized_project) } let_it_be(:authorized_issue_todo) { create(:todo, project: authorized_project, target: authorized_issue, user: user) } let_it_be(:authorized_note) { create(:note, noteable: authorized_issue, project: authorized_project) } let_it_be(:authorized_note_todo) { create(:todo, project: authorized_project, target: authorized_issue, note: authorized_note, user: user) } let_it_be(:confidential_issue) { create(:issue, :confidential, project: authorized_project) } let_it_be(:confidential_issue_todo) { create(:todo, project: authorized_project, target: confidential_issue, user: user) } let_it_be(:confidential_note) { create(:note, :confidential, noteable: confidential_issue, project: authorized_project) } let_it_be(:confidential_note_todo) { create(:todo, project: authorized_project, target: authorized_issue, note: confidential_note, user: user) } let_it_be(:unauthorized_issue) { create(:issue, project: unauthorized_project) } let_it_be(:unauthorized_issue_todo) { create(:todo, project: unauthorized_project, target: unauthorized_issue, user: user) } let_it_be(:authorized_design) { create(:design, issue: authorized_issue) } let_it_be(:authorized_design_todo) { create(:todo, project: authorized_project, target: authorized_design, user: user) } let_it_be(:unauthorized_design) { create(:design, issue: unauthorized_issue) } let_it_be(:unauthorized_design_todo) { create(:todo, project: unauthorized_project, target: unauthorized_design, user: user) } let_it_be(:unauthorized_note) { create(:note, noteable: unauthorized_issue, project: unauthorized_project) } let_it_be(:unauthorized_note_todo) { create(:todo, project: unauthorized_project, target: unauthorized_issue, note: unauthorized_note, user: user) } # Cannot use let_it_be with MRs let(:authorized_mr) { create(:merge_request, source_project: authorized_project) } let(:authorized_mr_todo) { create(:todo, project: authorized_project, user: user, target: authorized_mr) } let(:unauthorized_mr) { create(:merge_request, source_project: unauthorized_project) } let(:unauthorized_mr_todo) { create(:todo, project: unauthorized_project, user: user, target: unauthorized_mr) } describe '#execute' do let(:all_todos) { authorized_todos + unauthorized_todos } subject(:execute_service) { described_class.new(all_todos, user).execute } shared_examples 'allowed Todos filter' do before do enable_design_management end it { is_expected.to match_array(authorized_todos) } end context 'with reporter user' do before_all do authorized_group.add_reporter(user) end it_behaves_like 'allowed Todos filter' do let(:authorized_todos) do [ authorized_mr_todo, authorized_issue_todo, confidential_issue_todo, confidential_note_todo, authorized_design_todo ] end let(:unauthorized_todos) do [ unauthorized_mr_todo, unauthorized_issue_todo, unauthorized_note_todo, unauthorized_design_todo ] end end end context 'with guest user' do before_all do authorized_group.add_guest(user) end it_behaves_like 'allowed Todos filter' do let(:authorized_todos) do 
[ authorized_issue_todo, authorized_design_todo ] end let(:unauthorized_todos) do [ authorized_mr_todo, confidential_issue_todo, confidential_note_todo, unauthorized_mr_todo, unauthorized_issue_todo, unauthorized_note_todo, unauthorized_design_todo ] end end end context 'with a non-member user' do it_behaves_like 'allowed Todos filter' do let(:authorized_todos) { [] } let(:unauthorized_todos) do [ authorized_issue_todo, authorized_design_todo, authorized_mr_todo, confidential_issue_todo, confidential_note_todo, unauthorized_mr_todo, unauthorized_issue_todo, unauthorized_note_todo, unauthorized_design_todo ] end end end end end
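Aside: a usage sketch for `Todos::AllowedTargetFilterService`; `todos` and `current_user` are assumed to come from the caller.

```ruby
# Illustrative only: keep just the todos the user is still allowed to read.
todos   = current_user.todos.to_a # any collection of Todo records works
visible = Todos::AllowedTargetFilterService.new(todos, current_user).execute
```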
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Todos module Destroy class UnauthorizedFeaturesService < ::Todos::Destroy::BaseService attr_reader :project_id, :user_id BATCH_SIZE = 1000 def initialize(project_id, user_id = nil) @project_id = project_id @user_id = user_id end # rubocop: disable CodeReuse/ActiveRecord def execute return if user_id && authorized_users.where(user_id: user_id).exists? related_todos.each_batch(of: BATCH_SIZE) do |batch| pending_delete = without_authorized(batch).includes(:target, :user).reject do |todo| Ability.allowed?(todo.user, :read_todo, todo, scope: :user) end Todo.where(id: pending_delete).delete_all if pending_delete.present? end end # rubocop: enable CodeReuse/ActiveRecord private def without_authorized(items) items.not_in_users(authorized_users) end def authorized_users ProjectAuthorization.select(:user_id).for_project(project_ids) end def related_todos base_scope = Todo.for_project(project_id) base_scope = base_scope.for_user(user_id) if user_id base_scope end # Compatibility for #authorized_users in this class we always work # with 1 project for queries efficiency def project_ids [project_id] end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Todos::Destroy::UnauthorizedFeaturesService, feature_category: :team_planning do let_it_be(:project, reload: true) { create(:project, :public, :repository) } let_it_be(:issue) { create(:issue, project: project) } let_it_be(:mr) { create(:merge_request, source_project: project) } let_it_be(:user) { create(:user) } let_it_be(:another_user) { create(:user) } let_it_be(:project_member) do create(:user).tap do |user| project.add_developer(user) end end let!(:todo_mr_non_member) { create(:todo, user: user, target: mr, project: project) } let!(:todo_mr_non_member2) { create(:todo, user: another_user, target: mr, project: project) } let!(:todo_mr_member) { create(:todo, user: project_member, target: mr, project: project) } let!(:todo_issue_non_member) { create(:todo, user: user, target: issue, project: project) } let!(:todo_issue_non_member2) { create(:todo, user: another_user, target: issue, project: project) } let!(:todo_issue_member) { create(:todo, user: project_member, target: issue, project: project) } let!(:commit_todo_non_member) { create(:on_commit_todo, user: user, project: project) } let!(:commit_todo_non_member2) { create(:on_commit_todo, user: another_user, project: project) } let!(:commit_todo_member) { create(:on_commit_todo, user: project_member, project: project) } context 'when user_id is provided' do subject { described_class.new(project.id, user.id).execute } context 'when all features have same visibility as the project' do it 'removes only user issue todos' do expect { subject }.not_to change { Todo.count } end end context 'when issues are visible only to project members but the user is a member' do before do project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE) project.add_developer(user) end it 'does not remove any todos' do expect { subject }.not_to change { Todo.count } end end context 'when issues are visible only to project members' do before do project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE) end it 'removes only user issue todos' do expect { subject }.to change { Todo.count }.from(9).to(8) end end context 'when mrs, builds and repository are visible only to project members' do before do # builds and merge requests cannot have higher visibility than repository project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE) project.project_feature.update!(builds_access_level: ProjectFeature::PRIVATE) project.project_feature.update!(repository_access_level: ProjectFeature::PRIVATE) end it 'removes only user mr and commit todos' do expect { subject }.to change { Todo.count }.from(9).to(7) end end context 'when mrs are visible only to project members' do before do project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE) end it 'removes only user merge request todo' do expect { subject }.to change { Todo.count }.from(9).to(8) end end context 'when mrs and issues are visible only to project members' do before do project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE) project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE) end it 'removes only user merge request and issue todos' do expect { subject }.to change { Todo.count }.from(9).to(7) end end end context 'when user_id is not provided' do subject { described_class.new(project.id).execute } context 'when all features have same visibility as the project' do it 'does not remove any todos' do expect { 
subject }.not_to change { Todo.count } end end context 'when issues are visible only to project members' do before do project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE) end it 'removes only non members issue todos' do expect { subject }.to change { Todo.count }.from(9).to(7) end end context 'when mrs, builds and repository are visible only to project members' do before do # builds and merge requests cannot have higher visibility than repository project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE) project.project_feature.update!(builds_access_level: ProjectFeature::PRIVATE) project.project_feature.update!(repository_access_level: ProjectFeature::PRIVATE) end it 'removes only non members mr and commit todos' do expect { subject }.to change { Todo.count }.from(9).to(5) end end context 'when mrs are visible only to project members' do before do project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE) end it 'removes only non members merge request todos' do expect { subject }.to change { Todo.count }.from(9).to(7) end end context 'when mrs and issues are visible only to project members' do before do project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE) project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE) end it 'removes only non members merge request and issue todos' do expect { subject }.to change { Todo.count }.from(9).to(5) end end end end
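Aside: illustrative calls to `Todos::Destroy::UnauthorizedFeaturesService`, covering both forms exercised by the spec above; `project` and `user` are assumed.

```ruby
# Illustrative only: clean up todos for features the user(s) can no longer see.
Todos::Destroy::UnauthorizedFeaturesService.new(project.id).execute          # all users
Todos::Destroy::UnauthorizedFeaturesService.new(project.id, user.id).execute # a single user
```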
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Todos module Destroy class DestroyedIssuableService BATCH_SIZE = 100 # Since we are moving towards work items, in some instances we create todos with # `target_type: WorkItem` in other instances we still create todos with `target_type: Issue` # So when an issue/work item is deleted, we just make sure to delete todos for both target types BOUND_TARGET_TYPES = %w[Issue WorkItem].freeze def initialize(target_id, target_type) @target_id = target_id @target_type = BOUND_TARGET_TYPES.include?(target_type) ? BOUND_TARGET_TYPES : target_type end def execute inner_query = Todo.select(:id).for_target(target_id).for_type(target_type).limit(BATCH_SIZE) delete_query = <<~SQL DELETE FROM "#{Todo.table_name}" WHERE id IN (#{inner_query.to_sql}) RETURNING user_id SQL loop do result = Todo.connection.execute(delete_query) break if result.cmd_tuples == 0 user_ids = result.map { |row| row['user_id'] }.uniq invalidate_todos_cache_counts(user_ids) end end private attr_reader :target_id, :target_type def invalidate_todos_cache_counts(user_ids) user_ids.each do |id| # Only build a user instance since we only need its ID for # `User#invalidate_todos_cache_counts` to work. User.new(id: id).invalidate_todos_cache_counts end end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Todos::Destroy::DestroyedIssuableService, feature_category: :team_planning do describe '#execute' do let_it_be(:user) { create(:user) } subject { described_class.new(target.id, target.class.name).execute } context 'when target is a merge request' do let_it_be(:target) { create(:merge_request) } let_it_be(:pending_todo) { create(:todo, :pending, project: target.project, target: target, user: user) } let_it_be(:done_todo) { create(:todo, :done, project: target.project, target: target, user: user) } it 'deletes todos for specified target ID and type' do control_count = ActiveRecord::QueryRecorder.new { subject }.count # Create more todos for the target create(:todo, :pending, project: target.project, target: target, user: user) create(:todo, :pending, project: target.project, target: target, user: user) create(:todo, :done, project: target.project, target: target, user: user) create(:todo, :done, project: target.project, target: target, user: user) expect { subject }.not_to exceed_query_limit(control_count) end it 'invalidates todos cache counts of todo users', :use_clean_rails_redis_caching do expect { subject } .to change { pending_todo.user.todos_pending_count }.from(1).to(0) .and change { done_todo.user.todos_done_count }.from(1).to(0) end end context 'when target is a work item' do let_it_be(:target) { create(:work_item) } let_it_be(:todo1) { create(:todo, :pending, project: target.project, target: target, user: user) } let_it_be(:todo2) { create(:todo, :done, project: target.project, target: target, user: user) } # rubocop: disable Cop/AvoidBecomes let_it_be(:todo3) { create(:todo, :pending, project: target.project, target: target.becomes(Issue), user: user) } let_it_be(:todo4) { create(:todo, :done, project: target.project, target: target.becomes(Issue), user: user) } # rubocop: enable Cop/AvoidBecomes it 'deletes todos' do expect { subject }.to change(Todo, :count).by(-4) end end end end
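Aside: an illustrative call to `Todos::Destroy::DestroyedIssuableService`; `issue` is assumed from the caller.

```ruby
# Illustrative only: run after an issue or work item is destroyed.
Todos::Destroy::DestroyedIssuableService.new(issue.id, 'Issue').execute
# 'Issue' and 'WorkItem' are both expanded to BOUND_TARGET_TYPES, so todos with
# either target_type are deleted.
```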
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Todos module Destroy # Service class for deleting todos that belongs to a deleted/archived design. class DesignService attr_reader :design_ids def initialize(design_ids) @design_ids = design_ids end def execute todos.delete_all end private def todos Todo.for_target(deleted_designs.select(:design_id)).for_type(DesignManagement::Design) end def deleted_designs DesignManagement::Action.by_design(design_ids).by_event(:deletion) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Todos::Destroy::DesignService, feature_category: :design_management do let_it_be(:user) { create(:user) } let_it_be(:user_2) { create(:user) } let_it_be(:design) { create(:design) } let_it_be(:design_2) { create(:design) } let_it_be(:design_3) { create(:design) } let_it_be(:create_action) { create(:design_action, design: design) } let_it_be(:create_action_2) { create(:design_action, design: design_2) } describe '#execute' do before do create(:todo, user: user, target: design) create(:todo, user: user_2, target: design) create(:todo, user: user, target: design_2) create(:todo, user: user, target: design_3) end subject { described_class.new([design.id, design_2.id, design_3.id]).execute } context 'when the design has been archived' do let_it_be(:archive_action) { create(:design_action, design: design, event: :deletion) } let_it_be(:archive_action_2) { create(:design_action, design: design_3, event: :deletion) } it 'removes todos for that design' do expect { subject }.to change { Todo.count }.from(4).to(1) end end context 'when no design has been archived' do it 'does not remove any todos' do expect { subject }.not_to change { Todo.count }.from(4) end end end end
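Aside: an illustrative call to `Todos::Destroy::DesignService`; the design variables are assumptions.

```ruby
# Illustrative only: pass the ids of designs that may have been archived; only ids
# with a recorded :deletion action have their todos removed.
Todos::Destroy::DesignService.new([design.id, other_design.id]).execute
```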
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Todos module Destroy class GroupPrivateService < ::Todos::Destroy::BaseService extend ::Gitlab::Utils::Override attr_reader :group def initialize(group_id) @group = Group.find_by_id(group_id) end def execute return unless todos_to_remove? delete_todos end private def delete_todos authorized_users = Member.from_union( [ group.descendant_project_members_with_inactive.select(:user_id), group.members_with_parents.select(:user_id) ], remove_duplicates: false ).select(:user_id) todos.not_in_users(authorized_users).delete_all end def todos Todo.for_group(group.id) end def todos_to_remove? group&.private? end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Todos::Destroy::GroupPrivateService, feature_category: :team_planning do let(:group) { create(:group, :public) } let(:project) { create(:project, group: group) } let(:user) { create(:user) } let(:group_member) { create(:user) } let(:project_member) { create(:user) } let!(:todo_non_member) { create(:todo, user: user, group: group) } let!(:todo_another_non_member) { create(:todo, user: user, group: group) } let!(:todo_group_member) { create(:todo, user: group_member, group: group) } let!(:todo_project_member) { create(:todo, user: project_member, group: group) } describe '#execute', :aggregate_failures do before do group.add_developer(group_member) project.add_developer(project_member) end subject { described_class.new(group.id).execute } context 'when a group set to private' do before do group.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) end it 'removes todos only for users who are not group users' do expect { subject }.to change { Todo.count }.from(4).to(2) expect(user.todos).to be_empty expect(group_member.todos).to match_array([todo_group_member]) expect(project_member.todos).to match_array([todo_project_member]) end context 'with nested groups' do let(:parent_group) { create(:group) } let(:subgroup) { create(:group, :private, parent: group) } let(:subproject) { create(:project, group: subgroup) } let(:parent_member) { create(:user) } let(:subgroup_member) { create(:user) } let(:subgproject_member) { create(:user) } let!(:todo_parent_member) { create(:todo, user: parent_member, group: group) } let!(:todo_subgroup_member) { create(:todo, user: subgroup_member, group: group) } let!(:todo_subproject_member) { create(:todo, user: subgproject_member, group: group) } before do group.update!(parent: parent_group) parent_group.add_developer(parent_member) subgroup.add_developer(subgroup_member) subproject.add_developer(subgproject_member) end it 'removes todos only for users who are not group users' do expect { subject }.to change { Todo.count }.from(7).to(4) expect(parent_member.todos).to contain_exactly(todo_parent_member) expect(subgroup_member.todos).to be_empty expect(subgproject_member.todos).to contain_exactly(todo_subproject_member) end end context 'with member via group share' do let(:invited_group) { create(:group) } let(:invited_group_member) { create(:user).tap { |u| invited_group.add_guest(u) } } let!(:todo_invited_group_member) { create(:todo, user: invited_group_member, group: group) } it 'does not remove todos for users invited to the group' do create(:group_group_link, shared_group: group, shared_with_group: invited_group) expect { subject }.to change { Todo.count }.from(5).to(3) expect(invited_group_member.todos).to contain_exactly(todo_invited_group_member) end it 'does not remove todos for users invited to an ancestor group' do parent_group = create(:group) group.update!(parent: parent_group) create(:group_group_link, shared_group: parent_group, shared_with_group: invited_group) expect { subject }.to change { Todo.count }.from(5).to(3) expect(invited_group_member.todos).to contain_exactly(todo_invited_group_member) end end end context 'when group is not private' do it 'does not remove any todos' do expect { subject }.not_to change { Todo.count } end end end end
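Aside: an illustrative call to `Todos::Destroy::GroupPrivateService`; `group` is assumed.

```ruby
# Illustrative only: a no-op unless the group is private.
Todos::Destroy::GroupPrivateService.new(group.id).execute
```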
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Todos module Destroy class ProjectPrivateService < ::Todos::Destroy::BaseService extend ::Gitlab::Utils::Override attr_reader :project def initialize(project_id) @project = Project.find_by_id(project_id) end def execute return unless todos_to_remove? delete_todos end private def delete_todos authorized_users = ProjectAuthorization.select(:user_id).for_project(project_ids) todos.not_in_users(authorized_users).delete_all end def todos Todo.for_project(project.id) end def project_ids project.id end def todos_to_remove? project&.private? end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Todos::Destroy::ProjectPrivateService, feature_category: :team_planning do let(:group) { create(:group, :public) } let(:project) { create(:project, :public, group: group) } let(:user) { create(:user) } let(:project_member) { create(:user) } let(:group_member) { create(:user) } let!(:todo_non_member) { create(:todo, user: user, project: project) } let!(:todo2_non_member) { create(:todo, user: user, project: project) } let!(:todo_member) { create(:todo, user: project_member, project: project) } let!(:todo_group_member) { create(:todo, user: group_member, project: project) } describe '#execute' do before do project.add_developer(project_member) group.add_developer(group_member) end subject { described_class.new(project.id).execute } context 'when a project set to private' do before do project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE) end it 'removes issue todos for a user who is not a member' do expect { subject }.to change { Todo.count }.from(4).to(2) expect(user.todos).to be_empty expect(project_member.todos).to match_array([todo_member]) expect(group_member.todos).to match_array([todo_group_member]) end end context 'when project is not private' do it 'does not remove any todos' do expect { subject }.not_to change { Todo.count } end end end end
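Aside: the matching sketch for `Todos::Destroy::ProjectPrivateService`; `project` is assumed.

```ruby
# Illustrative only: a no-op unless the project is private.
Todos::Destroy::ProjectPrivateService.new(project.id).execute
```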
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Todos module Destroy class EntityLeaveService < ::Todos::Destroy::BaseService extend ::Gitlab::Utils::Override attr_reader :user, :entity def initialize(user_id, entity_id, entity_type) unless %w[Group Project].include?(entity_type) raise ArgumentError, "#{entity_type} is not an entity user can leave" end @user = UserFinder.new(user_id).find_by_id @entity = entity_type.constantize.find_by(id: entity_id) # rubocop: disable CodeReuse/ActiveRecord end def execute return unless entity && user # if at least reporter, all entities including confidential issues can be accessed return if user_has_reporter_access? remove_confidential_resource_todos remove_group_todos if entity.private? remove_project_todos else enqueue_private_features_worker end end private def enqueue_private_features_worker projects.each do |project| TodosDestroyer::PrivateFeaturesWorker.perform_async(project.id, user.id) end end def remove_confidential_resource_todos # Deletes todos for confidential issues Todo .for_target(confidential_issues.select(:id)) .for_type(Issue.name) .for_user(user) .delete_all # Deletes todos for internal notes on unauthorized projects Todo .for_type(Issue.name) .for_internal_notes .for_project(non_authorized_reporter_projects) # Only Reporter+ can read internal notes .for_user(user) .delete_all end def remove_project_todos # Issues are viewable by guests (even in private projects), so remove those todos # from projects without guest access Todo .for_project(non_authorized_guest_projects) .for_user(user) .delete_all # MRs require reporter access, so remove those todos that are not authorized Todo .for_project(non_authorized_reporter_projects) .for_type(MergeRequest.name) .for_user(user) .delete_all end def remove_group_todos return unless entity.is_a?(Namespace) Todo .for_group(unauthorized_private_groups) .for_user(user) .delete_all end def projects condition = case entity when Project { id: entity.id } when Namespace { namespace_id: non_authorized_reporter_groups } end Project.where(condition) # rubocop: disable CodeReuse/ActiveRecord end def authorized_reporter_projects user.authorized_projects(Gitlab::Access::REPORTER).select(:id) end def authorized_guest_projects user.authorized_projects(Gitlab::Access::GUEST).select(:id) end def non_authorized_reporter_projects projects.id_not_in(authorized_reporter_projects) end def non_authorized_guest_projects projects.id_not_in(authorized_guest_projects) end def authorized_reporter_groups GroupsFinder.new(user, min_access_level: Gitlab::Access::REPORTER).execute.select(:id) end # rubocop: disable CodeReuse/ActiveRecord def unauthorized_private_groups return [] unless entity.is_a?(Namespace) groups = entity.self_and_descendants.private_only groups.select(:id) .id_not_in(GroupsFinder.new(user, all_available: false).execute.select(:id).reorder(nil)) end # rubocop: enable CodeReuse/ActiveRecord def non_authorized_reporter_groups entity.self_and_descendants.select(:id) .id_not_in(authorized_reporter_groups) end def user_has_reporter_access? 
return unless entity.is_a?(Namespace) entity.member?(User.find(user.id), Gitlab::Access::REPORTER) end def confidential_issues assigned_ids = IssueAssignee.select(:issue_id).for_assignee(user) Issue .in_projects(projects) .confidential_only .not_in_projects(authorized_reporter_projects) .not_authored_by(user) .id_not_in(assigned_ids) end end end end Todos::Destroy::EntityLeaveService.prepend_mod_with('Todos::Destroy::EntityLeaveService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Todos::Destroy::EntityLeaveService, feature_category: :team_planning do let_it_be(:user, reload: true) { create(:user) } let_it_be(:user2, reload: true) { create(:user) } let_it_be_with_refind(:group) { create(:group, :private) } let_it_be(:project) { create(:project, :private, group: group) } let(:issue) { create(:issue, project: project) } let(:issue_c) { create(:issue, project: project, confidential: true) } let!(:todo_group_user) { create(:todo, user: user, group: group) } let!(:todo_group_user2) { create(:todo, user: user2, group: group) } let(:mr) { create(:merge_request, source_project: project) } let!(:todo_mr_user) { create(:todo, user: user, target: mr, project: project) } let!(:todo_issue_user) { create(:todo, user: user, target: issue, project: project) } let!(:todo_issue_c_user) { create(:todo, user: user, target: issue_c, project: project) } let!(:todo_issue_c_user2) { create(:todo, user: user2, target: issue_c, project: project) } let(:internal_note) { create(:note, noteable: issue, project: project, confidential: true) } let!(:todo_for_internal_note) do create(:todo, user: user, target: issue, project: project, note: internal_note) end shared_examples 'using different access permissions' do before do set_access(project, user, project_access) if project_access set_access(group, user, group_access) if group_access end it params[:method].to_s.humanize(capitalize: false) do send(method_name) end end shared_examples 'does not remove any todos' do it { does_not_remove_any_todos } end shared_examples 'removes confidential issues and internal notes todos' do it { removes_confidential_issues_and_internal_notes_todos } end shared_examples 'removes only internal notes todos' do it { removes_only_internal_notes_todos } end def does_not_remove_any_todos expect { subject }.not_to change { Todo.count } end def removes_only_internal_notes_todos expect { subject }.to change { Todo.count }.from(7).to(6) end def removes_confidential_issues_and_internal_notes_todos expect { subject }.to change { Todo.count }.from(7).to(5) end def removes_confidential_issues_and_internal_notes_and_merge_request_todos expect { subject }.to change { Todo.count }.from(7).to(4) expect(user.todos).to match_array([todo_issue_user, todo_group_user]) end def set_access(object, user, access_name) case access_name when :developer object.add_developer(user) when :reporter object.add_reporter(user) when :guest object.add_guest(user) end end describe '#execute' do describe 'updating a Project' do subject { described_class.new(user.id, project.id, 'Project').execute } # a private project in a private group is valid context 'when project is private' do context 'when user is not a member of the project' do it 'removes project todos for the provided user' do expect { subject }.to change { Todo.count }.from(7).to(3) expect(user.todos).to match_array([todo_group_user]) expect(user2.todos).to match_array([todo_issue_c_user2, todo_group_user2]) end end context 'access permissions' do where(:group_access, :project_access, :method_name) do [ [nil, :reporter, :does_not_remove_any_todos], [nil, :guest, :removes_confidential_issues_and_internal_notes_and_merge_request_todos], [:reporter, nil, :does_not_remove_any_todos], [:guest, nil, :removes_confidential_issues_and_internal_notes_and_merge_request_todos], [:guest, :reporter, :does_not_remove_any_todos], [:guest, :guest, :removes_confidential_issues_and_internal_notes_and_merge_request_todos] ] end with_them do 
it_behaves_like 'using different access permissions' end end end # a private project in an internal/public group is valid context 'when project is private in an internal/public group' do let_it_be(:group) { create(:group, :internal) } let_it_be(:project) { create(:project, :private, group: group) } context 'when user is not a member of the project' do it 'removes project todos for the provided user' do expect { subject }.to change { Todo.count }.from(7).to(3) expect(user.todos).to match_array([todo_group_user]) expect(user2.todos).to match_array([todo_issue_c_user2, todo_group_user2]) end end context 'access permissions' do where(:group_access, :project_access, :method_name) do [ [nil, :reporter, :does_not_remove_any_todos], [nil, :guest, :removes_confidential_issues_and_internal_notes_and_merge_request_todos], [:reporter, nil, :does_not_remove_any_todos], [:guest, nil, :removes_confidential_issues_and_internal_notes_and_merge_request_todos], [:guest, :reporter, :does_not_remove_any_todos], [:guest, :guest, :removes_confidential_issues_and_internal_notes_and_merge_request_todos] ] end with_them do it_behaves_like 'using different access permissions' end end end # an internal project in an internal/public group is valid context 'when project is not private' do let(:group) { create(:group, :internal) } let(:project) { create(:project, :internal, group: group) } let(:issue) { create(:issue, project: project) } let(:issue_c) { create(:issue, project: project, confidential: true) } it 'enqueues the PrivateFeaturesWorker' do expect(TodosDestroyer::PrivateFeaturesWorker) .to receive(:perform_async).with(project.id, user.id) subject end context 'confidential issues' do context 'when a user is not an author of confidential issue' do it_behaves_like 'removes confidential issues and internal notes todos' end context 'when a user is an author of confidential issue' do before do issue_c.update!(author: user) end it_behaves_like 'removes only internal notes todos' end context 'when a user is an assignee of confidential issue' do before do issue_c.assignees << user end it_behaves_like 'removes only internal notes todos' end context 'access permissions' do where(:group_access, :project_access, :method_name) do [ [nil, :reporter, :does_not_remove_any_todos], [nil, :guest, :removes_confidential_issues_and_internal_notes_todos], [:reporter, nil, :does_not_remove_any_todos], [:guest, nil, :removes_confidential_issues_and_internal_notes_todos], [:guest, :reporter, :does_not_remove_any_todos], [:guest, :guest, :removes_confidential_issues_and_internal_notes_todos] ] end with_them do it_behaves_like 'using different access permissions' end end end context 'feature visibility check' do context 'when issues are visible only to project members' do before do project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE) end it 'removes only users issue todos' do expect { subject }.to change { Todo.count }.from(7).to(5) end end end end end describe 'updating a Group' do subject { described_class.new(user.id, group.id, 'Group').execute } context 'when group is private' do context 'when a user leaves a group' do it 'removes group and subproject todos for the user' do expect { subject }.to change { Todo.count }.from(7).to(2) expect(user.todos).to be_empty expect(user2.todos).to match_array([todo_issue_c_user2, todo_group_user2]) end end context 'access permissions' do where(:group_access, :project_access, :method_name) do [ [nil, :reporter, :does_not_remove_any_todos], [nil, :guest, 
:removes_confidential_issues_and_internal_notes_and_merge_request_todos], [:reporter, nil, :does_not_remove_any_todos], [:guest, nil, :removes_confidential_issues_and_internal_notes_and_merge_request_todos], [:guest, :reporter, :does_not_remove_any_todos], [:guest, :guest, :removes_confidential_issues_and_internal_notes_and_merge_request_todos] ] end with_them do it_behaves_like 'using different access permissions' end end context 'with nested groups' do let_it_be_with_refind(:parent_group) { create(:group, :public) } let_it_be_with_refind(:parent_subgroup) { create(:group) } let_it_be(:subgroup) { create(:group, :private, parent: group) } let_it_be(:subgroup2) { create(:group, :private, parent: group) } let_it_be(:subproject) { create(:project, group: subgroup) } let_it_be(:subproject2) { create(:project, group: subgroup2) } let!(:todo_subproject_user) { create(:todo, user: user, project: subproject) } let!(:todo_subproject2_user) { create(:todo, user: user, project: subproject2) } let!(:todo_subgroup_user) { create(:todo, user: user, group: subgroup) } let!(:todo_subgroup2_user) { create(:todo, user: user, group: subgroup2) } let!(:todo_subproject_user2) { create(:todo, user: user2, project: subproject) } let!(:todo_subpgroup_user2) { create(:todo, user: user2, group: subgroup) } let!(:todo_parent_group_user) { create(:todo, user: user, group: parent_group) } let(:subproject_internal_note) { create(:note, noteable: issue, project: project, confidential: true) } let!(:todo_for_internal_subproject_note) do create(:todo, user: user, target: issue, project: project, note: subproject_internal_note) end before do group.update!(parent: parent_group) end context 'when the user is not a member of any groups/projects' do it 'removes todos for the user including subprojects todos' do expect { subject }.to change { Todo.count }.from(15).to(5) expect(user.todos).to eq([todo_parent_group_user]) expect(user2.todos) .to match_array( [todo_issue_c_user2, todo_group_user2, todo_subproject_user2, todo_subpgroup_user2] ) end end context 'when the user is member of a parent group' do before do parent_group.add_developer(user) end it_behaves_like 'does not remove any todos' end context 'when the user is member of a subgroup' do before do subgroup.add_developer(user) end it 'does not remove group and subproject todos' do expect { subject }.to change { Todo.count }.from(15).to(8) expect(user.todos) .to match_array( [todo_group_user, todo_subgroup_user, todo_subproject_user, todo_parent_group_user] ) expect(user2.todos) .to match_array( [todo_issue_c_user2, todo_group_user2, todo_subproject_user2, todo_subpgroup_user2] ) end end context 'when the user is member of a child project' do before do subproject.add_developer(user) end it 'does not remove subproject and group todos' do expect { subject }.to change { Todo.count }.from(15).to(8) expect(user.todos) .to match_array( [todo_subgroup_user, todo_group_user, todo_subproject_user, todo_parent_group_user] ) expect(user2.todos) .to match_array( [todo_issue_c_user2, todo_group_user2, todo_subproject_user2, todo_subpgroup_user2] ) end end end end context 'when group is not private' do let(:group) { create(:group, :internal) } let(:project) { create(:project, :internal, group: group) } let(:issue) { create(:issue, project: project) } let(:issue_c) { create(:issue, project: project, confidential: true) } it 'enqueues the PrivateFeaturesWorker' do expect(TodosDestroyer::PrivateFeaturesWorker) .to receive(:perform_async).with(project.id, user.id) subject end context 
'access permissions' do where(:group_access, :project_access, :method_name) do [ [nil, nil, :removes_confidential_issues_and_internal_notes_todos], [nil, :reporter, :does_not_remove_any_todos], [nil, :guest, :removes_confidential_issues_and_internal_notes_todos], [:reporter, nil, :does_not_remove_any_todos], [:guest, nil, :removes_confidential_issues_and_internal_notes_todos], [:guest, :reporter, :does_not_remove_any_todos], [:guest, :guest, :removes_confidential_issues_and_internal_notes_todos] ] end with_them do it_behaves_like 'using different access permissions' end end end end context 'when entity type is not valid' do it 'raises an exception' do expect { described_class.new(user.id, group.id, 'GroupWrongly').execute } .to raise_error(ArgumentError) end end context 'when entity was not found' do it 'does not remove any todos' do expect { described_class.new(user.id, non_existing_record_id, 'Group').execute } .not_to change { Todo.count } end end end end
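Aside: illustrative calls to `Todos::Destroy::EntityLeaveService`; `user`, `group` and `project` are assumed from the caller.

```ruby
# Illustrative only: run when a user loses access to an entity.
Todos::Destroy::EntityLeaveService.new(user.id, group.id, 'Group').execute
Todos::Destroy::EntityLeaveService.new(user.id, project.id, 'Project').execute
# Any other entity_type raises ArgumentError, as covered in the spec above.
```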
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Todos module Destroy # Service class for deleting todos that belongs to confidential issues. # It deletes todos for users that are not at least reporters, issue author or assignee. # # Accepts issue_id or project_id as argument. # When issue_id is passed it deletes matching todos for one confidential issue. # When project_id is passed it deletes matching todos for all confidential issues of the project. class ConfidentialIssueService < ::Todos::Destroy::BaseService attr_reader :issues def initialize(issue_id: nil, project_id: nil) @issues = if issue_id Issue.id_in(issue_id) elsif project_id project_confidential_issues(project_id) end end def execute return unless todos_to_remove? ::Gitlab::Database.allow_cross_joins_across_databases( url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/422045') do delete_todos end end private def delete_todos authorized_users = ProjectAuthorization.select(:user_id) .for_project(project_ids) .non_guests todos.not_in_users(authorized_users).delete_all end def project_confidential_issues(project_id) project = Project.find(project_id) project.issues.confidential_only end # rubocop: disable CodeReuse/ActiveRecord def todos Todo.joins_issue_and_assignees .for_target(issues) .merge(Issue.confidential_only) .where('todos.user_id != issues.author_id') .where('todos.user_id != issue_assignees.user_id') end # rubocop: enable CodeReuse/ActiveRecord def todos_to_remove? issues&.any?(&:confidential?) end def project_ids issues&.distinct&.select(:project_id) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Todos::Destroy::ConfidentialIssueService, feature_category: :team_planning do let(:project) { create(:project, :public) } let(:user) { create(:user) } let(:author) { create(:user) } let(:assignee) { create(:user) } let(:guest) { create(:user) } let(:project_member) { create(:user) } let(:issue_1) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) } describe '#execute' do before do project.add_developer(project_member) project.add_guest(guest) # todos not to be deleted create(:todo, user: project_member, target: issue_1, project: project) create(:todo, user: author, target: issue_1, project: project) create(:todo, user: assignee, target: issue_1, project: project) create(:todo, user: user, project: project) # Todos to be deleted create(:todo, user: guest, target: issue_1, project: project) create(:todo, user: user, target: issue_1, project: project) end subject { described_class.new(issue_id: issue_1.id).execute } context 'when issue_id parameter is present' do context 'when provided issue is confidential' do it 'removes issue todos for users who can not access the confidential issue' do expect { subject }.to change { Todo.count }.from(6).to(4) end end context 'when provided issue is not confidential' do it 'does not remove any todos' do issue_1.update!(confidential: false) expect { subject }.not_to change { Todo.count } end end end context 'when project_id parameter is present' do subject { described_class.new(issue_id: nil, project_id: project.id).execute } it 'removes issues todos for users that cannot access confidential issues' do issue_2 = create(:issue, :confidential, project: project) issue_3 = create(:issue, :confidential, project: project, author: author, assignees: [assignee]) issue_4 = create(:issue, project: project) # Todos not to be deleted create(:todo, user: guest, target: issue_1, project: project) create(:todo, user: assignee, target: issue_1, project: project) create(:todo, user: project_member, target: issue_2, project: project) create(:todo, user: author, target: issue_3, project: project) create(:todo, user: user, target: issue_4, project: project) create(:todo, user: user, project: project) # Todos to be deleted create(:todo, user: user, target: issue_1, project: project) create(:todo, user: guest, target: issue_2, project: project) expect { subject }.to change { Todo.count }.from(14).to(10) end end end end
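Aside: illustrative calls to `Todos::Destroy::ConfidentialIssueService`, showing its two modes; `issue` and `project` are assumed.

```ruby
# Illustrative only.
Todos::Destroy::ConfidentialIssueService.new(issue_id: issue.id).execute     # one confidential issue
Todos::Destroy::ConfidentialIssueService.new(project_id: project.id).execute # all confidential issues in a project
```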
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Events class DestroyService BATCH_SIZE = 50 def initialize(project) @project = project end def execute loop do count = delete_events_in_batches break if count < BATCH_SIZE end ServiceResponse.success(message: 'Events were deleted.') rescue StandardError => e ServiceResponse.error(message: e.message) end private attr_reader :project def delete_events_in_batches project.events.limit(BATCH_SIZE).delete_all end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Events::DestroyService, feature_category: :user_profile do subject(:service) { described_class.new(project) } let_it_be(:project, reload: true) { create(:project, :repository) } let_it_be(:another_project) { create(:project) } let_it_be(:merge_request) { create(:merge_request, source_project: project) } let_it_be(:user) { create(:user) } let!(:unrelated_event) { create(:event, :merged, project: another_project, target: another_project, author: user) } before do create(:event, :created, project: project, target: project, author: user) create(:event, :created, project: project, target: merge_request, author: user) create(:event, :merged, project: project, target: merge_request, author: user) end let(:events) { project.events } describe '#execute', :aggregate_failures do it 'deletes the events' do response = nil expect { response = subject.execute }.to change(Event, :count).by(-3) expect(response).to be_success expect(unrelated_event.reload).to be_present end context 'batch delete' do before do stub_const("#{described_class}::BATCH_SIZE", 2) end it 'splits delete queries into batches' do expect(project).to receive(:events).twice.and_call_original subject.execute end end context 'when an error is raised while deleting the records' do before do allow(project).to receive_message_chain(:events, :limit, :delete_all).and_raise(ActiveRecord::ActiveRecordError, 'custom error') end it 'returns error' do response = subject.execute expect(response).to be_error expect(response.message).to eq 'custom error' end it 'does not delete events' do expect { subject.execute }.not_to change(Event, :count) end end end end
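The `stub_const("#{described_class}::BATCH_SIZE", 2)` example above hinges on the loop in `execute`: deletion stops as soon as a batch comes back short. A framework-free sketch of that loop, with invented stand-ins for `project.events`:

```ruby
# Plain-Ruby sketch of the batch-deletion loop; `fake_relation` stands in for
# `project.events` and the numbers are arbitrary.
BATCH_SIZE = 50
fake_relation = Array.new(120) { |i| i } # pretend rows

deleted_total = 0
loop do
  batch = fake_relation.shift(BATCH_SIZE) # delete_all returns the affected row count
  deleted_total += batch.size
  break if batch.size < BATCH_SIZE        # the first short batch ends the loop
end

puts deleted_total # => 120 (three queries: 50 + 50 + 20)
```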
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Events class RenderService < BaseRenderer def execute(events, atom_request: false) notes = events.map(&:note).compact render_notes(notes, atom_request) end private def render_notes(notes, atom_request) Notes::RenderService .new(current_user) .execute(notes, render_options(atom_request)) end def render_options(atom_request) return {} unless atom_request { only_path: false, xhtml: true } end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Events::RenderService, feature_category: :user_profile do describe '#execute' do let!(:note) { build(:note) } let!(:event) { build(:event, target: note, project: note.project) } let!(:user) { build(:user) } context 'when the request format is atom' do it 'renders the note inside events' do expect(Banzai::ObjectRenderer).to receive(:new) .with(user: user, redaction_context: { only_path: false, xhtml: true }) .and_call_original expect_any_instance_of(Banzai::ObjectRenderer) .to receive(:render).with([note], :note) described_class.new(user).execute([event], atom_request: true) end end context 'when the request format is not atom' do it 'renders the note inside events' do expect(Banzai::ObjectRenderer).to receive(:new) .with(user: user, redaction_context: {}) .and_call_original expect_any_instance_of(Banzai::ObjectRenderer) .to receive(:render).with([note], :note) described_class.new(user).execute([event], atom_request: false) end end end end
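The spec above asserts against `Banzai::ObjectRenderer`, one level below the collaborator the class actually calls. A hedged alternative sketch that stubs `Notes::RenderService` directly, assuming the same `let` definitions as above:

```ruby
# Sketch only: asserts the collaborator named in the class instead of Banzai.
it 'passes atom render options to Notes::RenderService' do
  renderer = instance_double(Notes::RenderService)

  expect(Notes::RenderService).to receive(:new).with(user).and_return(renderer)
  expect(renderer).to receive(:execute).with([note], { only_path: false, xhtml: true })

  described_class.new(user).execute([event], atom_request: true)
end
```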
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import class FogbugzService < Import::BaseService attr_reader :client, :params, :current_user def execute(credentials) url = credentials[:uri] if blocked_url?(url) return log_and_return_error("Invalid URL: #{url}", _("Invalid URL: %{url}") % { url: url }, :bad_request) end unless authorized? return log_and_return_error( "You don't have permissions to import this project", _("You don't have permissions to import this project"), :unauthorized ) end unless repo return log_and_return_error( "Project #{repo_id} could not be found", s_("Fogbugz|Project %{repo} could not be found") % { repo: repo_id }, :unprocessable_entity) end project = create_project(credentials) if project.persisted? success(project) elsif project.errors[:import_source_disabled].present? error(project.errors[:import_source_disabled], :forbidden) else error(project_save_error(project), :unprocessable_entity) end rescue StandardError => e log_and_return_error( "Fogbugz import failed due to an error: #{e}", s_("Fogbugz|Fogbugz import failed due to an error: %{error}" % { error: e }), :bad_request) end private def create_project(credentials) Gitlab::FogbugzImport::ProjectCreator.new( repo, project_name, target_namespace, current_user, credentials, umap ).execute end def repo_id @repo_id ||= params[:repo_id] end def repo @repo ||= client.repo(repo_id) end def project_name @project_name ||= params[:new_name].presence || repo.name end def namespace_path @namespace_path ||= params[:target_namespace].presence || current_user.namespace_path end def target_namespace @target_namespace ||= find_or_create_namespace(namespace_path, current_user.namespace_path) end def umap @umap ||= params[:umap] end def allow_local_requests? Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services? end def blocked_url?(url) Gitlab::UrlBlocker.blocked_url?( url, allow_localhost: allow_local_requests?, allow_local_network: allow_local_requests?, schemes: %w[http https] ) end def log_and_return_error(message, translated_message, error_type) log_error(message) error(translated_message, error_type) end def log_error(message) Gitlab::Import::Logger.error( message: 'Import failed due to a Fogbugz error', error: message ) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Import::FogbugzService, feature_category: :importers do let_it_be(:user) { create(:user) } let(:base_uri) { "https://test:7990" } let(:token) { "asdasd12345" } let(:repo_id) { "fogbugz_id" } let(:repo) { instance_double(Gitlab::FogbugzImport::Repository, name: 'test', raw_data: nil) } let(:client) { instance_double(Gitlab::FogbugzImport::Client) } let(:credentials) { { uri: base_uri, token: token } } let(:params) { { repo_id: repo_id } } subject { described_class.new(client, user, params) } before do allow(subject).to receive(:authorized?).and_return(true) stub_application_setting(import_sources: ['fogbugz']) end context 'when no repo is found' do before do allow(client).to receive(:repo).with(repo_id).and_return(nil) end it 'returns an error' do result = subject.execute(credentials) expect(result).to include( message: "Project #{repo_id} could not be found", status: :error, http_status: :unprocessable_entity ) end end context 'when import source is disabled' do before do stub_application_setting(import_sources: nil) allow(client).to receive(:repo).with(repo_id).and_return(repo) end it 'returns forbidden' do result = subject.execute(credentials) expect(result).to include( status: :error, http_status: :forbidden ) end end context 'when user is unauthorized' do before do allow(subject).to receive(:authorized?).and_return(false) end it 'returns an error' do result = subject.execute(credentials) expect(result).to include( message: "You don't have permissions to import this project", status: :error, http_status: :unauthorized ) end end context 'verify url' do shared_examples 'denies local request' do before do allow(client).to receive(:repo).with(repo_id).and_return(repo) end it 'does not allow requests' do result = subject.execute(credentials) expect(result[:status]).to eq(:error) expect(result[:message]).to include("Invalid URL:") end end context 'when host is localhost' do let(:base_uri) { 'http://localhost:3000' } include_examples 'denies local request' end context 'when host is on local network' do let(:base_uri) { 'https://192.168.0.191' } include_examples 'denies local request' end context 'when host is ftp protocol' do let(:base_uri) { 'ftp://testing' } include_examples 'denies local request' end end context 'when import starts succesfully' do before do allow(client).to receive(:repo).with(repo_id).and_return( instance_double(Gitlab::FogbugzImport::Repository, name: 'test', raw_data: nil) ) end it 'returns success' do result = subject.execute(credentials) expect(result[:status]).to eq(:success) expect(result[:project].name).to eq('test') end end context 'when import fails to start' do let(:error_messages_array) { instance_double(Array, join: "something went wrong") } let(:errors_double) { instance_double(ActiveModel::Errors, full_messages: error_messages_array, :[] => nil) } let(:project_double) { instance_double(Project, persisted?: false, errors: errors_double) } let(:project_creator) { instance_double(Gitlab::FogbugzImport::ProjectCreator, execute: project_double) } before do allow(Gitlab::FogbugzImport::ProjectCreator).to receive(:new).and_return(project_creator) allow(client).to receive(:repo).with(repo_id).and_return( instance_double(Gitlab::FogbugzImport::Repository, name: 'test', raw_data: nil) ) end it 'returns error' do result = subject.execute(credentials) expect(result[:status]).to eq(:error) expect(result[:message]).to eq("something went wrong") end end it 'returns error for unknown error causes' do message 
= 'Not Implemented' exception = StandardError.new(message) allow(client).to receive(:repo).and_raise(exception) expect(subject.execute(credentials)).to include({ status: :error, message: "Fogbugz import failed due to an error: #{message}" }) end end
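One behaviour of `log_and_return_error` the spec above does not pin down is the logging side effect. A hedged sketch of such an assertion, reusing the `client`, `repo_id` and `credentials` definitions from the spec:

```ruby
# Sketch only: the message passed to error() is also written to the import logger.
it 'logs the failure when the repository cannot be found' do
  allow(client).to receive(:repo).with(repo_id).and_return(nil)

  expect(Gitlab::Import::Logger).to receive(:error).with(
    message: 'Import failed due to a Fogbugz error',
    error: "Project #{repo_id} could not be found"
  )

  subject.execute(credentials)
end
```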
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import class PrepareService < ::BaseService def execute uploader = UploadService.new(project, params[:file]).execute if uploader enqueue_import(uploader.upload) ServiceResponse.success(message: success_message) else ServiceResponse.error(message: _('File upload error.')) end end private def enqueue_import(upload) worker.perform_async(current_user.id, project.id, upload.id) end def worker raise NotImplementedError end def success_message raise NotImplementedError end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Import::PrepareService, feature_category: :importers do let_it_be(:project) { create(:project) } let_it_be(:user) { create(:user) } let(:file) { double } let(:upload_service) { double } let(:uploader) { double } let(:upload) { double } let(:service) { described_class.new(project, user, file: file) } subject { service.execute } context 'when file is uploaded correctly' do let(:upload_id) { 99 } before do mock_upload end it 'raises NotImplemented error for worker' do expect { subject }.to raise_error(NotImplementedError) end context 'when a job is enqueued' do before do worker = double allow(service).to receive(:worker).and_return(worker) allow(worker).to receive(:perform_async) end it 'raises NotImplemented error for success_message when a job is enqueued' do expect { subject }.to raise_error(NotImplementedError) end it 'returns a success response when a success_message is implemented' do message = 'It works!' allow(service).to receive(:success_message).and_return(message) result = subject expect(result).to be_success expect(result.message).to eq(message) end end end context 'when file upload fails' do before do mock_upload(false) end it 'returns an error message' do result = subject expect(result[:status]).to eq(:error) expect(result[:message]).to eq('File upload error.') end end end
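Since `worker` and `success_message` are abstract hooks, the `NotImplementedError` examples above are really documenting the subclass contract. An illustrative subclass sketch (`MyPrepareService` and `MyImportWorker` are invented names, not from the source):

```ruby
# Illustrative only: the two hooks a concrete prepare service is expected to provide.
class MyPrepareService < Import::PrepareService
  private

  # Sidekiq worker receiving (current_user.id, project.id, upload.id)
  def worker
    MyImportWorker
  end

  def success_message
    'Your import has been scheduled.'
  end
end
```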
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import class ValidateRemoteGitEndpointService # Validates if the remote endpoint is a valid GIT repository # Only smart protocol is supported # Validation rules are taken from https://git-scm.com/docs/http-protocol#_smart_clients GIT_SERVICE_NAME = "git-upload-pack" GIT_EXPECTED_FIRST_PACKET_LINE = "# service=#{GIT_SERVICE_NAME}" GIT_BODY_MESSAGE_REGEXP = /^[0-9a-f]{4}#{GIT_EXPECTED_FIRST_PACKET_LINE}/ # https://github.com/git/git/blob/master/Documentation/technical/protocol-common.txt#L56-L59 GIT_PROTOCOL_PKT_LEN = 4 GIT_MINIMUM_RESPONSE_LENGTH = GIT_PROTOCOL_PKT_LEN + GIT_EXPECTED_FIRST_PACKET_LINE.length EXPECTED_CONTENT_TYPE = "application/x-#{GIT_SERVICE_NAME}-advertisement" INVALID_BODY_MESSAGE = 'Not a git repository: Invalid response body' INVALID_CONTENT_TYPE_MESSAGE = 'Not a git repository: Invalid content-type' def initialize(params) @params = params end def execute uri = Gitlab::Utils.parse_url(@params[:url]) if !uri || !uri.hostname || Project::VALID_IMPORT_PROTOCOLS.exclude?(uri.scheme) return ServiceResponse.error(message: "#{@params[:url]} is not a valid URL") end return ServiceResponse.success if uri.scheme == 'git' uri.fragment = nil url = Gitlab::Utils.append_path(uri.to_s, "/info/refs?service=#{GIT_SERVICE_NAME}") response, response_body = http_get_and_extract_first_chunks(url) validate(uri, response, response_body) rescue *Gitlab::HTTP::HTTP_ERRORS => err error_result("HTTP #{err.class.name.underscore} error: #{err.message}") rescue StandardError => err ServiceResponse.error( message: "Internal #{err.class.name.underscore} error: #{err.message}", reason: 500 ) end private def http_get_and_extract_first_chunks(url) # We are interested only in the first chunks of the response # So we're using stream_body: true and breaking when receive enough body response = nil response_body = '' Gitlab::HTTP.get(url, stream_body: true, follow_redirects: false, basic_auth: auth) do |response_chunk| response = response_chunk response_body += response_chunk break if GIT_MINIMUM_RESPONSE_LENGTH <= response_body.length end [response, response_body] end def auth unless @params[:user].to_s.blank? { username: @params[:user], password: @params[:password] } end end def validate(uri, response, response_body) return status_code_error(uri, response) unless status_code_is_valid?(response) return error_result(INVALID_CONTENT_TYPE_MESSAGE) unless content_type_is_valid?(response) return error_result(INVALID_BODY_MESSAGE) unless response_body_is_valid?(response_body) ServiceResponse.success end def status_code_error(uri, response) http_code = response.http_response.code.to_i message = response.http_response.message || Rack::Utils::HTTP_STATUS_CODES[http_code] error_result( "#{uri} endpoint error: #{http_code}#{message.presence&.prepend(' ')}", http_code ) end def error_result(message, reason = nil) ServiceResponse.error(message: message, reason: reason) end def status_code_is_valid?(response) response.http_response.code == '200' end def content_type_is_valid?(response) response.http_response['content-type'] == EXPECTED_CONTENT_TYPE end def response_body_is_valid?(response_body) response_body.match?(GIT_BODY_MESSAGE_REGEXP) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Import::ValidateRemoteGitEndpointService, feature_category: :importers do include StubRequests let_it_be(:base_url) { 'http://demo.host/path' } let_it_be(:endpoint_url) { "#{base_url}/info/refs?service=git-upload-pack" } let_it_be(:endpoint_error_message) { "#{base_url} endpoint error:" } let_it_be(:body_error_message) { described_class::INVALID_BODY_MESSAGE } let_it_be(:content_type_error_message) { described_class::INVALID_CONTENT_TYPE_MESSAGE } describe '#execute' do let(:valid_response) do { status: 200, body: '001e# service=git-upload-pack', headers: { 'Content-Type': 'application/x-git-upload-pack-advertisement' } } end it 'correctly handles URLs with fragment' do allow(Gitlab::HTTP).to receive(:get) described_class.new(url: "#{base_url}#somehash").execute expect(Gitlab::HTTP).to have_received(:get).with(endpoint_url, basic_auth: nil, stream_body: true, follow_redirects: false) end context 'when uri is using git:// protocol' do subject { described_class.new(url: 'git://demo.host/repo') } it 'returns success' do result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.success?).to be(true) end end context 'when uri is using an invalid protocol' do subject { described_class.new(url: 'ssh://demo.host/repo') } it 'reports error when invalid URL is provided' do result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) end end context 'when uri is invalid' do subject { described_class.new(url: 'http:example.com') } it 'reports error when invalid URL is provided' do result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) end end context 'when receiving HTTP response' do subject { described_class.new(url: base_url) } it 'returns success when HTTP response is valid and contains correct payload' do stub_full_request(endpoint_url, method: :get).to_return(valid_response) result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.success?).to be(true) end it 'reports error when status code is not 200' do error_response = { status: 401 } stub_full_request(endpoint_url, method: :get).to_return(error_response) result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq("#{endpoint_error_message} #{error_response[:status]}") end it 'reports error when invalid URL is provided' do result = described_class.new(url: 1).execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq('1 is not a valid URL') end it 'reports error when required header is missing' do stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ headers: nil })) result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq(content_type_error_message) end it 'reports error when body is too short' do stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid content' })) result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq(body_error_message) end it 'reports error when body is in invalid format' do stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid long content with no git respons whatshowever' })) result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) 
expect(result.message).to eq(body_error_message) end it 'reports error when http exceptions are raised' do err = SocketError.new('dummy message') stub_full_request(endpoint_url, method: :get).to_raise(err) result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq("HTTP #{err.class.name.underscore} error: #{err.message}") end it 'reports error when other exceptions are raised' do err = StandardError.new('internal dummy message') stub_full_request(endpoint_url, method: :get).to_raise(err) result = subject.execute expect(result).to be_a(ServiceResponse) expect(result.error?).to be(true) expect(result.message).to eq("Internal #{err.class.name.underscore} error: #{err.message}") end end it 'passes basic auth when credentials are provided' do allow(Gitlab::HTTP).to receive(:get) described_class.new(url: "#{base_url}#somehash", user: 'user', password: 'password').execute expect(Gitlab::HTTP).to have_received(:get).with(endpoint_url, basic_auth: { username: 'user', password: 'password' }, stream_body: true, follow_redirects: false) end end end
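The `'001e# service=git-upload-pack'` body used throughout the spec is the first pkt-line of a smart-HTTP response: four hex digits encoding the line length (4-byte prefix plus payload, including the trailing newline), followed by the service announcement. A small plain-Ruby sketch of that framing:

```ruby
# Why '001e# service=git-upload-pack' satisfies GIT_BODY_MESSAGE_REGEXP.
payload      = "# service=git-upload-pack\n"
first_packet = format('%04x', payload.bytesize + 4) + payload

puts first_packet.inspect                                          # => "001e# service=git-upload-pack\n"
puts first_packet.match?(/^[0-9a-f]{4}# service=git-upload-pack/)  # => true
```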
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import class BitbucketServerService < Import::BaseService attr_reader :client, :params, :current_user def execute(credentials) if blocked_url? return log_and_return_error("Invalid URL: #{url}", :bad_request) end unless authorized? return log_and_return_error("You don't have permissions to import this project", :unauthorized) end unless repo return log_and_return_error("Project %{project_repo} could not be found" % { project_repo: "#{project_key}/#{repo_slug}" }, :unprocessable_entity) end project = create_project(credentials) track_access_level('bitbucket') if project.persisted? success(project) elsif project.errors[:import_source_disabled].present? error(project.errors[:import_source_disabled], :forbidden) else log_and_return_error(project_save_error(project), :unprocessable_entity) end rescue BitbucketServer::Connection::ConnectionError => e log_and_return_error("Import failed due to a BitBucket Server error: #{e}", :bad_request) end private def create_project(credentials) Gitlab::BitbucketServerImport::ProjectCreator.new( project_key, repo_slug, repo, project_name, target_namespace, current_user, credentials, timeout_strategy ).execute end def repo @repo ||= client.repo(project_key, repo_slug) end def project_name @project_name ||= params[:new_name].presence || repo.name end def namespace_path @namespace_path ||= params[:new_namespace].presence || current_user.namespace_path end def target_namespace @target_namespace ||= find_or_create_namespace(namespace_path, current_user.namespace_path) end def repo_slug @repo_slug ||= params[:bitbucket_server_repo] end def project_key @project_key ||= params[:bitbucket_server_project] end def url @url ||= params[:bitbucket_server_url] end def timeout_strategy @timeout_strategy ||= params[:timeout_strategy] || ProjectImportData::PESSIMISTIC_TIMEOUT end def allow_local_requests? Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services? end def blocked_url? Gitlab::UrlBlocker.blocked_url?( url, allow_localhost: allow_local_requests?, allow_local_network: allow_local_requests?, schemes: %w[http https] ) end def log_and_return_error(message, error_type) log_error(message) error(_(message), error_type) end def log_error(message) Gitlab::Import::Logger.error( message: 'Import failed due to a BitBucket Server error', error: message ) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Import::BitbucketServerService, feature_category: :importers do let_it_be(:user) { create(:user) } let(:base_uri) { "https://test:7990" } let(:token) { "asdasd12345" } let(:secret) { "sekrettt" } let(:project_key) { 'TES' } let(:repo_slug) { 'vim' } let(:repo) do { name: 'vim', description: 'test', visibility_level: Gitlab::VisibilityLevel::PUBLIC, browse_url: 'http://repo.com/repo/repo', clone_url: 'http://repo.com/repo/repo.git' } end let(:client) { double(BitbucketServer::Client) } let(:credentials) { { base_uri: base_uri, user: user, password: token } } let(:params) { { bitbucket_server_url: base_uri, bitbucket_server_username: user, personal_access_token: token, bitbucket_server_project: project_key, bitbucket_server_repo: repo_slug } } subject { described_class.new(client, user, params) } before do allow(subject).to receive(:authorized?).and_return(true) end context 'execute' do before do allow(subject).to receive(:authorized?).and_return(true) allow(client).to receive(:repo).with(project_key, repo_slug).and_return(double(repo)) end it 'tracks an access level event' do subject.execute(credentials) expect_snowplow_event( category: 'Import::BitbucketServerService', action: 'create', label: 'import_access_level', user: user, extra: { import_type: 'bitbucket', user_role: 'Owner' } ) end end context 'when no repo is found' do before do allow(subject).to receive(:authorized?).and_return(true) allow(client).to receive(:repo).and_return(nil) end it 'returns an error' do result = subject.execute(credentials) expect(result).to include( message: "Project #{project_key}/#{repo_slug} could not be found", status: :error, http_status: :unprocessable_entity ) end end context 'when import source is disabled' do before do stub_application_setting(import_sources: nil) allow(subject).to receive(:authorized?).and_return(true) allow(client).to receive(:repo).with(project_key, repo_slug).and_return(double(repo)) end it 'returns forbidden' do result = subject.execute(credentials) expect(result).to include( status: :error, http_status: :forbidden ) end end context 'when user is unauthorized' do before do allow(subject).to receive(:authorized?).and_return(false) end it 'returns an error' do result = subject.execute(credentials) expect(result).to include( message: "You don't have permissions to import this project", status: :error, http_status: :unauthorized ) end end context 'verify url' do shared_examples 'denies local request' do before do allow(client).to receive(:repo).with(project_key, repo_slug).and_return(double(repo)) end it 'does not allow requests' do result = subject.execute(credentials) expect(result[:status]).to eq(:error) expect(result[:message]).to include("Invalid URL:") end end context 'when host is localhost' do before do allow(subject).to receive(:url).and_return('https://localhost:3000') end include_examples 'denies local request' end context 'when host is on local network' do before do allow(subject).to receive(:url).and_return('https://192.168.0.191') end include_examples 'denies local request' end context 'when host is ftp protocol' do before do allow(subject).to receive(:url).and_return('ftp://testing') end include_examples 'denies local request' end end it 'raises an exception for unknown error causes' do exception = StandardError.new('Not Implemented') allow(client).to receive(:repo).and_raise(exception) expect(Gitlab::Import::Logger).not_to receive(:error) expect { subject.execute(credentials) }.to 
raise_error(exception) end end
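The class also rescues `BitbucketServer::Connection::ConnectionError` and converts it to a `:bad_request` response rather than re-raising, which the spec above only shows by contrast with the `StandardError` example. A hedged sketch of a direct assertion, reusing the doubles defined above:

```ruby
# Sketch only: connection failures are surfaced as an error response, not raised.
it 'returns an error when Bitbucket Server cannot be reached' do
  error = BitbucketServer::Connection::ConnectionError.new('timed out')
  allow(client).to receive(:repo).and_raise(error)

  result = subject.execute(credentials)

  expect(result[:status]).to eq(:error)
  expect(result[:http_status]).to eq(:bad_request)
  expect(result[:message]).to include('Import failed due to a BitBucket Server error')
end
```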
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import class GithubService < Import::BaseService include ActiveSupport::NumberHelper include Gitlab::Utils::StrongMemoize attr_accessor :client attr_reader :params, :current_user def execute(access_params, provider) context_error = validate_context return context_error if context_error project = create_project(access_params, provider) track_access_level('github') if project.persisted? store_import_settings(project, access_params) success(project) elsif project.errors[:import_source_disabled].present? error(project.errors[:import_source_disabled], :forbidden) else error(project_save_error(project), :unprocessable_entity) end rescue Octokit::Error => e log_error(e) end def create_project(access_params, provider) Gitlab::LegacyGithubImport::ProjectCreator.new( repo, project_name, target_namespace, current_user, type: provider, **access_params ).execute(extra_project_attrs) end def repo @repo ||= client.repository(params[:repo_id].to_i) end def project_name @project_name ||= params[:new_name].presence || repo[:name] end def target_namespace @target_namespace ||= Namespace.find_by_full_path(target_namespace_path) end def extra_project_attrs {} end def oversized? repository_size_limit > 0 && repo[:size] > repository_size_limit end def oversize_error_message _('"%{repository_name}" size (%{repository_size}) is larger than the limit of %{limit}.') % { repository_name: repo[:name], repository_size: number_to_human_size(repo[:size]), limit: number_to_human_size(repository_size_limit) } end def repository_size_limit strong_memoize :repository_size_limit do namespace_limit = target_namespace.repository_size_limit.to_i if namespace_limit > 0 namespace_limit else Gitlab::CurrentSettings.repository_size_limit.to_i end end end def url @url ||= params[:github_hostname] end def allow_local_requests? Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services? end def blocked_url? Gitlab::UrlBlocker.blocked_url?( url, allow_localhost: allow_local_requests?, allow_local_network: allow_local_requests?, schemes: %w[http https] ) end private def validate_context if blocked_url? log_and_return_error("Invalid URL: #{url}", _("Invalid URL: %{url}") % { url: url }, :bad_request) elsif target_namespace.nil? error(_('Namespace or group to import repository into does not exist.'), :unprocessable_entity) elsif !authorized? error(_('You are not allowed to import projects in this namespace.'), :unprocessable_entity) elsif oversized? error(oversize_error_message, :unprocessable_entity) end end def target_namespace_path raise ArgumentError, 'Target namespace is required' if params[:target_namespace].blank? 
params[:target_namespace] end def log_error(exception) Gitlab::GithubImport::Logger.error( message: 'Import failed due to a GitHub error', status: exception.response_status, error: exception.response_body ) error(_('Import failed due to a GitHub error: %{original} (HTTP %{code})') % { original: exception.response_body, code: exception.response_status }, :unprocessable_entity) end def log_and_return_error(message, translated_message, http_status) Gitlab::GithubImport::Logger.error( message: 'Error while attempting to import from GitHub', error: message ) error(translated_message, http_status) end def store_import_settings(project, access_params) Gitlab::GithubImport::Settings .new(project) .write( timeout_strategy: params[:timeout_strategy] || ProjectImportData::PESSIMISTIC_TIMEOUT, optional_stages: params[:optional_stages], additional_access_tokens: access_params[:additional_access_tokens] ) end end end Import::GithubService.prepend_mod_with('Import::GithubService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Import::GithubService, feature_category: :importers do let_it_be(:user) { create(:user) } let_it_be(:token) { 'complex-token' } let_it_be(:access_params) do { github_access_token: 'github-complex-token', additional_access_tokens: %w[foo bar] } end let(:settings) { instance_double(Gitlab::GithubImport::Settings) } let(:user_namespace_path) { user.namespace_path } let(:optional_stages) { nil } let(:timeout_strategy) { "optimistic" } let(:params) do { repo_id: 123, new_name: 'new_repo', target_namespace: user_namespace_path, optional_stages: optional_stages, timeout_strategy: timeout_strategy } end let(:client) { Gitlab::GithubImport::Client.new(token) } let(:project_double) { instance_double(Project, persisted?: true) } subject(:github_importer) { described_class.new(client, user, params) } before do allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings) allow(settings) .to receive(:write) .with( optional_stages: optional_stages, additional_access_tokens: access_params[:additional_access_tokens], timeout_strategy: timeout_strategy ) end context 'do not raise an exception on input error' do let(:exception) { Octokit::ClientError.new(status: 404, body: 'Not Found') } before do expect(client).to receive(:repository).and_raise(exception) end it 'logs the original error' do expect(Gitlab::Import::Logger).to receive(:error).with({ message: 'Import failed due to a GitHub error', status: 404, error: 'Not Found' }).and_call_original subject.execute(access_params, :github) end it 'returns an error with message and code' do result = subject.execute(access_params, :github) expect(result).to include( message: 'Import failed due to a GitHub error: Not Found (HTTP 404)', status: :error, http_status: :unprocessable_entity ) end end it 'raises an exception for unknown error causes' do exception = StandardError.new('Not Implemented') expect(client).to receive(:repository).and_raise(exception) expect(Gitlab::Import::Logger).not_to receive(:error) expect { subject.execute(access_params, :github) }.to raise_error(exception) end context 'repository size validation' do let(:repository_double) { { name: 'repository', size: 99 } } before do allow(subject).to receive(:authorized?).and_return(true) expect(client).to receive(:repository).and_return(repository_double) allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator| allow(creator).to receive(:execute).and_return(project_double) end end context 'when there is no repository size limit defined' do it 'skips the check, succeeds, and tracks an access level' do expect(subject.execute(access_params, :github)).to include(status: :success) expect(settings) .to have_received(:write) .with(optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens], timeout_strategy: timeout_strategy ) expect_snowplow_event( category: 'Import::GithubService', action: 'create', label: 'import_access_level', user: user, extra: { import_type: 'github', user_role: 'Owner' } ) end end context 'when the target namespace repository size limit is defined' do let_it_be(:group) { create(:group, repository_size_limit: 100) } before do params[:target_namespace] = group.full_path end it 'succeeds when the repository is smaller than the limit' do expect(subject.execute(access_params, :github)).to include(status: :success) expect(settings) .to have_received(:write) .with( optional_stages: nil, additional_access_tokens: 
access_params[:additional_access_tokens], timeout_strategy: timeout_strategy ) expect_snowplow_event( category: 'Import::GithubService', action: 'create', label: 'import_access_level', user: user, extra: { import_type: 'github', user_role: 'Not a member' } ) end it 'returns error when the repository is larger than the limit' do repository_double[:size] = 101 expect(subject.execute(access_params, :github)).to include(size_limit_error) end end context 'when target namespace repository limit is not defined' do let_it_be(:group) { create(:group) } before do stub_application_setting(repository_size_limit: 100) end context 'when application size limit is defined' do it 'succeeds when the repository is smaller than the limit' do expect(subject.execute(access_params, :github)).to include(status: :success) expect(settings) .to have_received(:write) .with( optional_stages: nil, additional_access_tokens: access_params[:additional_access_tokens], timeout_strategy: timeout_strategy ) expect_snowplow_event( category: 'Import::GithubService', action: 'create', label: 'import_access_level', user: user, extra: { import_type: 'github', user_role: 'Owner' } ) end it 'returns error when the repository is larger than the limit' do repository_double[:size] = 101 expect(subject.execute(access_params, :github)).to include(size_limit_error) end end end context 'when optional stages params present' do let(:optional_stages) do { single_endpoint_issue_events_import: true, single_endpoint_notes_import: 'false', attachments_import: false, collaborators_import: true } end it 'saves optional stages choice to import_data' do subject.execute(access_params, :github) expect(settings) .to have_received(:write) .with( optional_stages: optional_stages, additional_access_tokens: access_params[:additional_access_tokens], timeout_strategy: timeout_strategy ) end end context 'when timeout strategy param is present' do let(:timeout_strategy) { 'pessimistic' } it 'saves timeout strategy to import_data' do subject.execute(access_params, :github) expect(settings) .to have_received(:write) .with( optional_stages: optional_stages, additional_access_tokens: access_params[:additional_access_tokens], timeout_strategy: timeout_strategy ) end end context 'when additional access tokens are present' do it 'saves additional access tokens to import_data' do subject.execute(access_params, :github) expect(settings) .to have_received(:write) .with( optional_stages: optional_stages, additional_access_tokens: %w[foo bar], timeout_strategy: timeout_strategy ) end end end context 'when import source is disabled' do let(:repository_double) do { name: 'vim', description: 'test', full_name: 'test/vim', clone_url: 'http://repo.com/repo/repo.git', private: false, has_wiki?: false } end before do stub_application_setting(import_sources: nil) allow(client).to receive(:repository).and_return(repository_double) end it 'returns forbidden' do result = subject.execute(access_params, :github) expect(result).to include( status: :error, http_status: :forbidden ) end end context 'when a blocked/local URL is used as github_hostname' do let(:message) { 'Error while attempting to import from GitHub' } let(:error) { "Invalid URL: #{url}" } before do stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) end where(url: %w[https://localhost https://10.0.0.1]) with_them do it 'returns and logs an error' do allow(github_importer).to receive(:url).and_return(url) expect(Gitlab::Import::Logger).to receive(:error).with({ message: message, error: error 
}).and_call_original expect(github_importer.execute(access_params, :github)).to include(blocked_url_error(url)) end end end context 'when target_namespace is blank' do before do params[:target_namespace] = '' end it 'raises an exception' do expect { subject.execute(access_params, :github) }.to raise_error(ArgumentError, 'Target namespace is required') end end context 'when namespace to import repository into does not exist' do before do params[:target_namespace] = 'unknown_path' end it 'returns an error' do expect(github_importer.execute(access_params, :github)).to include(not_existed_namespace_error) end end context 'when user has no permissions to import repository into the specified namespace' do let_it_be(:group) { create(:group) } before do params[:target_namespace] = group.full_path end it 'returns an error' do expect(github_importer.execute(access_params, :github)).to include(taken_namespace_error) end end def size_limit_error { status: :error, http_status: :unprocessable_entity, message: '"repository" size (101 B) is larger than the limit of 100 B.' } end def blocked_url_error(url) { status: :error, http_status: :bad_request, message: "Invalid URL: #{url}" } end def not_existed_namespace_error { status: :error, http_status: :unprocessable_entity, message: 'Namespace or group to import repository into does not exist.' } end def taken_namespace_error { status: :error, http_status: :unprocessable_entity, message: 'You are not allowed to import projects in this namespace.' } end end
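The three size-limit contexts above all reduce to the precedence implemented in `repository_size_limit`: a positive namespace limit wins, otherwise the instance-wide setting applies, and a limit of zero disables the `oversized?` check entirely. A plain-Ruby sketch of that precedence:

```ruby
# Plain-Ruby sketch; the method name here is illustrative, not from the source.
def effective_limit(namespace_limit, instance_limit)
  namespace_limit.to_i > 0 ? namespace_limit.to_i : instance_limit.to_i
end

puts effective_limit(100, 0)   # namespace limit defined            => 100
puts effective_limit(nil, 100) # falls back to the instance setting => 100
puts effective_limit(0, 0)     # no limit anywhere                  => 0 (oversized? short-circuits)
```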
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true # Creates a new project with an associated project export file to be imported # The associated project export file might be associated with different strategies # to acquire the file to be imported, the default file_acquisition_strategy # is uploading a file (Import::GitlabProjects::FileAcquisitionStrategies::FileUpload) module Import module GitlabProjects class CreateProjectService include ActiveModel::Validations include ::Services::ReturnServiceResponses validates_presence_of :path, :namespace # Creates a new CreateProjectService. # # @param [User] current_user # @param [Hash] :params # @param [Import::GitlabProjects::FileAcquisitionStrategies::*] :file_acquisition_strategy def initialize(current_user, params:, file_acquisition_strategy: FileAcquisitionStrategies::FileUpload) @current_user = current_user @params = params.dup @strategy = file_acquisition_strategy.new(current_user: current_user, params: params) end # Creates a project with the strategy parameters # # @return [Services::ServiceReponse] def execute return error(errors.full_messages) unless valid? return error(project.errors.full_messages) unless project.saved? success(project) rescue StandardError => e error(e.message) end # Cascade the validation to strategy def valid? super && strategy.valid? end # Merge with strategy's errors def errors super.tap { _1.merge!(strategy.errors) } end def read_attribute_for_validation(key) params[key] end private attr_reader :current_user, :params, :strategy def error(messages) messages = Array.wrap(messages) message = messages.shift super(message, :bad_request, pass_back: { other_errors: messages }) end def project @project ||= ::Projects::GitlabProjectsImportService.new( current_user, project_params, params[:override] ).execute end def project_params { name: params[:name], path: params[:path], namespace_id: params[:namespace].id, overwrite: params[:overwrite], import_type: 'gitlab_project' }.merge(strategy.project_params) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ::Import::GitlabProjects::CreateProjectService, :aggregate_failures, feature_category: :importers do let(:fake_file_acquisition_strategy) do Class.new do attr_reader :errors def initialize(...) @errors = ActiveModel::Errors.new(self) end def valid? true end def project_params {} end end end let(:params) do { path: 'path', namespace: user.namespace, name: 'name' } end let_it_be(:user) { create(:user) } subject { described_class.new(user, params: params, file_acquisition_strategy: FakeStrategy) } before do stub_const('FakeStrategy', fake_file_acquisition_strategy) stub_application_setting(import_sources: ['gitlab_project']) end describe 'validation' do it { expect(subject).to be_valid } it 'validates presence of path' do params[:path] = nil invalid = described_class.new(user, params: params, file_acquisition_strategy: FakeStrategy) expect(invalid).not_to be_valid expect(invalid.errors.full_messages).to include("Path can't be blank") end it 'validates presence of name' do params[:namespace] = nil invalid = described_class.new(user, params: params, file_acquisition_strategy: FakeStrategy) expect(invalid).not_to be_valid expect(invalid.errors.full_messages).to include("Namespace can't be blank") end it 'is invalid if the strategy is invalid' do expect_next_instance_of(FakeStrategy) do |strategy| allow(strategy).to receive(:valid?).and_return(false) allow(strategy).to receive(:errors).and_wrap_original do |original| original.call.tap do |errors| errors.add(:base, "some error") end end end invalid = described_class.new(user, params: params, file_acquisition_strategy: FakeStrategy) expect(invalid).not_to be_valid expect(invalid.errors.full_messages).to include("some error") expect(invalid.errors.full_messages).to include("some error") end end describe '#execute' do it 'creates a project successfully' do response = nil expect { response = subject.execute } .to change(Project, :count).by(1) expect(response).to be_success expect(response.http_status).to eq(:ok) expect(response.payload).to be_instance_of(Project) expect(response.payload.name).to eq('name') expect(response.payload.path).to eq('path') expect(response.payload.namespace).to eq(user.namespace) project = Project.last expect(project.name).to eq('name') expect(project.path).to eq('path') expect(project.namespace_id).to eq(user.namespace.id) expect(project.import_type).to eq('gitlab_project') end context 'when the project creation raises an error' do it 'fails to create a project' do expect_next_instance_of(Projects::GitlabProjectsImportService) do |service| expect(service).to receive(:execute).and_raise(StandardError, "failed to create project") end response = nil expect { response = subject.execute } .to change(Project, :count).by(0) expect(response).to be_error expect(response.http_status).to eq(:bad_request) expect(response.message).to eq("failed to create project") expect(response.payload).to eq(other_errors: []) end end context 'when the validation fail' do it 'fails to create a project' do params.delete(:path) response = nil expect { response = subject.execute } .to change(Project, :count).by(0) expect(response).to be_error expect(response.http_status).to eq(:bad_request) expect(response.message).to eq("Path can't be blank") expect(response.payload).to eq(other_errors: []) end context 'when the project contains multiple errors' do it 'fails to create a project' do params.merge!(name: '_ an invalid name _', path: '_ an invalid path _') response = nil expect { response = 
subject.execute } .to change(Project, :count).by(0) expect(response).to be_error expect(response.http_status).to eq(:bad_request) expect(response.message).to eq( 'Project namespace path must not start or end with a special character and must not contain consecutive ' \ 'special characters.' ) expect(response.payload).to eq( other_errors: [ %(Project namespace path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'), %(Path can contain only letters, digits, '_', '-' and '.'. Cannot start with '-', end in '.git' or end in '.atom'), %(Path must not start or end with a special character and must not contain consecutive special characters.) ]) end end end context 'when the strategy adds project parameters' do before do expect_next_instance_of(FakeStrategy) do |strategy| expect(strategy).to receive(:project_params).and_return(name: 'the strategy name') end subject.valid? end it 'merges the strategy project parameters' do response = nil expect { response = subject.execute } .to change(Project, :count).by(1) expect(response).to be_success expect(response.http_status).to eq(:ok) expect(response.payload).to be_instance_of(Project) expect(response.payload.name).to eq('the strategy name') expect(response.payload.path).to eq('path') expect(response.payload.namespace).to eq(user.namespace) project = Project.last expect(project.name).to eq('the strategy name') expect(project.path).to eq('path') expect(project.namespace_id).to eq(user.namespace.id) expect(project.import_type).to eq('gitlab_project') end end end end
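The multi-error example above relies on how the private `error(messages)` override splits validation messages: the first one becomes the response message and the remainder travel in `payload[:other_errors]`. A plain-Ruby sketch of that split:

```ruby
# Plain-Ruby sketch of the message splitting; the messages are illustrative.
messages = [
  "Path can't be blank",
  "Namespace can't be blank"
]

message = messages.shift
payload = { other_errors: messages }

puts message # => "Path can't be blank"
p payload    # => {:other_errors=>["Namespace can't be blank"]}
```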
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import module GitlabProjects module FileAcquisitionStrategies class FileUpload include ActiveModel::Validations validate :uploaded_file def initialize(params:, current_user: nil) @params = params end def project_params @project_params ||= @params.slice(:file) end def file @file ||= @params[:file] end private def uploaded_file return if file.present? && file.is_a?(UploadedFile) errors.add(:file, 'must be uploaded') end end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::FileUpload, :aggregate_failures, feature_category: :importers do let(:file) { UploadedFile.new(File.join('spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz')) } describe 'validation' do it 'validates presence of file' do valid = described_class.new(params: { file: file }) expect(valid).to be_valid invalid = described_class.new(params: {}) expect(invalid).not_to be_valid expect(invalid.errors.full_messages).to include("File must be uploaded") end end describe '#project_params' do it 'returns the file to upload in the params' do subject = described_class.new(params: { file: file }) expect(subject.project_params).to eq(file: file) end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import module GitlabProjects module FileAcquisitionStrategies class RemoteFileS3 include ActiveModel::Validations include Gitlab::Utils::StrongMemoize def self.allow_local_requests? ::Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services? end validates_presence_of :region, :bucket_name, :file_key, :access_key_id, :secret_access_key validates :file_url, addressable_url: { schemes: %w[https], allow_localhost: allow_local_requests?, allow_local_network: allow_local_requests?, dns_rebind_protection: true } validates_with RemoteFileValidator # The import itself has a limit of 24h, since the URL is created before the import starts # we add an expiration a bit longer to ensure it won't expire during the import. URL_EXPIRATION = 28.hours.seconds def initialize(params:, current_user: nil) @params = params end def project_params @project_parms ||= { import_export_upload: ::ImportExportUpload.new(remote_import_url: file_url) } end def file_url @file_url ||= s3_object&.presigned_url(:get, expires_in: URL_EXPIRATION.to_i) end def content_type @content_type ||= s3_object&.content_type end def content_length @content_length ||= s3_object&.content_length.to_i end # Make the validated params/methods accessible def read_attribute_for_validation(key) return file_url if key == :file_url params[key] end private attr_reader :params def s3_object strong_memoize(:s3_object) do build_s3_options end end def build_s3_options object = Aws::S3::Object.new( params[:bucket_name], params[:file_key], client: Aws::S3::Client.new( region: params[:region], access_key_id: params[:access_key_id], secret_access_key: params[:secret_access_key] ) ) # Force validate if the object exists and is accessible # Some exceptions are only raised when trying to access the object data unless object.exists? errors.add(:base, "File not found '#{params[:file_key]}' in '#{params[:bucket_name]}'") return end object rescue StandardError => e errors.add(:base, "Failed to open '#{params[:file_key]}' in '#{params[:bucket_name]}': #{e.message}") nil end end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFileS3, :aggregate_failures, feature_category: :importers do let(:region_name) { 'region_name' } let(:bucket_name) { 'bucket_name' } let(:file_key) { 'file_key' } let(:access_key_id) { 'access_key_id' } let(:secret_access_key) { 'secret_access_key' } let(:file_exists) { true } let(:content_type) { 'application/x-tar' } let(:content_length) { 10.megabytes } let(:presigned_url) { 'https://external.file.path/file.tar.gz?PRESIGNED=true&TOKEN=some-token' } let(:s3_double) do instance_double( Aws::S3::Object, exists?: file_exists, content_type: content_type, content_length: content_length, presigned_url: presigned_url ) end let(:params) do { region: region_name, bucket_name: bucket_name, file_key: file_key, access_key_id: access_key_id, secret_access_key: secret_access_key } end subject { described_class.new(params: params) } before do # Avoid network requests expect(Aws::S3::Client).to receive(:new).and_return(double) expect(Aws::S3::Object).to receive(:new).and_return(s3_double) stub_application_setting(max_import_remote_file_size: 10) end describe 'validation' do it { expect(subject).to be_valid } %i[region bucket_name file_key access_key_id secret_access_key].each do |key| context "#{key} validation" do before do params[key] = nil end it "validates presence of #{key}" do expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("#{key.to_s.humanize} can't be blank") end end end context 'content-length validation' do let(:content_length) { 11.megabytes } it 'validates the remote content-length' do expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include('Content length is too big (should be at most 10 MiB)') end end context 'content-type validation' do let(:content_type) { 'unknown' } it 'validates the remote content-type' do expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("Content type 'unknown' not allowed. 
(Allowed: application/gzip, application/x-tar, application/x-gzip)") end end context 'file_url validation' do let(:presigned_url) { 'ftp://invalid.url/file.tar.gz' } it 'validates the file_url scheme' do expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("File url is blocked: Only allowed schemes are https") end context 'when localhost urls are not allowed' do let(:presigned_url) { 'https://localhost:3000/file.tar.gz' } it 'validates the file_url' do stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("File url is blocked: Requests to localhost are not allowed") end end end context 'when the remote file does not exist' do it 'adds a file-not-found error' do expect(s3_double).to receive(:exists?).and_return(false) expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("File not found 'file_key' in 'bucket_name'") end end context 'when it fails to build the s3 object' do it 'adds an error with the exception message' do expect(s3_double).to receive(:exists?).and_raise(StandardError, "some error") expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("Failed to open 'file_key' in 'bucket_name': some error") end end end describe '#project_params' do it 'returns import_export_upload in the params' do subject = described_class.new(params: { remote_import_url: presigned_url }) expect(subject.project_params).to match( import_export_upload: an_instance_of(::ImportExportUpload) ) expect(subject.project_params[:import_export_upload]).to have_attributes( remote_import_url: presigned_url ) end end end
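The content-length and content-type contexts above work because `RemoteFileValidator` reads its data through the strategy's `content_type` and `content_length` readers, which simply delegate to the S3 object's HEAD metadata. A hedged sketch making that delegation explicit, reusing the doubles defined in the spec:

```ruby
# Sketch only: the readers expose the metadata RemoteFileValidator consumes.
it 'exposes the S3 object metadata used by RemoteFileValidator' do
  expect(subject.content_type).to eq('application/x-tar')
  expect(subject.content_length).to eq(10.megabytes)
end
```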
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import module GitlabProjects module FileAcquisitionStrategies class RemoteFile include ActiveModel::Validations def self.allow_local_requests? ::Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services? end validates :file_url, addressable_url: { schemes: %w[https], allow_localhost: allow_local_requests?, allow_local_network: allow_local_requests?, dns_rebind_protection: true } validates_with RemoteFileValidator, if: -> { !s3_request? } def initialize(params:, current_user: nil) @params = params end def project_params @project_parms ||= { import_export_upload: ::ImportExportUpload.new(remote_import_url: file_url) } end def file_url @file_url ||= params[:remote_import_url] end def content_type @content_type ||= headers['content-type'] end def content_length @content_length ||= headers['content-length'].to_i end private attr_reader :params def s3_request? headers['Server'] == 'AmazonS3' && headers['x-amz-request-id'].present? end def headers return {} if file_url.blank? @headers ||= Gitlab::HTTP.head(file_url, timeout: 1.second).headers rescue StandardError => e errors.add(:base, "Failed to retrive headers: #{e.message}") {} end end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ::Import::GitlabProjects::FileAcquisitionStrategies::RemoteFile, :aggregate_failures, feature_category: :importers do let(:remote_url) { 'https://external.file.path/file.tar.gz' } let(:params) { { remote_import_url: remote_url } } subject { described_class.new(params: params) } before do stub_application_setting(max_import_remote_file_size: 10) stub_headers_for(remote_url, { 'content-length' => 10.megabytes, 'content-type' => 'application/gzip' }) end describe 'validation' do it { expect(subject).to be_valid } context 'file_url validation' do let(:remote_url) { 'ftp://invalid.url/file.tar.gz' } it 'validates the file_url scheme' do expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("File url is blocked: Only allowed schemes are https") end context 'when localhost urls are not allowed' do let(:remote_url) { 'https://localhost:3000/file.tar.gz' } it 'validates the file_url' do stub_application_setting(allow_local_requests_from_web_hooks_and_services: false) expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("File url is blocked: Requests to localhost are not allowed") end end end context 'when the HTTP request fails to recover the headers' do it 'adds the error message' do expect(Gitlab::HTTP) .to receive(:head) .and_raise(StandardError, 'request invalid') expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include('Failed to retrive headers: request invalid') end end context 'when request is not from an S3 server' do it 'validates the remote content-length' do stub_application_setting(max_import_remote_file_size: 1) expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include('Content length is too big (should be at most 1 MiB)') end it 'validates the remote content-type' do stub_headers_for(remote_url, { 'content-type' => 'unknown' }) expect(subject).not_to be_valid expect(subject.errors.full_messages) .to include("Content type 'unknown' not allowed. (Allowed: application/gzip, application/x-tar, application/x-gzip)") end end context 'when request is from an S3 server' do it 'does not validate the remote content-length or content-type' do stub_headers_for( remote_url, 'Server' => 'AmazonS3', 'x-amz-request-id' => 'some-id', 'content-length' => 11.gigabytes, 'content-type' => 'unknown' ) expect(subject).to be_valid end end end describe '#project_params' do it 'returns import_export_upload in the params' do subject = described_class.new(params: { remote_import_url: remote_url }) expect(subject.project_params).to match( import_export_upload: an_instance_of(::ImportExportUpload) ) expect(subject.project_params[:import_export_upload]).to have_attributes( remote_import_url: remote_url ) end end def stub_headers_for(url, headers = {}) allow(Gitlab::HTTP) .to receive(:head) .with(remote_url, timeout: 1.second) .and_return(double(headers: headers)) # rubocop: disable RSpec/VerifiedDoubles end end
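The S3 shortcut exercised in the last validation context comes down to the private `s3_request?` predicate on the response headers. A framework-free approximation of that check (`present?` is replaced with plain Ruby):

```ruby
# Plain-Ruby sketch of the S3 detection that skips size/type validation.
def s3_request?(headers)
  headers['Server'] == 'AmazonS3' && !headers['x-amz-request-id'].to_s.empty?
end

puts s3_request?('Server' => 'AmazonS3', 'x-amz-request-id' => 'some-id') # => true
puts s3_request?('Server' => 'nginx')                                     # => false
```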
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import module Github class GistsImportService < ::BaseService def initialize(user, client, params) @current_user = user @params = params @client = client end def execute return error('Import already in progress', 422) if import_status.started? check_user_token start_import success rescue Octokit::Unauthorized error('Access denied to the GitHub account.', 401) end private def import_status @import_status ||= Gitlab::GithubGistsImport::Status.new(current_user.id) end def encrypted_token Gitlab::CryptoHelper.aes256_gcm_encrypt(params[:github_access_token]) end def start_import Gitlab::GithubGistsImport::StartImportWorker.perform_async(current_user.id, encrypted_token) import_status.start! end def check_user_token @client.octokit.user.present? end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Import::Github::GistsImportService, feature_category: :importers do subject(:import) { described_class.new(user, client, params) } let_it_be(:user) { create(:user) } let(:params) { { github_access_token: 'token' } } let(:import_status) { instance_double('Gitlab::GithubGistsImport::Status') } let(:client) { Gitlab::GithubImport::Client.new(params[:github_access_token]) } let(:octokit_user) { { login: 'user_login' } } describe '#execute', :aggregate_failures do before do allow(Gitlab::GithubGistsImport::Status).to receive(:new).and_return(import_status) allow(client.octokit).to receive(:user).and_return(octokit_user) end context 'when import in progress' do let(:expected_result) do { status: :error, http_status: 422, message: 'Import already in progress' } end it 'returns error' do expect(import_status).to receive(:started?).and_return(true) expect(import.execute).to eq(expected_result) end end context 'when import was not started' do it 'returns success' do encrypted_token = Gitlab::CryptoHelper.aes256_gcm_encrypt(params[:github_access_token]) expect(import_status).to receive(:started?).and_return(false) expect(Gitlab::CryptoHelper) .to receive(:aes256_gcm_encrypt).with(params[:github_access_token]) .and_return(encrypted_token) expect(Gitlab::GithubGistsImport::StartImportWorker) .to receive(:perform_async).with(user.id, encrypted_token) expect(import_status).to receive(:start!) expect(import.execute).to eq({ status: :success }) end end context 'when user token is invalid' do before do allow(client.octokit).to receive(:user).and_raise(Octokit::Unauthorized) allow(import_status).to receive(:started?).and_return(false) end let(:expected_result) do { http_status: 401, message: 'Access denied to the GitHub account.', status: :error } end it 'returns 401 error' do expect(import.execute).to eq(expected_result) end end end end
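The spec above instantiates a real `Gitlab::GithubImport::Client` and stubs its `octokit` object. Because the service only reaches the client through `client.octokit.user`, a verified-double setup is another option. The sketch below is illustrative and relies only on `Gitlab::GithubImport::Client#octokit` and `Octokit::Client#user`, both of which the spec above already exercises.

```ruby
# Verified-double variant of the client chain (optional; the real-client setup
# above is equally valid). For the invalid-token context, re-stub the call:
#   allow(octokit).to receive(:user).and_raise(Octokit::Unauthorized)
let(:octokit) { instance_double(Octokit::Client, user: { login: 'user_login' }) }
let(:client)  { instance_double(Gitlab::GithubImport::Client, octokit: octokit) }
```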
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import module Github class CancelProjectImportService < ::BaseService def execute return error('Not Found', :not_found) unless authorized_to_read? return error('Unauthorized access', :forbidden) unless authorized_to_cancel? if project.import_state.completed? error(cannot_cancel_error_message, :bad_request) else project.import_state.cancel metrics.track_canceled_import success(project: project) end end private def authorized_to_read? can?(current_user, :read_project, project) end def authorized_to_cancel? can?(current_user, :owner_access, project) end def cannot_cancel_error_message format( _('The import cannot be canceled because it is %{project_status}'), project_status: project.import_state.status ) end def metrics @metrics ||= Gitlab::Import::Metrics.new(:github_importer, project) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Import::Github::CancelProjectImportService, feature_category: :importers do subject(:import_cancel) { described_class.new(project, project.owner) } let_it_be(:user) { create(:user) } let_it_be_with_reload(:project) { create(:project, :import_started, import_type: 'github', import_url: 'https://fake.url') } describe '.execute' do context 'when user is an owner' do context 'when import is in progress' do it 'update import state to be canceled' do expect(import_cancel.execute).to eq({ status: :success, project: project }) end it 'tracks canceled imports' do metrics_double = instance_double('Gitlab::Import::Metrics') expect(Gitlab::Import::Metrics) .to receive(:new) .with(:github_importer, project) .and_return(metrics_double) expect(metrics_double).to receive(:track_canceled_import) import_cancel.execute end end context 'when import is finished' do let(:expected_result) do { status: :error, http_status: :bad_request, message: 'The import cannot be canceled because it is finished' } end before do project.import_state.finish! end it 'returns error' do expect(import_cancel.execute).to eq(expected_result) end end end context 'when user is not allowed to read project' do it 'returns 404' do expect(described_class.new(project, user).execute) .to eq({ status: :error, http_status: :not_found, message: 'Not Found' }) end end context 'when user is not allowed to cancel project' do before do project.add_developer(user) end it 'returns 403' do expect(described_class.new(project, user).execute) .to eq({ status: :error, http_status: :forbidden, message: 'Unauthorized access' }) end end end end
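The 'update import state to be canceled' example above only asserts the returned hash. If you also want to pin the state-machine transition itself, a hedged extra example is sketched below; the state names assume `ProjectImportState`'s `started`/`canceled` states, which the `:import_started` trait and `import_state.cancel` rely on.

```ruby
# Optional additional assertion (illustrative): verify the transition on the
# import_state record, not just the service's return value.
it 'moves the import state from started to canceled' do
  expect { import_cancel.execute }
    .to change { project.import_state.reload.status }.from('started').to('canceled')
end
```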
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Import module Github module Notes class CreateService < ::Notes::CreateService # Github does not have support to quick actions in notes (like /assign) # Therefore, when importing notes we skip the quick actions processing def quick_actions_supported?(_note) false end end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Import::Github::Notes::CreateService, feature_category: :importers do it 'does not support quick actions' do project = create(:project, :repository) user = create(:user) merge_request = create(:merge_request, source_project: project) project.add_maintainer(user) note = described_class.new( project, user, note: '/close', noteable_type: 'MergeRequest', noteable_id: merge_request.id ).execute expect(note.note).to eq('/close') expect(note.noteable.closed?).to be(false) end end
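For contrast, the same note text sent through the parent `::Notes::CreateService` would be treated as a quick action, which is exactly what the subclass's `quick_actions_supported?` override disables. The sketch below is illustrative only, assumes the same `project`/`user`/`merge_request` setup as the example above, and its expectation reflects the usual quick-action behaviour rather than anything asserted by the original spec.

```ruby
# Hedged contrast sketch: with the parent service, '/close' is extracted and
# executed as a quick action instead of being stored verbatim.
it 'executes quick actions through the parent service' do
  described_class.superclass.new(
    project, user,
    note: '/close',
    noteable_type: 'MergeRequest',
    noteable_id: merge_request.id
  ).execute

  expect(merge_request.reload).to be_closed
end
```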
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module QuickActions class TargetService < BaseService def execute(type, type_iid) case type&.downcase when 'workitem' work_item(type_iid) when 'issue' issue(type_iid) when 'mergerequest' merge_request(type_iid) when 'commit' commit(type_iid) end end private # rubocop: disable CodeReuse/ActiveRecord def work_item(type_iid) WorkItems::WorkItemsFinder.new(current_user, project_id: project.id).find_by(iid: type_iid) end # rubocop: enable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord def issue(type_iid) return project.issues.build if type_iid.nil? IssuesFinder.new(current_user, project_id: project.id).find_by(iid: type_iid) || project.issues.build end # rubocop: enable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord def merge_request(type_iid) return project.merge_requests.build if type_iid.nil? MergeRequestsFinder.new(current_user, project_id: project.id).find_by(iid: type_iid) || project.merge_requests.build end # rubocop: enable CodeReuse/ActiveRecord def commit(type_iid) project.commit(type_iid) end end end QuickActions::TargetService.prepend_mod_with('QuickActions::TargetService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe QuickActions::TargetService, feature_category: :team_planning do let(:project) { create(:project) } let(:user) { create(:user) } let(:service) { described_class.new(project, user) } before do project.add_maintainer(user) end describe '#execute' do shared_examples 'no target' do |type_iid:| it 'returns nil' do target = service.execute(type, type_iid) expect(target).to be_nil end end shared_examples 'find target' do it 'returns the target' do found_target = service.execute(type, target_iid) expect(found_target).to eq(target) end end shared_examples 'build target' do |type_iid:| it 'builds a new target' do target = service.execute(type, type_iid) expect(target.project).to eq(project) expect(target).to be_new_record end end context 'for issue' do let(:target) { create(:issue, project: project) } let(:target_iid) { target.iid } let(:type) { 'Issue' } it_behaves_like 'find target' it_behaves_like 'build target', type_iid: nil it_behaves_like 'build target', type_iid: -1 end context 'for work item' do let(:target) { create(:work_item, :task, project: project) } let(:target_iid) { target.iid } let(:type) { 'WorkItem' } it_behaves_like 'find target' end context 'for merge request' do let(:target) { create(:merge_request, source_project: project) } let(:target_iid) { target.iid } let(:type) { 'MergeRequest' } it_behaves_like 'find target' it_behaves_like 'build target', type_iid: nil it_behaves_like 'build target', type_iid: -1 end context 'for commit' do let(:project) { create(:project, :repository) } let(:target) { project.commit.parent } let(:target_iid) { target.sha } let(:type) { 'Commit' } it_behaves_like 'find target' it_behaves_like 'no target', type_iid: 'invalid_sha' context 'with nil target_iid' do let(:target) { project.commit } let(:target_iid) { nil } it_behaves_like 'find target' end end context 'for unknown type' do let(:type) { 'unknown' } it_behaves_like 'no target', type_iid: :unused end end end
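A quick usage sketch of the service under test, summarizing the dispatch behaviour the shared examples cover: type matching is case-insensitive, unknown types return `nil`, and a missing or unmatched iid builds a new record for issues and merge requests. The constructor mirrors the spec above; the return values in the comments are indicative only.

```ruby
# Illustrative console-style usage (not part of the spec). Assumes `project`,
# `user`, and `issue` as set up above, with a repository for the commit lookups.
service = QuickActions::TargetService.new(project, user)

service.execute('Issue', issue.iid)            # existing issue
service.execute('issue', nil)                  # new, unsaved project issue
service.execute('MergeRequest', -1)            # new, unsaved merge request
service.execute('Commit', project.commit.sha)  # the commit
service.execute('commit', 'unknown-sha')       # nil
service.execute('unknown', 1)                  # nil
```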
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module QuickActions class InterpretService < BaseService include Gitlab::Utils::StrongMemoize include Gitlab::QuickActions::Dsl include Gitlab::QuickActions::IssueActions include Gitlab::QuickActions::IssuableActions include Gitlab::QuickActions::IssueAndMergeRequestActions include Gitlab::QuickActions::MergeRequestActions include Gitlab::QuickActions::CommitActions include Gitlab::QuickActions::CommonActions include Gitlab::QuickActions::RelateActions include Gitlab::QuickActions::WorkItemActions attr_reader :quick_action_target # Counts how many commands have been executed. # Used to display relevant feedback on UI when a note # with only commands has been processed. attr_accessor :commands_executed_count # Takes an quick_action_target and returns an array of all the available commands # represented with .to_h def available_commands(quick_action_target) @quick_action_target = quick_action_target self.class.command_definitions.map do |definition| next unless definition.available?(self) definition.to_h(self) end.compact end # Takes a text and interprets the commands that are extracted from it. # Returns the content without commands, a hash of changes to be applied to a record # and a string containing the execution_message to show to the user. def execute(content, quick_action_target, only: nil) return [content, {}, ''] unless current_user.can?(:use_quick_actions) @quick_action_target = quick_action_target @updates = {} @execution_message = {} content, commands = extractor.extract_commands(content, only: only) extract_updates(commands) [content, @updates, execution_messages_for(commands), command_names(commands)] end # Takes a text and interprets the commands that are extracted from it. # Returns the content without commands, and array of changes explained. # `keep_actions: true` will keep the quick actions in the content. def explain(content, quick_action_target, keep_actions: false) return [content, []] unless current_user.can?(:use_quick_actions) @quick_action_target = quick_action_target content, commands = extractor(keep_actions).extract_commands(content) commands = explain_commands(commands) [content, commands] end private def failed_parse(message) raise Gitlab::QuickActions::CommandDefinition::ParseError, message end def extractor(keep_actions = false) Gitlab::QuickActions::Extractor.new(self.class.command_definitions, keep_actions: keep_actions) end # Find users for commands like /assign # # eg. /assign me and @jane and jack def extract_users(params) Gitlab::QuickActions::UsersExtractor .new(current_user, project: project, group: group, target: quick_action_target, text: params) .execute rescue Gitlab::QuickActions::UsersExtractor::Error => err extract_users_failed(err) end def extract_users_failed(err) case err when Gitlab::QuickActions::UsersExtractor::MissingError failed_parse(format(_("Failed to find users for %{missing}"), missing: err.message)) when Gitlab::QuickActions::UsersExtractor::TooManyRefsError failed_parse(format(_('Too many references. Quick actions are limited to at most %{max_count} user references'), max_count: err.limit)) when Gitlab::QuickActions::UsersExtractor::TooManyFoundError failed_parse(format(_("Too many users found. 
Quick actions are limited to at most %{max_count} users"), max_count: err.limit)) else Gitlab::ErrorTracking.track_and_raise_for_dev_exception(err) failed_parse(_('Something went wrong')) end end def find_milestones(project, params = {}) group_ids = project.group.self_and_ancestors.select(:id) if project.group MilestonesFinder.new(params.merge(project_ids: [project.id], group_ids: group_ids)).execute end def parent project || group end def group strong_memoize(:group) do quick_action_target.group if quick_action_target.respond_to?(:group) end end def find_labels(labels_params = nil) extract_references(labels_params, :label) | find_labels_by_name_no_tilde(labels_params) end def find_labels_by_name_no_tilde(labels_params) return Label.none if label_with_tilde?(labels_params) finder_params = { include_ancestor_groups: true } finder_params[:project_id] = project.id if project finder_params[:group_id] = group.id if group finder_params[:name] = extract_label_names(labels_params) if labels_params LabelsFinder.new(current_user, finder_params).execute end def label_with_tilde?(labels_params) labels_params&.include?('~') end def extract_label_names(labels_params) # '"A" "A B C" A B' => ["A", "A B C", "A", "B"] labels_params.scan(/"([^"]+)"|([^ ]+)/).flatten.compact end def find_label_references(labels_param, format = :id) labels_to_reference(find_labels(labels_param), format) end def labels_to_reference(labels, format = :id) labels.map { |l| l.to_reference(format: format) } end def find_label_ids(labels_param) find_labels(labels_param).map(&:id) end def explain_commands(commands) map_commands(commands, :explain) end def execution_messages_for(commands) map_commands(commands, :execute_message).join(' ') end def map_commands(commands, method) commands.map do |name_or_alias, arg| definition = self.class.definition_by_name(name_or_alias) next unless definition case method when :explain definition.explain(self, arg) when :execute_message @execution_message[definition.name.to_sym] || definition.execute_message(self, arg) end end.compact end def command_names(commands) commands.flatten.map do |name| definition = self.class.definition_by_name(name) next unless definition name end.compact end def extract_updates(commands) commands.each do |name, arg| definition = self.class.definition_by_name(name) next unless definition definition.execute(self, arg) usage_ping_tracking(definition.name, arg) end end # rubocop: disable CodeReuse/ActiveRecord def extract_references(arg, type) return [] unless arg ext = Gitlab::ReferenceExtractor.new(project, current_user) ext.analyze(arg, author: current_user, group: group) ext.references(type) end # rubocop: enable CodeReuse/ActiveRecord def usage_ping_tracking(quick_action_name, arg) Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter.track_unique_action( quick_action_name.to_s, args: arg&.strip, user: current_user ) end def can?(ability, object) Ability.allowed?(current_user, ability, object) end end end QuickActions::InterpretService.prepend_mod_with('QuickActions::InterpretService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe QuickActions::InterpretService, feature_category: :team_planning do include AfterNextHelpers let_it_be(:group) { create(:group, :crm_enabled) } let_it_be(:public_project) { create(:project, :public, group: group) } let_it_be(:repository_project) { create(:project, :repository) } let_it_be(:project) { public_project } let_it_be(:developer) { create(:user) } let_it_be(:developer2) { create(:user) } let_it_be(:developer3) { create(:user) } let_it_be_with_reload(:issue) { create(:issue, project: project) } let_it_be(:inprogress) { create(:label, project: project, title: 'In Progress') } let_it_be(:helmchart) { create(:label, project: project, title: 'Helm Chart Registry') } let_it_be(:bug) { create(:label, project: project, title: 'Bug') } let(:milestone) { create(:milestone, project: project, title: '9.10') } let(:commit) { create(:commit, project: project) } let(:current_user) { developer } subject(:service) { described_class.new(project, current_user) } before_all do public_project.add_developer(developer) repository_project.add_developer(developer) end before do stub_licensed_features( multiple_issue_assignees: false, multiple_merge_request_reviewers: false, multiple_merge_request_assignees: false ) end describe '#execute' do let_it_be(:work_item) { create(:work_item, :task, project: project) } let(:merge_request) { create(:merge_request, source_project: project) } shared_examples 'reopen command' do it 'returns state_event: "reopen" if content contains /reopen' do issuable.close! _, updates, _ = service.execute(content, issuable) expect(updates).to eq(state_event: 'reopen') end it 'returns the reopen message' do issuable.close! _, _, message = service.execute(content, issuable) expect(message).to eq("Reopened this #{issuable.to_ability_name.humanize(capitalize: false)}.") end end shared_examples 'close command' do it 'returns state_event: "close" if content contains /close' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(state_event: 'close') end it 'returns the close message' do _, _, message = service.execute(content, issuable) expect(message).to eq("Closed this #{issuable.to_ability_name.humanize(capitalize: false)}.") end end shared_examples 'title command' do it 'populates title: "A brand new title" if content contains /title A brand new title' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(title: 'A brand new title') end it 'returns the title message' do _, _, message = service.execute(content, issuable) expect(message).to eq(%(Changed the title to "A brand new title".)) end end shared_examples 'milestone command' do it 'fetches milestone and populates milestone_id if content contains /milestone' do milestone # populate the milestone _, updates, _ = service.execute(content, issuable) expect(updates).to eq(milestone_id: milestone.id) end it 'returns the milestone message' do milestone # populate the milestone _, _, message = service.execute(content, issuable) expect(message).to eq("Set the milestone to #{milestone.to_reference}.") end it 'returns empty milestone message when milestone is wrong' do _, _, message = service.execute('/milestone %wrong-milestone', issuable) expect(message).to be_empty end end shared_examples 'remove_milestone command' do it 'populates milestone_id: nil if content contains /remove_milestone' do issuable.update!(milestone_id: milestone.id) _, updates, _ = service.execute(content, issuable) expect(updates).to eq(milestone_id: nil) end 
it 'returns removed milestone message' do issuable.update!(milestone_id: milestone.id) _, _, message = service.execute(content, issuable) expect(message).to eq("Removed #{milestone.to_reference} milestone.") end end shared_examples 'label command' do it 'fetches label ids and populates add_label_ids if content contains /label' do bug # populate the label inprogress # populate the label _, updates, _ = service.execute(content, issuable) expect(updates).to match(add_label_ids: contain_exactly(bug.id, inprogress.id)) end it 'returns the label message' do bug # populate the label inprogress # populate the label _, _, message = service.execute(content, issuable) # Compare message without making assumptions about ordering. expect(message).to match %r{Added ~".*" ~".*" labels.} expect(message).to include(bug.to_reference(format: :name)) expect(message).to include(inprogress.to_reference(format: :name)) end end shared_examples 'multiple label command' do it 'fetches label ids and populates add_label_ids if content contains multiple /label' do bug # populate the label inprogress # populate the label _, updates, _ = service.execute(content, issuable) expect(updates).to eq(add_label_ids: [inprogress.id, bug.id]) end end shared_examples 'multiple label with same argument' do it 'prevents duplicate label ids and populates add_label_ids if content contains multiple /label' do inprogress # populate the label _, updates, _ = service.execute(content, issuable) expect(updates).to eq(add_label_ids: [inprogress.id]) end end shared_examples 'multiword label name starting without ~' do it 'fetches label ids and populates add_label_ids if content contains /label' do _, updates = service.execute(content, issuable) expect(updates).to eq(add_label_ids: [helmchart.id]) end end shared_examples 'label name is included in the middle of another label name' do it 'ignores the sublabel when the content contains the includer label name' do create(:label, project: project, title: 'Chart') _, updates = service.execute(content, issuable) expect(updates).to eq(add_label_ids: [helmchart.id]) end end shared_examples 'unlabel command' do it 'fetches label ids and populates remove_label_ids if content contains /unlabel' do issuable.update!(label_ids: [inprogress.id]) # populate the label _, updates, _ = service.execute(content, issuable) expect(updates).to eq(remove_label_ids: [inprogress.id]) end it 'returns the unlabel message' do issuable.update!(label_ids: [inprogress.id]) # populate the label _, _, message = service.execute(content, issuable) expect(message).to eq("Removed #{inprogress.to_reference(format: :name)} label.") end end shared_examples 'multiple unlabel command' do it 'fetches label ids and populates remove_label_ids if content contains mutiple /unlabel' do issuable.update!(label_ids: [inprogress.id, bug.id]) # populate the label _, updates, _ = service.execute(content, issuable) expect(updates).to eq(remove_label_ids: [inprogress.id, bug.id]) end end shared_examples 'unlabel command with no argument' do it 'populates label_ids: [] if content contains /unlabel with no arguments' do issuable.update!(label_ids: [inprogress.id]) # populate the label _, updates, _ = service.execute(content, issuable) expect(updates).to eq(label_ids: []) end end shared_examples 'relabel command' do it 'populates label_ids: [] if content contains /relabel' do issuable.update!(label_ids: [bug.id]) # populate the label inprogress # populate the label _, updates, _ = service.execute(content, issuable) expect(updates).to eq(label_ids: 
[inprogress.id]) end it 'returns the relabel message' do issuable.update!(label_ids: [bug.id]) # populate the label inprogress # populate the label _, _, message = service.execute(content, issuable) expect(message).to eq("Replaced all labels with #{inprogress.to_reference(format: :name)} label.") end end shared_examples 'todo command' do it 'populates todo_event: "add" if content contains /todo' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(todo_event: 'add') end it 'returns the todo message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Added a to do.') end end shared_examples 'done command' do it 'populates todo_event: "done" if content contains /done' do TodoService.new.mark_todo(issuable, developer) _, updates, _ = service.execute(content, issuable) expect(updates).to eq(todo_event: 'done') end it 'returns the done message' do TodoService.new.mark_todo(issuable, developer) _, _, message = service.execute(content, issuable) expect(message).to eq('Marked to do as done.') end end shared_examples 'subscribe command' do it 'populates subscription_event: "subscribe" if content contains /subscribe' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(subscription_event: 'subscribe') end it 'returns the subscribe message' do _, _, message = service.execute(content, issuable) expect(message).to eq("Subscribed to this #{issuable.to_ability_name.humanize(capitalize: false)}.") end end shared_examples 'unsubscribe command' do it 'populates subscription_event: "unsubscribe" if content contains /unsubscribe' do issuable.subscribe(developer, project) _, updates, _ = service.execute(content, issuable) expect(updates).to eq(subscription_event: 'unsubscribe') end it 'returns the unsubscribe message' do issuable.subscribe(developer, project) _, _, message = service.execute(content, issuable) expect(message).to eq("Unsubscribed from this #{issuable.to_ability_name.humanize(capitalize: false)}.") end end shared_examples 'due command' do let(:expected_date) { Date.new(2016, 8, 28) } it 'populates due_date: Date.new(2016, 8, 28) if content contains /due 2016-08-28' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(due_date: expected_date) end it 'returns due_date message: Date.new(2016, 8, 28) if content contains /due 2016-08-28' do _, _, message = service.execute(content, issuable) expect(message).to eq("Set the due date to #{expected_date.to_fs(:medium)}.") end end shared_examples 'remove_due_date command' do before do issuable.update!(due_date: Date.today) end it 'populates due_date: nil if content contains /remove_due_date' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(due_date: nil) end it 'returns Removed the due date' do _, _, message = service.execute(content, issuable) expect(message).to eq('Removed the due date.') end end shared_examples 'draft command' do it 'returns wip_event: "draft"' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(wip_event: 'draft') end it 'returns the draft message' do _, _, message = service.execute(content, issuable) expect(message).to eq("Marked this #{issuable.to_ability_name.humanize(capitalize: false)} as a draft.") end end shared_examples 'draft/ready command no action' do it 'returns the no action message if there is no change to the status' do _, _, message = service.execute(content, issuable) expect(message).to eq("No change to this #{issuable.to_ability_name.humanize(capitalize: false)}'s draft 
status.") end end shared_examples 'ready command' do it 'returns wip_event: "ready"' do issuable.update!(title: issuable.draft_title) _, updates, _ = service.execute(content, issuable) expect(updates).to eq(wip_event: 'ready') end it 'returns the ready message' do issuable.update!(title: issuable.draft_title) _, _, message = service.execute(content, issuable) expect(message).to eq("Marked this #{issuable.to_ability_name.humanize(capitalize: false)} as ready.") end end shared_examples 'estimate command' do it 'populates time_estimate: 3600 if content contains /estimate 1h' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(time_estimate: 3600) end it 'returns the time_estimate formatted message' do _, _, message = service.execute('/estimate 79d', issuable) expect(message).to eq('Set time estimate to 3mo 3w 4d.') end end shared_examples 'spend command' do it 'populates spend_time: 3600 if content contains /spend 1h' do freeze_time do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(spend_time: { duration: 3600, user_id: developer.id, spent_at: DateTime.current }) end end end shared_examples 'spend command with negative time' do it 'populates spend_time: -7200 if content contains -120m' do freeze_time do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(spend_time: { duration: -7200, user_id: developer.id, spent_at: DateTime.current }) end end it 'returns the spend_time message including the formatted duration and verb' do _, _, message = service.execute(content, issuable) expect(message).to eq('Subtracted 2h spent time.') end end shared_examples 'spend command with valid date' do it 'populates spend time: 1800 with date in date type format' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(spend_time: { duration: 1800, user_id: developer.id, spent_at: Date.parse(date) }) end end shared_examples 'spend command with invalid date' do it 'will not create any note and timelog' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq({}) end end shared_examples 'spend command with future date' do it 'will not create any note and timelog' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq({}) end end shared_examples 'remove_estimate command' do it 'populates time_estimate: 0 if content contains /remove_estimate' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(time_estimate: 0) end it 'returns the remove_estimate message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Removed time estimate.') end end shared_examples 'remove_time_spent command' do it 'populates spend_time: :reset if content contains /remove_time_spent' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(spend_time: { duration: :reset, user_id: developer.id }) end it 'returns the remove_time_spent message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Removed spent time.') end end shared_examples 'lock command' do let(:issue) { create(:issue, project: project, discussion_locked: false) } let(:merge_request) { create(:merge_request, source_project: project, discussion_locked: false) } it 'returns discussion_locked: true if content contains /lock' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(discussion_locked: true) end it 'returns the lock discussion message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Locked the 
discussion.') end end shared_examples 'unlock command' do let(:issue) { create(:issue, project: project, discussion_locked: true) } let(:merge_request) { create(:merge_request, source_project: project, discussion_locked: true) } it 'returns discussion_locked: true if content contains /unlock' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(discussion_locked: false) end it 'returns the unlock discussion message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Unlocked the discussion.') end end shared_examples 'failed command' do |error_msg| let(:match_msg) { error_msg ? eq(error_msg) : be_empty } it 'populates {} if content contains an unsupported command' do _, updates, _ = service.execute(content, issuable) expect(updates).to be_empty end it "returns #{error_msg || 'an empty'} message" do _, _, message = service.execute(content, issuable) expect(message).to match_msg end end shared_examples 'merge immediately command' do let(:project) { repository_project } it 'runs merge command if content contains /merge' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(merge: merge_request.diff_head_sha) end it 'returns them merge message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Merged this merge request.') end end shared_examples 'merge automatically command' do let(:project) { repository_project } before do stub_licensed_features(merge_request_approvers: true) if Gitlab.ee? end it 'runs merge command if content contains /merge and returns merge message' do _, updates, message = service.execute(content, issuable) expect(updates).to eq(merge: merge_request.diff_head_sha) if Gitlab.ee? expect(message).to eq('Scheduled to merge this merge request (Merge when checks pass).') else expect(message).to eq('Scheduled to merge this merge request (Merge when pipeline succeeds).') end end end shared_examples 'award command' do it 'toggle award 100 emoji if content contains /award :100:' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(emoji_award: "100") end it 'returns the award message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Toggled :100: emoji award.') end end shared_examples 'duplicate command' do it 'fetches issue and populates canonical_issue_id if content contains /duplicate issue_reference' do issue_duplicate # populate the issue _, updates, _ = service.execute(content, issuable) expect(updates).to eq(canonical_issue_id: issue_duplicate.id) end it 'returns the duplicate message' do _, _, message = service.execute(content, issuable) expect(message).to eq("Closed this issue. Marked as related to, and a duplicate of, #{issue_duplicate.to_reference(project)}.") end it 'includes duplicate reference' do _, explanations = service.explain(content, issuable) expect(explanations).to eq(["Closes this issue. 
Marks as related to, and a duplicate of, #{issue_duplicate.to_reference(project)}."]) end end shared_examples 'copy_metadata command' do it 'fetches issue or merge request and copies labels and milestone if content contains /copy_metadata reference' do source_issuable # populate the issue todo_label # populate this label inreview_label # populate this label _, updates, _ = service.execute(content, issuable) expect(updates[:add_label_ids]).to match_array([inreview_label.id, todo_label.id]) if source_issuable.milestone expect(updates[:milestone_id]).to eq(source_issuable.milestone.id) else expect(updates).not_to have_key(:milestone_id) end end it 'returns the copy metadata message' do _, _, message = service.execute("/copy_metadata #{source_issuable.to_reference}", issuable) expect(message).to eq("Copied labels and milestone from #{source_issuable.to_reference}.") end end describe 'move issue command' do it 'returns the move issue message' do _, _, message = service.execute("/move #{project.full_path}", issue) expect(message).to eq("Moved this issue to #{project.full_path}.") end it 'returns move issue failure message when the referenced issue is not found' do _, _, message = service.execute('/move invalid', issue) expect(message).to eq(_("Failed to move this issue because target project doesn't exist.")) end end shared_examples 'confidential command' do it 'marks issue as confidential if content contains /confidential' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(confidential: true) end it 'returns the confidential message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Made this issue confidential.') end context 'when issuable is already confidential' do before do issuable.update!(confidential: true) end it 'returns an error message' do _, _, message = service.execute(content, issuable) expect(message).to eq('Could not apply confidential command.') end it 'is not part of the available commands' do expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :confidential)) end end end shared_examples 'approve command unavailable' do it 'is not part of the available commands' do expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :approve)) end end shared_examples 'unapprove command unavailable' do it 'is not part of the available commands' do expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :unapprove)) end end shared_examples 'shrug command' do it 'appends ¯\_(ツ)_/¯ to the comment' do new_content, _, _ = service.execute(content, issuable) expect(new_content).to end_with(described_class::SHRUG) end end shared_examples 'tableflip command' do it 'appends (╯°□°)╯︵ ┻━┻ to the comment' do new_content, _, _ = service.execute(content, issuable) expect(new_content).to end_with(described_class::TABLEFLIP) end end shared_examples 'tag command' do it 'tags a commit' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(tag_name: tag_name, tag_message: tag_message) end it 'returns the tag message' do _, _, message = service.execute(content, issuable) if tag_message.present? 
expect(message).to eq(%(Tagged this commit to #{tag_name} with "#{tag_message}".)) else expect(message).to eq("Tagged this commit to #{tag_name}.") end end end shared_examples 'assign command' do it 'assigns to me' do cmd = '/assign me' _, updates, _ = service.execute(cmd, issuable) expect(updates).to eq(assignee_ids: [current_user.id]) end it 'does not assign to group members' do grp = create(:group) grp.add_developer(developer) grp.add_developer(developer2) cmd = "/assign #{grp.to_reference}" _, updates, message = service.execute(cmd, issuable) expect(updates).to be_blank expect(message).to include('Failed to find users') end context 'when there are too many references' do before do stub_const('Gitlab::QuickActions::UsersExtractor::MAX_QUICK_ACTION_USERS', 2) end it 'says what went wrong' do cmd = '/assign her and you, me and them' _, updates, message = service.execute(cmd, issuable) expect(updates).to be_blank expect(message).to include('Too many references. Quick actions are limited to at most 2 user references') end end context 'when the user extractor raises an uninticipated error' do before do allow_next(Gitlab::QuickActions::UsersExtractor) .to receive(:execute).and_raise(Gitlab::QuickActions::UsersExtractor::Error) end it 'tracks the exception in dev, and reports a generic message in production' do expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).twice _, updates, message = service.execute('/assign some text', issuable) expect(updates).to be_blank expect(message).to include('Something went wrong') end end it 'assigns to users with escaped underscores' do user = create(:user) base = user.username user.update!(username: "#{base}_new") issuable.project.add_developer(user) cmd = "/assign @#{base}\\_new" _, updates, _ = service.execute(cmd, issuable) expect(updates).to eq(assignee_ids: [user.id]) end it 'assigns to a single user' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(assignee_ids: [developer.id]) end it 'returns the assign message' do _, _, message = service.execute(content, issuable) expect(message).to eq("Assigned #{developer.to_reference}.") end context 'when the reference does not match the exact case' do let(:user) { create(:user) } let(:content) { "/assign #{user.to_reference.upcase}" } it 'assigns to the user' do issuable.project.add_developer(user) _, updates, message = service.execute(content, issuable) expect(content).not_to include(user.to_reference) expect(updates).to eq(assignee_ids: [user.id]) expect(message).to eq("Assigned #{user.to_reference}.") end end context 'when the user has a private profile' do let(:user) { create(:user, :private_profile) } let(:content) { "/assign #{user.to_reference}" } it 'assigns to the user' do issuable.project.add_developer(user) _, updates, message = service.execute(content, issuable) expect(updates).to eq(assignee_ids: [user.id]) expect(message).to eq("Assigned #{user.to_reference}.") end end end shared_examples 'assign_reviewer command' do it 'assigns a reviewer to a single user' do _, updates, message = service.execute(content, issuable) expect(updates).to eq(reviewer_ids: [developer.id]) expect(message).to eq("Assigned #{developer.to_reference} as reviewer.") end end shared_examples 'unassign_reviewer command' do it 'removes a single reviewer' do _, updates, message = service.execute(content, issuable) expect(updates).to eq(reviewer_ids: []) expect(message).to eq("Removed reviewer #{developer.to_reference}.") end end it_behaves_like 'reopen command' do let(:content) { 
'/reopen' } let(:issuable) { issue } end it_behaves_like 'reopen command' do let(:content) { '/reopen' } let(:issuable) { merge_request } end it_behaves_like 'close command' do let(:content) { '/close' } let(:issuable) { issue } end it_behaves_like 'close command' do let(:content) { '/close' } let(:issuable) { merge_request } end context 'merge command' do let(:service) { described_class.new(project, developer, { merge_request_diff_head_sha: merge_request.diff_head_sha }) } let(:merge_request) { create(:merge_request, source_project: repository_project) } it_behaves_like 'merge immediately command' do let(:content) { '/merge' } let(:issuable) { merge_request } end context 'when the head pipeline of merge request is running' do before do create(:ci_pipeline, :detached_merge_request_pipeline, merge_request: merge_request) merge_request.update_head_pipeline end it_behaves_like 'merge automatically command' do let(:content) { '/merge' } let(:issuable) { merge_request } end end context 'can not be merged when logged user does not have permissions' do let(:service) { described_class.new(project, create(:user)) } it_behaves_like 'failed command', 'Could not apply merge command.' do let(:content) { "/merge" } let(:issuable) { merge_request } end end context 'can not be merged when sha does not match' do let(:service) { described_class.new(project, developer, { merge_request_diff_head_sha: 'othersha' }) } it_behaves_like 'failed command', 'Branch has been updated since the merge was requested.' do let(:content) { "/merge" } let(:issuable) { merge_request } end end context 'when sha is missing' do let(:project) { repository_project } let(:service) { described_class.new(project, developer, {}) } it_behaves_like 'failed command', 'The `/merge` quick action requires the SHA of the head of the branch.' do let(:content) { "/merge" } let(:issuable) { merge_request } end end context 'issue can not be merged' do it_behaves_like 'failed command', 'Could not apply merge command.' do let(:content) { "/merge" } let(:issuable) { issue } end end context 'non persisted merge request cant be merged' do it_behaves_like 'failed command', 'Could not apply merge command.' do let(:content) { "/merge" } let(:issuable) { build(:merge_request) } end end context 'not persisted merge request can not be merged' do it_behaves_like 'failed command', 'Could not apply merge command.' do let(:content) { "/merge" } let(:issuable) { build(:merge_request, source_project: project) } end end end it_behaves_like 'title command' do let(:content) { '/title A brand new title' } let(:issuable) { issue } end it_behaves_like 'title command' do let(:content) { '/title A brand new title' } let(:issuable) { merge_request } end it_behaves_like 'failed command' do let(:content) { '/title' } let(:issuable) { issue } end context 'assign command with one user' do it_behaves_like 'assign command' do let(:content) { "/assign @#{developer.username}" } let(:issuable) { issue } end it_behaves_like 'assign command' do let(:content) { "/assign @#{developer.username}" } let(:issuable) { create(:incident, project: project) } end it_behaves_like 'assign command' do let(:content) { "/assign @#{developer.username}" } let(:issuable) { merge_request } end end # CE does not have multiple assignees context 'assign command with multiple assignees' do before do project.add_developer(developer2) end # There's no guarantee that the reference extractor will preserve # the order of the mentioned users since this is dependent on the # order in which rows are returned. 
We just ensure that at least # one of the mentioned users is assigned. shared_examples 'assigns to one of the two users' do let(:content) { "/assign @#{developer.username} @#{developer2.username}" } it 'assigns to a single user' do _, updates, message = service.execute(content, issuable) expect(updates[:assignee_ids].count).to eq(1) assignee = updates[:assignee_ids].first expect([developer.id, developer2.id]).to include(assignee) user = assignee == developer.id ? developer : developer2 expect(message).to match("Assigned #{user.to_reference}.") end end it_behaves_like 'assigns to one of the two users' do let(:content) { "/assign @#{developer.username} @#{developer2.username}" } let(:issuable) { issue } end it_behaves_like 'assigns to one of the two users' do let(:content) { "/assign @#{developer.username} @#{developer2.username}" } let(:issuable) { merge_request } end end context 'assign command with me alias' do it_behaves_like 'assign command' do let(:content) { '/assign me' } let(:issuable) { issue } end it_behaves_like 'assign command' do let(:content) { '/assign me' } let(:issuable) { merge_request } end end context 'assign command with me alias and whitespace' do it_behaves_like 'assign command' do let(:content) { '/assign me ' } let(:issuable) { issue } end it_behaves_like 'assign command' do let(:content) { '/assign me ' } let(:issuable) { merge_request } end end it_behaves_like 'failed command', 'a parse error' do let(:content) { '/assign @abcd1234' } let(:issuable) { issue } let(:match_msg) { eq "Could not apply assign command. Failed to find users for '@abcd1234'." } end it_behaves_like 'failed command', "Failed to assign a user because no user was found." do let(:content) { '/assign' } let(:issuable) { issue } end describe 'assign_reviewer command' do let(:content) { "/assign_reviewer @#{developer.username}" } let(:issuable) { merge_request } context 'with one user' do it_behaves_like 'assign_reviewer command' end context 'with an issue instead of a merge request' do let(:issuable) { issue } it_behaves_like 'failed command', 'Could not apply assign_reviewer command.' end # CE does not have multiple reviewers context 'assign command with multiple assignees' do before do project.add_developer(developer2) end # There's no guarantee that the reference extractor will preserve # the order of the mentioned users since this is dependent on the # order in which rows are returned. We just ensure that at least # one of the mentioned users is assigned. context 'assigns to one of the two users' do let(:content) { "/assign_reviewer @#{developer.username} @#{developer2.username}" } it 'assigns to a single reviewer' do _, updates, message = service.execute(content, issuable) expect(updates[:reviewer_ids].count).to eq(1) reviewer = updates[:reviewer_ids].first expect([developer.id, developer2.id]).to include(reviewer) user = reviewer == developer.id ? developer : developer2 expect(message).to match("Assigned #{user.to_reference} as reviewer.") end end end context 'with "me" alias' do let(:content) { '/assign_reviewer me' } it_behaves_like 'assign_reviewer command' end context 'with an alias and whitespace' do let(:content) { '/assign_reviewer me ' } it_behaves_like 'assign_reviewer command' end context 'with @all' do let(:content) { "/assign_reviewer @all" } it_behaves_like 'failed command', 'a parse error' do let(:match_msg) { eq "Could not apply assign_reviewer command. Failed to find users for '@all'." 
} end end context 'with an incorrect user' do let(:content) { '/assign_reviewer @abcd1234' } it_behaves_like 'failed command', 'a parse error' do let(:match_msg) { eq "Could not apply assign_reviewer command. Failed to find users for '@abcd1234'." } end end context 'with the "reviewer" alias' do let(:content) { "/reviewer @#{developer.username}" } it_behaves_like 'assign_reviewer command' end context 'with the "request_review" alias' do let(:content) { "/request_review @#{developer.username}" } it_behaves_like 'assign_reviewer command' end context 'with no user' do let(:content) { '/assign_reviewer' } it_behaves_like 'failed command', "Failed to assign a reviewer because no user was specified." end context 'with extra text' do let(:content) { "/assign_reviewer #{developer.to_reference} do it!" } it_behaves_like 'failed command', 'a parse error' do let(:match_msg) { eq "Could not apply assign_reviewer command. Failed to find users for 'do' and 'it!'." } end end end describe 'unassign_reviewer command' do # CE does not have multiple reviewers, so basically anything # after /unassign_reviewer (including whitespace) will remove # all the current reviewers. let(:issuable) { create(:merge_request, reviewers: [developer]) } let(:content) { "/unassign_reviewer @#{developer.username}" } context 'with one user' do it_behaves_like 'unassign_reviewer command' end context 'with an issue instead of a merge request' do let(:issuable) { issue } it_behaves_like 'failed command', 'Could not apply unassign_reviewer command.' end context 'with anything after the command' do let(:content) { '/unassign_reviewer supercalifragilisticexpialidocious' } it_behaves_like 'unassign_reviewer command' end context 'with the "remove_reviewer" alias' do let(:content) { "/remove_reviewer @#{developer.username}" } it_behaves_like 'unassign_reviewer command' end context 'with no user' do let(:content) { '/unassign_reviewer' } it_behaves_like 'unassign_reviewer command' end end context 'unassign command' do let(:content) { '/unassign' } context 'Issue' do it 'populates assignee_ids: [] if content contains /unassign' do issue.update!(assignee_ids: [developer.id]) _, updates, _ = service.execute(content, issue) expect(updates).to eq(assignee_ids: []) end it 'returns the unassign message for all the assignee if content contains /unassign' do issue.update!(assignee_ids: [developer.id, developer2.id]) _, _, message = service.execute(content, issue) expect(message).to eq("Removed assignees #{developer.to_reference} and #{developer2.to_reference}.") end end context 'Merge Request' do it 'populates assignee_ids: [] if content contains /unassign' do merge_request.update!(assignee_ids: [developer.id]) _, updates, _ = service.execute(content, merge_request) expect(updates).to eq(assignee_ids: []) end it 'returns the unassign message for all the assignee if content contains /unassign' do merge_request.update!(assignee_ids: [developer.id, developer2.id]) _, _, message = service.execute(content, merge_request) expect(message).to eq("Removed assignees #{developer.to_reference} and #{developer2.to_reference}.") end end end context 'project milestones' do before do milestone end it_behaves_like 'milestone command' do let(:content) { "/milestone %#{milestone.title}" } let(:issuable) { issue } end it_behaves_like 'milestone command' do let(:content) { "/milestone %#{milestone.title}" } let(:issuable) { merge_request } end end context 'only group milestones available' do let_it_be(:ancestor_group) { create(:group) } let_it_be(:group) { 
create(:group, parent: ancestor_group) } let_it_be(:project) { create(:project, :public, namespace: group) } let_it_be(:milestone) { create(:milestone, group: ancestor_group, title: '10.0') } before_all do project.add_developer(developer) end it_behaves_like 'milestone command' do let(:content) { "/milestone %#{milestone.title}" } let(:issuable) { issue } end it_behaves_like 'milestone command' do let(:content) { "/milestone %#{milestone.title}" } let(:issuable) { merge_request } end end it_behaves_like 'remove_milestone command' do let(:content) { '/remove_milestone' } let(:issuable) { issue } end it_behaves_like 'remove_milestone command' do let(:content) { '/remove_milestone' } let(:issuable) { merge_request } end it_behaves_like 'label command' do let(:content) { %(/label ~"#{inprogress.title}" ~#{bug.title} ~unknown) } let(:issuable) { issue } end it_behaves_like 'label command' do let(:content) { %(/label ~"#{inprogress.title}" ~#{bug.title} ~unknown) } let(:issuable) { merge_request } end context 'with a colon label' do let(:bug) { create(:label, project: project, title: 'Category:Bug') } let(:inprogress) { create(:label, project: project, title: 'status:in:progress') } context 'when quoted' do let(:content) { %(/label ~"#{inprogress.title}" ~"#{bug.title}" ~unknown) } it_behaves_like 'label command' do let(:issuable) { merge_request } end it_behaves_like 'label command' do let(:issuable) { issue } end end context 'when unquoted' do let(:content) { %(/label ~#{inprogress.title} ~#{bug.title} ~unknown) } it_behaves_like 'label command' do let(:issuable) { merge_request } end it_behaves_like 'label command' do let(:issuable) { issue } end end end context 'with a scoped label' do let(:bug) { create(:label, :scoped, project: project) } let(:inprogress) { create(:label, project: project, title: 'three::part::label') } context 'when quoted' do let(:content) { %(/label ~"#{inprogress.title}" ~"#{bug.title}" ~unknown) } it_behaves_like 'label command' do let(:issuable) { merge_request } end it_behaves_like 'label command' do let(:issuable) { issue } end end context 'when unquoted' do let(:content) { %(/label ~#{inprogress.title} ~#{bug.title} ~unknown) } it_behaves_like 'label command' do let(:issuable) { merge_request } end it_behaves_like 'label command' do let(:issuable) { issue } end end end it_behaves_like 'multiple label command' do let(:content) { %(/label ~"#{inprogress.title}" \n/label ~#{bug.title}) } let(:issuable) { issue } end it_behaves_like 'multiple label with same argument' do let(:content) { %(/label ~"#{inprogress.title}" \n/label ~#{inprogress.title}) } let(:issuable) { issue } end it_behaves_like 'multiword label name starting without ~' do let(:content) { %(/label "#{helmchart.title}") } let(:issuable) { issue } end it_behaves_like 'multiword label name starting without ~' do let(:content) { %(/label "#{helmchart.title}") } let(:issuable) { merge_request } end it_behaves_like 'label name is included in the middle of another label name' do let(:content) { %(/label ~"#{helmchart.title}") } let(:issuable) { issue } end it_behaves_like 'label name is included in the middle of another label name' do let(:content) { %(/label ~"#{helmchart.title}") } let(:issuable) { merge_request } end it_behaves_like 'unlabel command' do let(:content) { %(/unlabel ~"#{inprogress.title}") } let(:issuable) { issue } end it_behaves_like 'unlabel command' do let(:content) { %(/unlabel ~"#{inprogress.title}") } let(:issuable) { merge_request } end it_behaves_like 'multiple unlabel command' do 
let(:content) { %(/unlabel ~"#{inprogress.title}" \n/unlabel ~#{bug.title}) } let(:issuable) { issue } end it_behaves_like 'unlabel command with no argument' do let(:content) { %(/unlabel) } let(:issuable) { issue } end it_behaves_like 'unlabel command with no argument' do let(:content) { %(/unlabel) } let(:issuable) { merge_request } end it_behaves_like 'relabel command' do let(:content) { %(/relabel ~"#{inprogress.title}") } let(:issuable) { issue } end it_behaves_like 'relabel command' do let(:content) { %(/relabel ~"#{inprogress.title}") } let(:issuable) { merge_request } end it_behaves_like 'done command' do let(:content) { '/done' } let(:issuable) { issue } end it_behaves_like 'done command' do let(:content) { '/done' } let(:issuable) { merge_request } end it_behaves_like 'done command' do let(:content) { '/done' } let(:issuable) { work_item } end it_behaves_like 'subscribe command' do let(:content) { '/subscribe' } let(:issuable) { issue } end it_behaves_like 'subscribe command' do let(:content) { '/subscribe' } let(:issuable) { merge_request } end it_behaves_like 'subscribe command' do let(:content) { '/subscribe' } let(:issuable) { work_item } end it_behaves_like 'unsubscribe command' do let(:content) { '/unsubscribe' } let(:issuable) { issue } end it_behaves_like 'unsubscribe command' do let(:content) { '/unsubscribe' } let(:issuable) { merge_request } end it_behaves_like 'unsubscribe command' do let(:content) { '/unsubscribe' } let(:issuable) { work_item } end it_behaves_like 'failed command', 'Could not apply due command.' do let(:content) { '/due 2016-08-28' } let(:issuable) { merge_request } end it_behaves_like 'remove_due_date command' do let(:content) { '/remove_due_date' } let(:issuable) { issue } end it_behaves_like 'draft command' do let(:content) { '/draft' } let(:issuable) { merge_request } end it_behaves_like 'draft/ready command no action' do let(:content) { '/draft' } let(:issuable) { merge_request } before do issuable.update!(title: issuable.draft_title) end end it_behaves_like 'draft/ready command no action' do let(:content) { '/ready' } let(:issuable) { merge_request } end it_behaves_like 'ready command' do let(:content) { '/ready' } let(:issuable) { merge_request } end it_behaves_like 'failed command', 'Could not apply remove_due_date command.' 
do let(:content) { '/remove_due_date' } let(:issuable) { merge_request } end it_behaves_like 'estimate command' do let(:content) { '/estimate 1h' } let(:issuable) { issue } end it_behaves_like 'estimate command' do let(:content) { '/estimate_time 1h' } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/estimate' } let(:issuable) { issue } end context 'when provided an invalid estimate' do let(:content) { '/estimate abc' } let(:issuable) { issue } it 'populates {} if content contains an unsupported command' do _, updates, _ = service.execute(content, issuable) expect(updates[:time_estimate]).to be_nil end it "returns empty message" do _, _, message = service.execute(content, issuable) expect(message).to be_empty end end it_behaves_like 'spend command' do let(:content) { '/spend 1h' } let(:issuable) { issue } end it_behaves_like 'spend command' do let(:content) { '/spent 1h' } let(:issuable) { issue } end it_behaves_like 'spend command' do let(:content) { '/spend_time 1h' } let(:issuable) { issue } end it_behaves_like 'spend command with negative time' do let(:content) { '/spend -120m' } let(:issuable) { issue } end it_behaves_like 'spend command with negative time' do let(:content) { '/spent -120m' } let(:issuable) { issue } end it_behaves_like 'spend command with valid date' do let(:date) { '2016-02-02' } let(:content) { "/spend 30m #{date}" } let(:issuable) { issue } end it_behaves_like 'spend command with valid date' do let(:date) { '2016-02-02' } let(:content) { "/spent 30m #{date}" } let(:issuable) { issue } end it_behaves_like 'spend command with invalid date' do let(:content) { '/spend 30m 17-99-99' } let(:issuable) { issue } end it_behaves_like 'spend command with invalid date' do let(:content) { '/spent 30m 17-99-99' } let(:issuable) { issue } end it_behaves_like 'spend command with future date' do let(:content) { '/spend 30m 6017-10-10' } let(:issuable) { issue } end it_behaves_like 'spend command with future date' do let(:content) { '/spent 30m 6017-10-10' } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/spend' } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/spent' } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/spend abc' } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/spent abc' } let(:issuable) { issue } end it_behaves_like 'remove_estimate command' do let(:content) { '/remove_estimate' } let(:issuable) { issue } end it_behaves_like 'remove_estimate command' do let(:content) { '/remove_time_estimate' } let(:issuable) { issue } end it_behaves_like 'remove_time_spent command' do let(:content) { '/remove_time_spent' } let(:issuable) { issue } end context '/confidential' do it_behaves_like 'confidential command' do let(:content) { '/confidential' } let(:issuable) { issue } end it_behaves_like 'confidential command' do let(:content) { '/confidential' } let(:issuable) { create(:incident, project: project) } end context 'when non-member is creating a new issue' do let(:service) { described_class.new(project, create(:user)) } it_behaves_like 'confidential command' do let(:content) { '/confidential' } let(:issuable) { build(:issue, project: project) } end end end it_behaves_like 'lock command' do let(:content) { '/lock' } let(:issuable) { issue } end it_behaves_like 'lock command' do let(:content) { '/lock' } let(:issuable) { merge_request } end it_behaves_like 'unlock command' do let(:content) { '/unlock' } 
let(:issuable) { issue } end it_behaves_like 'unlock command' do let(:content) { '/unlock' } let(:issuable) { merge_request } end context '/todo' do let(:content) { '/todo' } context 'if issuable is an Issue' do it_behaves_like 'todo command' do let(:issuable) { issue } end end context 'if issuable is a work item' do it_behaves_like 'todo command' do let(:issuable) { work_item } end end context 'if issuable is a MergeRequest' do it_behaves_like 'todo command' do let(:issuable) { merge_request } end end context 'if issuable is a Commit' do it_behaves_like 'failed command', 'Could not apply todo command.' do let(:issuable) { commit } end end end context '/due command' do it 'returns invalid date format message when the due date is invalid' do issue = build(:issue, project: project) _, _, message = service.execute('/due invalid date', issue) expect(message).to eq(_('Failed to set due date because the date format is invalid.')) end it_behaves_like 'due command' do let(:content) { '/due 2016-08-28' } let(:issuable) { issue } end it_behaves_like 'due command' do let(:content) { '/due tomorrow' } let(:issuable) { issue } let(:expected_date) { Date.tomorrow } end it_behaves_like 'due command' do let(:content) { '/due 5 days from now' } let(:issuable) { issue } let(:expected_date) { 5.days.from_now.to_date } end it_behaves_like 'due command' do let(:content) { '/due in 2 days' } let(:issuable) { issue } let(:expected_date) { 2.days.from_now.to_date } end end context '/copy_metadata command' do let(:todo_label) { create(:label, project: project, title: 'To Do') } let(:inreview_label) { create(:label, project: project, title: 'In Review') } it 'is available when the user is a developer' do expect(service.available_commands(issue)).to include(a_hash_including(name: :copy_metadata)) end context 'when the user does not have permission' do let(:guest) { create(:user) } let(:service) { described_class.new(project, guest) } it 'is not available' do expect(service.available_commands(issue)).not_to include(a_hash_including(name: :copy_metadata)) end end it_behaves_like 'failed command' do let(:content) { '/copy_metadata' } let(:issuable) { issue } end it_behaves_like 'copy_metadata command' do let(:source_issuable) { create(:labeled_issue, project: project, labels: [inreview_label, todo_label]) } let(:content) { "/copy_metadata #{source_issuable.to_reference}" } let(:issuable) { build(:issue, project: project) } end it_behaves_like 'copy_metadata command' do let(:source_issuable) { create(:labeled_issue, project: project, labels: [inreview_label, todo_label]) } let(:content) { "/copy_metadata #{source_issuable.to_reference}" } let(:issuable) { issue } end context 'when the parent issuable has a milestone' do it_behaves_like 'copy_metadata command' do let(:source_issuable) { create(:labeled_issue, project: project, labels: [todo_label, inreview_label], milestone: milestone) } let(:content) { "/copy_metadata #{source_issuable.to_reference(project)}" } let(:issuable) { issue } end end context 'when more than one issuable is passed' do it_behaves_like 'copy_metadata command' do let(:source_issuable) { create(:labeled_issue, project: project, labels: [inreview_label, todo_label]) } let(:other_label) { create(:label, project: project, title: 'Other') } let(:other_source_issuable) { create(:labeled_issue, project: project, labels: [other_label]) } let(:content) { "/copy_metadata #{source_issuable.to_reference} #{other_source_issuable.to_reference}" } let(:issuable) { issue } end end context 'cross project 
references' do it_behaves_like 'failed command' do let(:other_project) { create(:project, :public) } let(:source_issuable) { create(:labeled_issue, project: other_project, labels: [todo_label, inreview_label]) } let(:content) { "/copy_metadata #{source_issuable.to_reference(project)}" } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { "/copy_metadata imaginary##{non_existing_record_iid}" } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:other_project) { create(:project, :private) } let(:source_issuable) { create(:issue, project: other_project) } let(:content) { "/copy_metadata #{source_issuable.to_reference(project)}" } let(:issuable) { issue } end end end context '/duplicate command' do it_behaves_like 'duplicate command' do let(:issue_duplicate) { create(:issue, project: project) } let(:content) { "/duplicate #{issue_duplicate.to_reference}" } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/duplicate' } let(:issuable) { issue } end context 'cross project references' do it_behaves_like 'duplicate command' do let(:other_project) { create(:project, :public) } let(:issue_duplicate) { create(:issue, project: other_project) } let(:content) { "/duplicate #{issue_duplicate.to_reference(project)}" } let(:issuable) { issue } end it_behaves_like 'failed command', _('Failed to mark this issue as a duplicate because referenced issue was not found.') do let(:content) { "/duplicate imaginary##{non_existing_record_iid}" } let(:issuable) { issue } end it_behaves_like 'failed command', _('Failed to mark this issue as a duplicate because referenced issue was not found.') do let(:other_project) { create(:project, :private) } let(:issue_duplicate) { create(:issue, project: other_project) } let(:content) { "/duplicate #{issue_duplicate.to_reference(project)}" } let(:issuable) { issue } end end end context 'when current_user cannot :admin_issue' do let(:visitor) { create(:user) } let(:issue) { create(:issue, project: project, author: visitor) } let(:service) { described_class.new(project, visitor) } it_behaves_like 'failed command', 'Could not apply assign command.' do let(:content) { "/assign @#{developer.username}" } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply unassign command.' do let(:content) { '/unassign' } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply milestone command.' do let(:content) { "/milestone %#{milestone.title}" } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply remove_milestone command.' do let(:content) { '/remove_milestone' } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply label command.' do let(:content) { %(/label ~"#{inprogress.title}" ~#{bug.title} ~unknown) } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply unlabel command.' do let(:content) { %(/unlabel ~"#{inprogress.title}") } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply relabel command.' do let(:content) { %(/relabel ~"#{inprogress.title}") } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply due command.' do let(:content) { '/due tomorrow' } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply remove_due_date command.' do let(:content) { '/remove_due_date' } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply confidential command.' 
do let(:content) { '/confidential' } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply lock command.' do let(:content) { '/lock' } let(:issuable) { issue } end it_behaves_like 'failed command', 'Could not apply unlock command.' do let(:content) { '/unlock' } let(:issuable) { issue } end end context '/award command' do it_behaves_like 'award command' do let(:content) { '/award :100:' } let(:issuable) { issue } end it_behaves_like 'award command' do let(:content) { '/award :100:' } let(:issuable) { merge_request } end it_behaves_like 'award command' do let(:content) { '/award :100:' } let(:issuable) { work_item } end context 'ignores command with no argument' do it_behaves_like 'failed command' do let(:content) { '/award' } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/award' } let(:issuable) { work_item } end end context 'ignores non-existing / invalid emojis' do it_behaves_like 'failed command' do let(:content) { '/award noop' } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/award :lorem_ipsum:' } let(:issuable) { issue } end it_behaves_like 'failed command' do let(:content) { '/award :lorem_ipsum:' } let(:issuable) { work_item } end end context 'if issuable is a Commit' do let(:content) { '/award :100:' } let(:issuable) { commit } it_behaves_like 'failed command', 'Could not apply award command.' end end context '/shrug command' do it_behaves_like 'shrug command' do let(:content) { '/shrug people are people' } let(:issuable) { issue } end it_behaves_like 'shrug command' do let(:content) { '/shrug' } let(:issuable) { issue } end end context '/tableflip command' do it_behaves_like 'tableflip command' do let(:content) { '/tableflip curse your sudden but enviable betrayal' } let(:issuable) { issue } end it_behaves_like 'tableflip command' do let(:content) { '/tableflip' } let(:issuable) { issue } end end context '/target_branch command' do let(:non_empty_project) { create(:project, :repository) } let(:another_merge_request) { create(:merge_request, author: developer, source_project: non_empty_project) } let(:service) { described_class.new(non_empty_project, developer) } it 'updates target_branch if /target_branch command is executed' do _, updates, _ = service.execute('/target_branch merge-test', merge_request) expect(updates).to eq(target_branch: 'merge-test') end it 'handles blanks around param' do _, updates, _ = service.execute('/target_branch merge-test ', merge_request) expect(updates).to eq(target_branch: 'merge-test') end context 'ignores command with no argument' do it_behaves_like 'failed command', 'Could not apply target_branch command.' do let(:content) { '/target_branch' } let(:issuable) { another_merge_request } end end context 'ignores non-existing target branch' do it_behaves_like 'failed command', 'Could not apply target_branch command.' 
do let(:content) { '/target_branch totally_non_existing_branch' } let(:issuable) { another_merge_request } end end it 'returns the target_branch message' do _, _, message = service.execute('/target_branch merge-test', merge_request) expect(message).to eq('Set target branch to merge-test.') end end context '/board_move command' do let_it_be(:todo) { create(:label, project: project, title: 'To Do') } let_it_be(:inreview) { create(:label, project: project, title: 'In Review') } let(:content) { %(/board_move ~"#{inreview.title}") } let_it_be(:board) { create(:board, project: project) } let_it_be(:todo_list) { create(:list, board: board, label: todo) } let_it_be(:inreview_list) { create(:list, board: board, label: inreview) } let_it_be(:inprogress_list) { create(:list, board: board, label: inprogress) } it 'populates remove_label_ids for all current board columns' do issue.update!(label_ids: [todo.id, inprogress.id]) _, updates, _ = service.execute(content, issue) expect(updates[:remove_label_ids]).to match_array([todo.id, inprogress.id]) end it 'populates add_label_ids with the id of the given label' do _, updates, _ = service.execute(content, issue) expect(updates[:add_label_ids]).to eq([inreview.id]) end it 'does not include the given label id in remove_label_ids' do issue.update!(label_ids: [todo.id, inreview.id]) _, updates, _ = service.execute(content, issue) expect(updates[:remove_label_ids]).to match_array([todo.id]) end it 'does not remove label ids that are not lists on the board' do issue.update!(label_ids: [todo.id, bug.id]) _, updates, _ = service.execute(content, issue) expect(updates[:remove_label_ids]).to match_array([todo.id]) end it 'returns board_move message' do issue.update!(label_ids: [todo.id, inprogress.id]) _, _, message = service.execute(content, issue) expect(message).to eq("Moved issue to ~#{inreview.id} column in the board.") end context 'if the project has multiple boards' do let(:issuable) { issue } before do create(:board, project: project) end it_behaves_like 'failed command', 'Could not apply board_move command.' end context 'if the given label does not exist' do let(:issuable) { issue } let(:content) { '/board_move ~"Fake Label"' } it_behaves_like 'failed command', 'Failed to move this issue because label was not found.' end context 'if multiple labels are given' do let(:issuable) { issue } let(:content) { %(/board_move ~"#{inreview.title}" ~"#{todo.title}") } it_behaves_like 'failed command', 'Failed to move this issue because only a single label can be provided.' end context 'if the given label is not a list on the board' do let(:issuable) { issue } let(:content) { %(/board_move ~"#{bug.title}") } it_behaves_like 'failed command', 'Failed to move this issue because label was not found.' end context 'if issuable is not an Issue' do let(:issuable) { merge_request } it_behaves_like 'failed command', 'Could not apply board_move command.' 
end end context '/tag command' do let(:issuable) { commit } context 'ignores command with no argument' do it_behaves_like 'failed command' do let(:content) { '/tag' } end end context 'tags a commit with a tag name' do it_behaves_like 'tag command' do let(:tag_name) { 'v1.2.3' } let(:tag_message) { nil } let(:content) { "/tag #{tag_name}" } end end context 'tags a commit with a tag name and message' do it_behaves_like 'tag command' do let(:tag_name) { 'v1.2.3' } let(:tag_message) { 'Stable release' } let(:content) { "/tag #{tag_name} #{tag_message}" } end end end it 'limits to commands passed' do content = "/shrug test\n/close" text, commands = service.execute(content, issue, only: [:shrug]) expect(commands).to be_empty expect(text).to eq("test #{described_class::SHRUG}\n/close") end it 'preserves leading whitespace' do content = " - list\n\n/close\n\ntest\n\n" text, _ = service.execute(content, issue) expect(text).to eq(" - list\n\ntest") end it 'tracks MAU for commands' do content = "/shrug test\n/assign me\n/milestone %4" expect(Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter) .to receive(:track_unique_action) .with('shrug', args: 'test', user: developer) expect(Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter) .to receive(:track_unique_action) .with('assign', args: 'me', user: developer) expect(Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter) .to receive(:track_unique_action) .with('milestone', args: '%4', user: developer) service.execute(content, issue) end context '/create_merge_request command' do let(:branch_name) { '1-feature' } let(:content) { "/create_merge_request #{branch_name}" } let(:issuable) { issue } context 'if issuable is not an Issue' do let(:issuable) { merge_request } it_behaves_like 'failed command', 'Could not apply create_merge_request command.' end context "when logged user cannot create_merge_requests in the project" do let(:project) { create(:project, :archived) } before do project.add_developer(developer) end it_behaves_like 'failed command', 'Could not apply create_merge_request command.' end context 'when logged user cannot push code to the project' do let(:project) { create(:project, :private) } let(:service) { described_class.new(project, create(:user)) } it_behaves_like 'failed command', 'Could not apply create_merge_request command.' 
end it 'populates create_merge_request with branch_name and issue iid' do _, updates, _ = service.execute(content, issuable) expect(updates).to eq(create_merge_request: { branch_name: branch_name, issue_iid: issuable.iid }) end it 'returns the create_merge_request message' do _, _, message = service.execute(content, issuable) expect(message).to eq("Created branch '#{branch_name}' and a merge request to resolve this issue.") end end context 'submit_review command' do using RSpec::Parameterized::TableSyntax where(:note) do [ 'I like it', '/submit_review' ] end with_them do let(:content) { '/submit_review' } let!(:draft_note) { create(:draft_note, note: note, merge_request: merge_request, author: developer) } it 'submits the users current review' do _, _, message = service.execute(content, merge_request) expect { draft_note.reload }.to raise_error(ActiveRecord::RecordNotFound) expect(message).to eq('Submitted the current review.') end end context 'when parameters are passed' do context 'with approve parameter' do it 'calls MergeRequests::ApprovalService service' do expect_next_instance_of( MergeRequests::ApprovalService, project: merge_request.project, current_user: current_user ) do |service| expect(service).to receive(:execute).with(merge_request) end _, _, message = service.execute('/submit_review approve', merge_request) expect(message).to eq('Submitted the current review.') end end context 'with review state parameter' do it 'calls MergeRequests::UpdateReviewerStateService service' do expect_next_instance_of( MergeRequests::UpdateReviewerStateService, project: merge_request.project, current_user: current_user ) do |service| expect(service).to receive(:execute).with(merge_request, 'requested_changes') end _, _, message = service.execute('/submit_review requested_changes', merge_request) expect(message).to eq('Submitted the current review.') end end end end context 'request_changes command' do let(:merge_request) { create(:merge_request, source_project: project) } let(:content) { '/request_changes' } context "when `mr_request_changes` feature flag is disabled" do before do stub_feature_flags(mr_request_changes: false) end it 'does not call MergeRequests::UpdateReviewerStateService' do expect(MergeRequests::UpdateReviewerStateService).not_to receive(:new) service.execute(content, merge_request) end end context "when the user is a reviewer" do before do create(:merge_request_reviewer, merge_request: merge_request, reviewer: current_user) end it 'calls MergeRequests::UpdateReviewerStateService with requested_changes' do expect_next_instance_of( MergeRequests::UpdateReviewerStateService, project: project, current_user: current_user ) do |service| expect(service).to receive(:execute).with(merge_request, "requested_changes").and_return({ status: :success }) end _, _, message = service.execute(content, merge_request) expect(message).to eq('Changes requested to the current merge request.') end it 'returns error message from MergeRequests::UpdateReviewerStateService' do expect_next_instance_of( MergeRequests::UpdateReviewerStateService, project: project, current_user: current_user ) do |service| expect(service).to receive(:execute).with(merge_request, "requested_changes").and_return({ status: :error, message: 'Error' }) end _, _, message = service.execute(content, merge_request) expect(message).to eq('Error') end end context "when the user is not a reviewer" do it 'does not call MergeRequests::UpdateReviewerStateService' do expect(MergeRequests::UpdateReviewerStateService).not_to receive(:new) 
service.execute(content, merge_request) end end it_behaves_like 'approve command unavailable' do let(:issuable) { issue } end end it_behaves_like 'issues link quick action', :relate do let(:user) { developer } end context 'unlink command' do let_it_be(:private_issue) { create(:issue, project: create(:project, :private)) } let_it_be(:other_issue) { create(:issue, project: project) } let(:content) { "/unlink #{other_issue.to_reference(issue)}" } subject(:unlink_issues) { service.execute(content, issue) } shared_examples 'command with failure' do it 'does not destroy issues relation' do expect { unlink_issues }.not_to change { IssueLink.count } end it 'return correct execution message' do expect(unlink_issues[2]).to eq('No linked issue matches the provided parameter.') end end context 'when command includes linked issue' do let_it_be(:link1) { create(:issue_link, source: issue, target: other_issue) } let_it_be(:link2) { create(:issue_link, source: issue, target: private_issue) } it 'executes command successfully' do expect { unlink_issues }.to change { IssueLink.count }.by(-1) expect(unlink_issues[2]).to eq("Removed link with #{other_issue.to_reference(issue)}.") expect(issue.notes.last.note).to eq("removed the relation with #{other_issue.to_reference}") expect(other_issue.notes.last.note).to eq("removed the relation with #{issue.to_reference}") end context 'when user has no access' do let(:content) { "/unlink #{private_issue.to_reference(issue)}" } it_behaves_like 'command with failure' end end context 'when provided issue is not linked' do it_behaves_like 'command with failure' end end context 'invite_email command' do let_it_be(:issuable) { issue } it_behaves_like 'failed command', "No email participants were added. Either none were provided, or they already exist." do let(:content) { '/invite_email' } end context 'with existing email participant' do let(:content) { '/invite_email [email protected]' } before do issuable.issue_email_participants.create!(email: "[email protected]") end it_behaves_like 'failed command', "No email participants were added. Either none were provided, or they already exist." 
end context 'with new email participants' do let(:content) { '/invite_email [email protected] [email protected]' } subject(:add_emails) { service.execute(content, issuable) } it 'returns message' do _, _, message = add_emails expect(message).to eq('Added [email protected] and [email protected].') end it 'adds 2 participants' do expect { add_emails }.to change { issue.issue_email_participants.count }.by(2) end context 'with mixed case email' do let(:content) { '/invite_email [email protected]' } it 'returns correctly cased message' do _, _, message = add_emails expect(message).to eq('Added [email protected].') end end context 'with invalid email' do let(:content) { '/invite_email [email protected] bad_email' } it 'only adds valid emails' do expect { add_emails }.to change { issue.issue_email_participants.count }.by(1) end end context 'with existing email' do let(:content) { '/invite_email [email protected] [email protected]' } it 'only adds new emails' do issue.issue_email_participants.create!(email: '[email protected]') expect { add_emails }.to change { issue.issue_email_participants.count }.by(1) end it 'only adds new (case insensitive) emails' do issue.issue_email_participants.create!(email: '[email protected]') expect { add_emails }.to change { issue.issue_email_participants.count }.by(1) end end context 'with duplicate email' do let(:content) { '/invite_email [email protected] [email protected]' } it 'only adds unique new emails' do expect { add_emails }.to change { issue.issue_email_participants.count }.by(1) end end context 'with more than 6 emails' do let(:content) { '/invite_email [email protected] [email protected] [email protected] [email protected] [email protected] [email protected] [email protected]' } it 'only adds 6 new emails' do expect { add_emails }.to change { issue.issue_email_participants.count }.by(6) end end context 'with feature flag disabled' do before do stub_feature_flags(issue_email_participants: false) end it 'does not add any participants' do expect { add_emails }.not_to change { issue.issue_email_participants.count } end end end it 'is part of the available commands' do expect(service.available_commands(issuable)).to include(a_hash_including(name: :invite_email)) end context 'with non-persisted issue' do let(:issuable) { build(:issue) } it 'is not part of the available commands' do expect(service.available_commands(issuable)).not_to include(a_hash_including(name: :invite_email)) end end end context 'severity command' do let_it_be_with_reload(:issuable) { create(:incident, project: project) } subject(:set_severity) { service.execute(content, issuable) } it_behaves_like 'failed command', 'No severity matches the provided parameter' do let(:content) { '/severity something' } end shared_examples 'updates the severity' do |new_severity| it do expect { set_severity }.to change { issuable.severity }.from('unknown').to(new_severity) end end context 'when quick action is used on creation' do let(:content) { '/severity s3' } let(:issuable) { build(:incident, project: project) } it_behaves_like 'updates the severity', 'medium' context 'issuable does not support severity' do let(:issuable) { build(:issue, project: project) } it_behaves_like 'failed command', '' end end context 'severity given with S format' do let(:content) { '/severity s3' } it_behaves_like 'updates the severity', 'medium' end context 'severity given with number format' do let(:content) { '/severity 3' } it_behaves_like 'updates the severity', 'medium' end context 'severity given with text format' do 
let(:content) { '/severity medium' } it_behaves_like 'updates the severity', 'medium' end context 'an issuable that does not support severity' do let_it_be_with_reload(:issuable) { create(:issue, project: project) } it_behaves_like 'failed command', 'Could not apply severity command.' do let(:content) { '/severity s3' } end end end context 'approve command' do let(:merge_request) { create(:merge_request, source_project: project) } let(:content) { '/approve' } it 'approves the current merge request' do service.execute(content, merge_request) expect(merge_request.approved_by_users).to eq([developer]) end context "when the user can't approve" do before do project.team.truncate project.add_guest(developer) end it 'does not approve the MR' do service.execute(content, merge_request) expect(merge_request.approved_by_users).to be_empty end end it_behaves_like 'approve command unavailable' do let(:issuable) { issue } end end context 'unapprove command' do let!(:merge_request) { create(:merge_request, source_project: project) } let(:content) { '/unapprove' } before do service.execute('/approve', merge_request) end it 'unapproves the current merge request' do service.execute(content, merge_request) expect(merge_request.approved_by_users).to be_empty end it 'calls MergeRequests::UpdateReviewerStateService' do expect_next_instance_of( MergeRequests::UpdateReviewerStateService, project: project, current_user: current_user ) do |service| expect(service).to receive(:execute).with(merge_request, "unreviewed") end service.execute(content, merge_request) end context "when the user can't unapprove" do before do project.team.truncate project.add_guest(developer) end it 'does not unapprove the MR' do service.execute(content, merge_request) expect(merge_request.approved_by_users).to eq([developer]) end it_behaves_like 'unapprove command unavailable' do let(:issuable) { issue } end end end context 'crm_contact commands' do let_it_be(:new_contact) { create(:contact, group: group) } let_it_be(:existing_contact) { create(:contact, group: group) } let(:add_command) { service.execute("/add_contacts #{new_contact.email}", issue) } let(:remove_command) { service.execute("/remove_contacts #{existing_contact.email}", issue) } before do issue.project.group.add_developer(developer) create(:issue_customer_relations_contact, issue: issue, contact: existing_contact) end it 'add_contacts command adds the contact' do _, updates, message = add_command expect(updates).to eq(add_contacts: [new_contact.email]) expect(message).to eq('One or more contacts were successfully added.') end it 'remove_contacts command removes the contact' do _, updates, message = remove_command expect(updates).to eq(remove_contacts: [existing_contact.email]) expect(message).to eq('One or more contacts were successfully removed.') end end context 'when using an alias' do it 'returns the correct execution message' do content = "/labels ~#{bug.title}" _, _, message = service.execute(content, issue) expect(message).to eq("Added ~\"Bug\" label.") end end it_behaves_like 'quick actions that change work item type' context '/set_parent command' do let_it_be(:parent) { create(:work_item, :issue, project: project) } let_it_be(:work_item) { create(:work_item, :task, project: project) } let_it_be(:parent_ref) { parent.to_reference(project) } let(:content) { "/set_parent #{parent_ref}" } it 'returns success message' do _, _, message = service.execute(content, work_item) expect(message).to eq('Work item parent set successfully') end it 'sets correct update params' do _, 
updates, _ = service.execute(content, work_item) expect(updates).to eq(set_parent: parent) end end end describe '#explain' do let(:service) { described_class.new(project, developer) } let(:merge_request) { create(:merge_request, source_project: project) } describe 'close command' do let(:content) { '/close' } it 'includes issuable name' do content_result, explanations = service.explain(content, issue) expect(content_result).to eq('') expect(explanations).to eq(['Closes this issue.']) end end describe 'reopen command' do let(:content) { '/reopen' } let(:merge_request) { create(:merge_request, :closed, source_project: project) } it 'includes issuable name' do _, explanations = service.explain(content, merge_request) expect(explanations).to eq(['Reopens this merge request.']) end end describe 'title command' do let(:content) { '/title This is new title' } it 'includes new title' do _, explanations = service.explain(content, issue) expect(explanations).to eq(['Changes the title to "This is new title".']) end end describe 'assign command' do shared_examples 'assigns developer' do it 'tells us we will assign the developer' do _, explanations = service.explain(content, merge_request) expect(explanations).to eq(["Assigns @#{developer.username}."]) end end context 'when using a reference' do let(:content) { "/assign @#{developer.username}" } include_examples 'assigns developer' end context 'when using a bare username' do let(:content) { "/assign #{developer.username}" } include_examples 'assigns developer' end context 'when using me' do let(:content) { "/assign me" } include_examples 'assigns developer' end context 'when there are unparseable arguments' do let(:arg) { "#{developer.username} to this issue" } let(:content) { "/assign #{arg}" } it 'tells us why we cannot do that' do _, explanations = service.explain(content, merge_request) expect(explanations) .to contain_exactly "Problem with assign command: Failed to find users for 'to', 'this', and 'issue'." 
end end end describe 'unassign command' do let(:content) { '/unassign' } let(:issue) { create(:issue, project: project, assignees: [developer]) } it 'includes current assignee reference' do _, explanations = service.explain(content, issue) expect(explanations).to eq(["Removes assignee @#{developer.username}."]) end end describe 'unassign_reviewer command' do let(:content) { '/unassign_reviewer' } let(:merge_request) { create(:merge_request, source_project: project, reviewers: [developer]) } it 'includes current assignee reference' do _, explanations = service.explain(content, merge_request) expect(explanations).to eq(["Removes reviewer @#{developer.username}."]) end end describe 'assign_reviewer command' do let(:content) { "/assign_reviewer #{developer.to_reference}" } let(:merge_request) { create(:merge_request, source_project: project, assignees: [developer]) } it 'includes only the user reference' do _, explanations = service.explain(content, merge_request) expect(explanations).to eq(["Assigns #{developer.to_reference} as reviewer."]) end end describe 'milestone command' do let(:content) { '/milestone %wrong-milestone' } let!(:milestone) { create(:milestone, project: project, title: '9.10') } it 'is empty when milestone reference is wrong' do _, explanations = service.explain(content, issue) expect(explanations).to eq([]) end end describe 'remove milestone command' do let(:content) { '/remove_milestone' } let(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) } it 'includes current milestone name' do _, explanations = service.explain(content, merge_request) expect(explanations).to eq(['Removes %"9.10" milestone.']) end end describe 'label command' do let(:content) { '/label ~missing' } let!(:label) { create(:label, project: project) } it 'is empty when there are no correct labels' do _, explanations = service.explain(content, issue) expect(explanations).to eq([]) end end describe 'unlabel command' do let(:content) { '/unlabel' } it 'says all labels if no parameter provided' do merge_request.update!(label_ids: [bug.id]) _, explanations = service.explain(content, merge_request) expect(explanations).to eq([_('Removes all labels.')]) end end describe 'relabel command' do let(:content) { "/relabel #{bug.title}" } let(:feature) { create(:label, project: project, title: 'Feature') } it 'includes label name' do issue.update!(label_ids: [feature.id]) _, explanations = service.explain(content, issue) expect(explanations).to eq(["Replaces all labels with ~#{bug.id} label."]) end end describe 'subscribe command' do let(:content) { '/subscribe' } it 'includes issuable name' do _, explanations = service.explain(content, issue) expect(explanations).to eq(['Subscribes to this issue.']) end end describe 'unsubscribe command' do let(:content) { '/unsubscribe' } it 'includes issuable name' do merge_request.subscribe(developer, project) _, explanations = service.explain(content, merge_request) expect(explanations).to eq(['Unsubscribes from this merge request.']) end end describe 'due command' do let(:content) { '/due April 1st 2016' } it 'includes the date' do _, explanations = service.explain(content, issue) expect(explanations).to eq(['Sets the due date to Apr 1, 2016.']) end end describe 'draft command set' do let(:content) { '/draft' } it 'includes the new status' do _, explanations = service.explain(content, merge_request) expect(explanations).to match_array(['Marks this merge request as a draft.']) end it 'includes the no change message when status unchanged' do 
merge_request.update!(title: merge_request.draft_title) _, explanations = service.explain(content, merge_request) expect(explanations).to match_array(["No change to this merge request's draft status."]) end end describe 'ready command' do let(:content) { '/ready' } it 'includes the new status' do merge_request.update!(title: merge_request.draft_title) _, explanations = service.explain(content, merge_request) expect(explanations).to match_array(['Marks this merge request as ready.']) end it 'includes the no change message when status unchanged' do _, explanations = service.explain(content, merge_request) expect(explanations).to match_array(["No change to this merge request's draft status."]) end end describe 'award command' do let(:content) { '/award :confetti_ball: ' } it 'includes the emoji' do _, explanations = service.explain(content, issue) expect(explanations).to eq(['Toggles :confetti_ball: emoji award.']) end end describe 'estimate command' do context 'positive estimation' do let(:content) { '/estimate 79d' } it 'includes the formatted duration' do _, explanations = service.explain(content, merge_request) expect(explanations).to eq(['Sets time estimate to 3mo 3w 4d.']) end end context 'zero estimation' do let(:content) { '/estimate 0' } it 'includes the formatted duration' do _, explanations = service.explain(content, merge_request) expect(explanations).to eq(['Removes time estimate.']) end end context 'negative estimation' do let(:content) { '/estimate -79d' } it 'does not explain' do _, explanations = service.explain(content, merge_request) expect(explanations).to be_empty end end context 'invalid estimation' do let(:content) { '/estimate a' } it 'does not explain' do _, explanations = service.explain(content, merge_request) expect(explanations).to be_empty end end end describe 'spend command' do it 'includes the formatted duration and proper verb when using /spend' do _, explanations = service.explain('/spend -120m', issue) expect(explanations).to eq(['Subtracts 2h spent time.']) end it 'includes the formatted duration and proper verb when using /spent' do _, explanations = service.explain('/spent -120m', issue) expect(explanations).to eq(['Subtracts 2h spent time.']) end end describe 'target branch command' do let(:content) { '/target_branch my-feature ' } it 'includes the branch name' do _, explanations = service.explain(content, merge_request) expect(explanations).to eq(['Sets target branch to my-feature.']) end end describe 'board move command' do let(:content) { "/board_move ~#{bug.title}" } let!(:board) { create(:board, project: project) } it 'includes the label name' do _, explanations = service.explain(content, issue) expect(explanations).to eq(["Moves issue to ~#{bug.id} column in the board."]) end end describe 'move issue to another project command' do let(:content) { '/move test/project' } it 'includes the project name' do _, explanations = service.explain(content, issue) expect(explanations).to eq(["Moves this issue to test/project."]) end end describe 'tag a commit' do describe 'with a tag name' do context 'without a message' do let(:content) { '/tag v1.2.3' } it 'includes the tag name only' do _, explanations = service.explain(content, commit) expect(explanations).to eq(["Tags this commit to v1.2.3."]) end end context 'with an empty message' do let(:content) { '/tag v1.2.3 ' } it 'includes the tag name only' do _, explanations = service.explain(content, commit) expect(explanations).to eq(["Tags this commit to v1.2.3."]) end end end describe 'with a tag name and 
message' do let(:content) { '/tag v1.2.3 Stable release' } it 'includes the tag name and message' do _, explanations = service.explain(content, commit) expect(explanations).to eq(["Tags this commit to v1.2.3 with \"Stable release\"."]) end end end describe 'create a merge request' do context 'with no branch name' do let(:content) { '/create_merge_request' } it 'uses the default branch name' do _, explanations = service.explain(content, issue) expect(explanations).to eq([_('Creates a branch and a merge request to resolve this issue.')]) end it 'returns the execution message using the default branch name' do _, _, message = service.execute(content, issue) expect(message).to eq(_('Created a branch and a merge request to resolve this issue.')) end end context 'with a branch name' do let(:content) { '/create_merge_request foo' } it 'uses the given branch name' do _, explanations = service.explain(content, issue) expect(explanations).to eq(["Creates branch 'foo' and a merge request to resolve this issue."]) end it 'returns the execution message using the given branch name' do _, _, message = service.execute(content, issue) expect(message).to eq("Created branch 'foo' and a merge request to resolve this issue.") end end end describe "#commands_executed_count" do it 'counts commands executed' do content = "/close and \n/assign me and \n/title new title" service.execute(content, issue) expect(service.commands_executed_count).to eq(3) end end describe 'crm commands' do let(:add_contacts) { '/add_contacts' } let(:remove_contacts) { '/remove_contacts' } before_all do group.add_developer(developer) end context 'when group has no contacts' do it '/add_contacts is not available' do _, explanations = service.explain(add_contacts, issue) expect(explanations).to be_empty end end context 'when group has contacts' do let!(:contact) { create(:contact, group: group) } it '/add_contacts is available' do _, explanations = service.explain(add_contacts, issue) expect(explanations).to contain_exactly("Add customer relation contact(s).") end context 'when issue has no contacts' do it '/remove_contacts is not available' do _, explanations = service.explain(remove_contacts, issue) expect(explanations).to be_empty end end context 'when issue has contacts' do let!(:issue_contact) { create(:issue_customer_relations_contact, issue: issue, contact: contact) } it '/remove_contacts is available' do _, explanations = service.explain(remove_contacts, issue) expect(explanations).to contain_exactly("Remove customer relation contact(s).") end end end end context 'with keep_actions' do let(:content) { '/close' } it 'keeps quick actions' do content_result, explanations = service.explain(content, issue, keep_actions: true) expect(content_result).to eq("\n/close") expect(explanations).to eq(['Closes this issue.']) end it 'removes the quick action' do content_result, explanations = service.explain(content, issue, keep_actions: false) expect(content_result).to eq('') expect(explanations).to eq(['Closes this issue.']) end end describe 'type command' do let_it_be(:project) { create(:project, :private) } let_it_be(:work_item) { create(:work_item, :task, project: project) } let(:command) { '/type issue' } it 'has command available' do _, explanations = service.explain(command, work_item) expect(explanations) .to contain_exactly("Converts work item to issue. 
Widgets not supported in new type are removed.") end end describe 'relate and unlink commands' do let_it_be(:other_issue) { create(:issue, project: project).to_reference(issue) } let(:relate_content) { "/relate #{other_issue}" } let(:unlink_content) { "/unlink #{other_issue}" } context 'when user has permissions' do it '/relate command is available' do _, explanations = service.explain(relate_content, issue) expect(explanations).to eq(["Marks this issue as related to #{other_issue}."]) end it '/unlink command is available' do _, explanations = service.explain(unlink_content, issue) expect(explanations).to eq(["Removes link with #{other_issue}."]) end end context 'when user has insufficient permissions' do before do allow(Ability).to receive(:allowed?).and_call_original allow(Ability).to receive(:allowed?).with(current_user, :admin_issue_link, issue).and_return(false) end it '/relate command is not available' do _, explanations = service.explain(relate_content, issue) expect(explanations).to be_empty end it '/unlink command is not available' do _, explanations = service.explain(unlink_content, issue) expect(explanations).to be_empty end end end describe 'promote_to command' do let(:content) { '/promote_to issue' } context 'when work item supports promotion' do let_it_be(:task) { build(:work_item, :task, project: project) } it 'includes the value' do _, explanations = service.explain(content, task) expect(explanations).to eq(['Promotes work item to issue.']) end end context 'when work item does not support promotion' do let_it_be(:incident) { build(:work_item, :incident, project: project) } it 'does not include the value' do _, explanations = service.explain(content, incident) expect(explanations).to be_empty end end end describe '/set_parent command' do let_it_be(:parent) { create(:work_item, :issue, project: project) } let_it_be(:work_item) { create(:work_item, :task, project: project) } let_it_be(:parent_ref) { parent.to_reference(project) } let(:command) { "/set_parent #{parent_ref}" } shared_examples 'command is available' do it 'explanation contains correct message' do _, explanations = service.explain(command, work_item) expect(explanations) .to contain_exactly("Change work item's parent to #{parent_ref}.") end it 'contains command' do expect(service.available_commands(work_item)).to include(a_hash_including(name: :set_parent)) end end shared_examples 'command is not available' do it 'explanation is empty' do _, explanations = service.explain(command, work_item) expect(explanations).to eq([]) end it 'does not contain command' do expect(service.available_commands(work_item)).not_to include(a_hash_including(name: :set_parent)) end end context 'when user can admin link' do it_behaves_like 'command is available' context 'when work item type does not support a parent' do let_it_be(:work_item) { build(:work_item, :incident, project: project) } it_behaves_like 'command is not available' end end context 'when user cannot admin link' do subject(:service) { described_class.new(project, create(:user)) } it_behaves_like 'command is not available' end end describe '/add_child command' do let_it_be(:child) { create(:work_item, :issue, project: project) } let_it_be(:work_item) { create(:work_item, :objective, project: project) } let_it_be(:child_ref) { child.to_reference(project) } let(:command) { "/add_child #{child_ref}" } shared_examples 'command is available' do it 'explanation contains correct message' do _, explanations = service.explain(command, work_item) expect(explanations) .to 
contain_exactly("Add #{child_ref} to this work item as child(ren).") end it 'contains command' do expect(service.available_commands(work_item)).to include(a_hash_including(name: :add_child)) end end shared_examples 'command is not available' do it 'explanation is empty' do _, explanations = service.explain(command, work_item) expect(explanations).to eq([]) end it 'does not contain command' do expect(service.available_commands(work_item)).not_to include(a_hash_including(name: :add_child)) end end context 'when user can admin link' do it_behaves_like 'command is available' context 'when work item type does not support children' do let_it_be(:work_item) { build(:work_item, :key_result, project: project) } it_behaves_like 'command is not available' end end context 'when user cannot admin link' do subject(:service) { described_class.new(project, create(:user)) } it_behaves_like 'command is not available' end end end describe '#available_commands' do context 'when Guest is creating a new issue' do let_it_be(:guest) { create(:user) } let(:issue) { build(:issue, project: public_project) } let(:service) { described_class.new(project, guest) } before_all do public_project.add_guest(guest) end it 'includes commands to set metadata' do # milestone action is only available when project has a milestone milestone available_commands = service.available_commands(issue) expect(available_commands).to include( a_hash_including(name: :label), a_hash_including(name: :milestone), a_hash_including(name: :copy_metadata), a_hash_including(name: :assign), a_hash_including(name: :due) ) end end end end
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module Issues
  class ExportCsvService < ExportCsv::BaseService
    include Gitlab::Routing.url_helpers
    include GitlabRoutingHelper

    def initialize(relation, resource_parent, user = nil)
      super(relation, resource_parent)

      @labels = objects.labels_hash.transform_values { |labels| labels.sort.join(',').presence }
    end

    def email(mail_to_user)
      Notify.issues_csv_email(mail_to_user, resource_parent, csv_data, csv_builder.status).deliver_now
    end

    private

    def associations_to_preload
      [:work_item_type, :author, :assignees, :timelogs, :milestone, { project: { namespace: :route } }]
    end

    def header_to_value_hash
      {
        'Title' => 'title',
        'Description' => 'description',
        'Issue ID' => 'iid',
        'URL' => -> (issue) { issue_url(issue) },
        'State' => -> (issue) { issue.closed? ? 'Closed' : 'Open' },
        'Author' => 'author_name',
        'Author Username' => -> (issue) { issue.author&.username },
        'Assignee' => -> (issue) { issue.assignees.map(&:name).join(', ') },
        'Assignee Username' => -> (issue) { issue.assignees.map(&:username).join(', ') },
        'Confidential' => -> (issue) { issue.confidential? ? 'Yes' : 'No' },
        'Locked' => -> (issue) { issue.discussion_locked? ? 'Yes' : 'No' },
        'Due Date' => -> (issue) { issue.due_date&.to_fs(:csv) },
        'Created At (UTC)' => -> (issue) { issue.created_at&.to_fs(:csv) },
        'Updated At (UTC)' => -> (issue) { issue.updated_at&.to_fs(:csv) },
        'Closed At (UTC)' => -> (issue) { issue.closed_at&.to_fs(:csv) },
        'Milestone' => -> (issue) { issue.milestone&.title },
        'Weight' => -> (issue) { issue.weight },
        'Labels' => -> (issue) { issue_labels(issue) },
        'Time Estimate' => ->(issue) { issue.time_estimate.to_fs(:csv) },
        'Time Spent' => -> (issue) { issue_time_spent(issue) }
      }
    end

    def issue_labels(issue)
      @labels[issue.id]
    end

    # rubocop: disable CodeReuse/ActiveRecord
    def issue_time_spent(issue)
      issue.timelogs.sum(&:time_spent)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    def preload_associations_in_batches?
      Feature.enabled?(:export_csv_preload_in_batches, resource_parent)
    end
  end
end

Issues::ExportCsvService.prepend_mod_with('Issues::ExportCsvService')
```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::ExportCsvService, :with_license, feature_category: :team_planning do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group) } let_it_be(:project) { create(:project, :public, group: group) } let_it_be(:issue) { create(:issue, project: project, author: user) } let_it_be(:bad_issue) { create(:issue, project: project, author: user) } subject { described_class.new(Issue.all, project) } it 'renders csv to string' do expect(subject.csv_data).to be_a String end describe '#email' do it 'emails csv' do expect { subject.email(user) }.to change(ActionMailer::Base.deliveries, :count) end it 'renders with a target filesize' do expect_next_instance_of(CsvBuilder) do |csv_builder| expect(csv_builder).to receive(:render).with(described_class::TARGET_FILESIZE).once end subject.email(user) end end def csv CSV.parse(subject.csv_data, headers: true) end context 'includes' do let_it_be(:milestone) { create(:milestone, title: 'v1.0', project: project) } let_it_be(:idea_label) { create(:label, project: project, title: 'Idea') } let_it_be(:feature_label) { create(:label, project: project, title: 'Feature') } before_all do # Creating a timelog touches the updated_at timestamp of issue, # so create these first. issue.timelogs.create!(time_spent: 360, user: user) issue.timelogs.create!(time_spent: 200, user: user) issue.update!( milestone: milestone, assignees: [user], description: 'Issue with details', state: :opened, due_date: DateTime.new(2014, 3, 2), created_at: DateTime.new(2015, 4, 3, 2, 1, 0), updated_at: DateTime.new(2016, 5, 4, 3, 2, 1), closed_at: DateTime.new(2017, 6, 5, 4, 3, 2), weight: 4, discussion_locked: true, labels: [feature_label, idea_label], time_estimate: 72000 ) end shared_examples 'exports CSVs for issues' do it 'includes the columns required for import' do expect(csv.headers).to include('Title', 'Description') end it 'returns two issues' do expect(csv.count).to eq(2) end specify 'iid' do expect(csv[0]['Issue ID']).to eq issue.iid.to_s end specify 'url' do expect(csv[0]['URL']).to match(/http.*#{project.full_path}.*#{issue.iid}/) end specify 'title' do expect(csv[0]['Title']).to eq issue.title end specify 'state' do expect(csv[0]['State']).to eq 'Open' end specify 'description' do expect(csv[0]['Description']).to eq issue.description expect(csv[1]['Description']).to eq nil end specify 'author name' do expect(csv[0]['Author']).to eq issue.author_name end specify 'author username' do expect(csv[0]['Author Username']).to eq issue.author.username end specify 'assignee name' do expect(csv[0]['Assignee']).to eq user.name expect(csv[1]['Assignee']).to eq '' end specify 'assignee username' do expect(csv[0]['Assignee Username']).to eq user.username expect(csv[1]['Assignee Username']).to eq '' end specify 'confidential' do expect(csv[0]['Confidential']).to eq 'No' end specify 'milestone' do expect(csv[0]['Milestone']).to eq issue.milestone.title expect(csv[1]['Milestone']).to eq nil end specify 'labels' do expect(csv[0]['Labels']).to eq 'Feature,Idea' expect(csv[1]['Labels']).to eq nil end specify 'due_date' do expect(csv[0]['Due Date']).to eq '2014-03-02' expect(csv[1]['Due Date']).to eq nil end specify 'created_at' do expect(csv[0]['Created At (UTC)']).to eq '2015-04-03 02:01:00' end specify 'updated_at' do expect(csv[0]['Updated At (UTC)']).to eq '2016-05-04 03:02:01' end specify 'closed_at' do expect(csv[0]['Closed At (UTC)']).to eq '2017-06-05 04:03:02' expect(csv[1]['Closed At (UTC)']).to eq nil end specify 
'discussion_locked' do expect(csv[0]['Locked']).to eq 'Yes' end specify 'weight' do expect(csv[0]['Weight']).to eq '4' end specify 'time estimate' do expect(csv[0]['Time Estimate']).to eq '72000' expect(csv[1]['Time Estimate']).to eq '0' end specify 'time spent' do expect(csv[0]['Time Spent']).to eq '560' expect(csv[1]['Time Spent']).to eq '0' end context 'with issues filtered by labels and project' do subject do described_class.new( IssuesFinder.new(user, project_id: project.id, label_name: %w[Idea Feature]).execute, project ) end it 'returns only filtered objects' do expect(csv.count).to eq(1) expect(csv[0]['Issue ID']).to eq issue.iid.to_s end end context 'with label links' do let(:labeled_issues) { create_list(:labeled_issue, 2, project: project, author: user, labels: [feature_label, idea_label]) } it 'does not run a query for each label link' do control_count = ActiveRecord::QueryRecorder.new { csv }.count labeled_issues expect { csv }.not_to exceed_query_limit(control_count) expect(csv.count).to eq(4) end it 'returns the labels in sorted order' do labeled_issues labeled_rows = csv.select { |entry| labeled_issues.map(&:iid).include?(entry['Issue ID'].to_i) } expect(labeled_rows.count).to eq(2) expect(labeled_rows.map { |entry| entry['Labels'] }).to all(eq("Feature,Idea")) end end end context 'with export_csv_preload_in_batches feature flag disabled' do before do stub_feature_flags(export_csv_preload_in_batches: false) end it_behaves_like 'exports CSVs for issues' end context 'with export_csv_preload_in_batches feature flag enabled' do it_behaves_like 'exports CSVs for issues' end end context 'with minimal details' do it 'renders labels as nil' do expect(csv[0]['Labels']).to eq nil end end end
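As a companion to the spec above, here is a minimal usage sketch of `Issues::ExportCsvService`, assuming a `project`, a `user`, and an `Issue.all` relation as in the spec; the parsed column values shown in the comments are illustrative.

```ruby
# Minimal sketch: build the service from a relation and its resource parent,
# then parse the rendered CSV back into rows, as the spec's csv helper does.
require 'csv'

service = Issues::ExportCsvService.new(Issue.all, project)

rows = CSV.parse(service.csv_data, headers: true)
rows.first['Title']  # => title of the first exported issue
rows.first['Labels'] # => sorted, comma-joined label titles, or nil when unlabeled

service.email(user)  # delivers Notify.issues_csv_email with the CSV data and builder status
```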
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module Issues
  class ReferencedMergeRequestsService < Issues::BaseService
    # rubocop: disable CodeReuse/ActiveRecord
    def execute(issue)
      referenced = referenced_merge_requests(issue)
      closed_by = closed_by_merge_requests(issue)

      ActiveRecord::Associations::Preloader.new(
        records: referenced + closed_by,
        associations: { head_pipeline: { project: [:route, { namespace: :route }] } }
      ).call

      [sort_by_iid(referenced), sort_by_iid(closed_by)]
    end
    # rubocop: enable CodeReuse/ActiveRecord

    def referenced_merge_requests(issue)
      merge_requests = extract_merge_requests(issue)

      cross_project_filter = -> (merge_requests) do
        merge_requests.select { |mr| mr.target_project == project }
      end

      Ability.merge_requests_readable_by_user(
        merge_requests, current_user,
        filters: { read_cross_project: cross_project_filter }
      )
    end

    # rubocop: disable CodeReuse/ActiveRecord
    def closed_by_merge_requests(issue)
      return [] unless issue.open?

      merge_requests = extract_merge_requests(issue, filter: :system).select(&:open?)

      return [] if merge_requests.empty?

      ids = MergeRequestsClosingIssues.where(merge_request_id: merge_requests.map(&:id), issue_id: issue.id).pluck(:merge_request_id)
      merge_requests.select { |mr| mr.id.in?(ids) }
    end
    # rubocop: enable CodeReuse/ActiveRecord

    private

    def extract_merge_requests(issue, filter: nil)
      ext = issue.all_references(current_user)
      notes = issue_notes(issue)
      notes = notes.select(&filter) if filter

      notes.each do |note|
        note.all_references(current_user, extractor: ext)
      end

      ext.merge_requests
    end

    # rubocop: disable CodeReuse/ActiveRecord
    def issue_notes(issue)
      @issue_notes ||= {}
      @issue_notes[issue] ||= issue.notes.includes(:author)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    def sort_by_iid(merge_requests)
      Gitlab::IssuableSorter.sort(project, merge_requests) { |mr| mr.iid.to_s }
    end
  end
end
```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::ReferencedMergeRequestsService, feature_category: :team_planning do def create_referencing_mr(attributes = {}) create(:merge_request, attributes).tap do |merge_request| create(:note, :system, project: project, noteable: issue, author: user, note: merge_request.to_reference(full: true)) end end def create_closing_mr(attributes = {}) create_referencing_mr(attributes).tap do |merge_request| create(:merge_requests_closing_issues, issue: issue, merge_request: merge_request) end end let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :public, :repository) } let_it_be(:other_project) { create(:project, :public, :repository) } let_it_be(:issue) { create(:issue, author: user, project: project) } let_it_be(:closing_mr) { create_closing_mr(source_project: project) } let_it_be(:closing_mr_other_project) { create_closing_mr(source_project: other_project) } let_it_be(:referencing_mr) { create_referencing_mr(source_project: project, source_branch: 'csv') } let_it_be(:referencing_mr_other_project) { create_referencing_mr(source_project: other_project, source_branch: 'csv') } let(:service) { described_class.new(container: project, current_user: user) } describe '#execute' do it 'returns a list of sorted merge requests' do mrs, closed_by_mrs = service.execute(issue) expect(mrs).to eq([closing_mr, referencing_mr, closing_mr_other_project, referencing_mr_other_project]) expect(closed_by_mrs).to eq([closing_mr, closing_mr_other_project]) end context 'performance' do it 'does not run extra queries when extra namespaces are included', :use_clean_rails_memory_store_caching do service.execute(issue) # warm cache control_count = ActiveRecord::QueryRecorder.new { service.execute(issue) }.count third_project = create(:project, :public) create_closing_mr(source_project: third_project) service.execute(issue) # warm cache expect { service.execute(issue) }.not_to exceed_query_limit(control_count) end it 'preloads the head pipeline for each merge request, and its routes' do # Hack to ensure no data is preserved on issue before starting the spec, # to avoid false negatives reloaded_issue = Issue.find(issue.id) pipeline_routes = lambda do |merge_requests| merge_requests.map { |mr| mr.head_pipeline&.project&.full_path } end closing_mr_other_project.update!(head_pipeline: create(:ci_pipeline)) control_count = ActiveRecord::QueryRecorder.new { service.execute(reloaded_issue).each(&pipeline_routes) } closing_mr.update!(head_pipeline: create(:ci_pipeline)) expect { service.execute(issue).each(&pipeline_routes) } .not_to exceed_query_limit(control_count) end it 'only loads issue notes once' do expect(issue).to receive(:notes).once.and_call_original service.execute(issue) end end end describe '#referenced_merge_requests' do it 'returns the referenced merge requests' do expect(service.referenced_merge_requests(issue)).to match_array([ closing_mr, closing_mr_other_project, referencing_mr, referencing_mr_other_project ]) end it 'excludes cross project references if the user cannot read cross project' do allow(Ability).to receive(:allowed?).and_call_original expect(Ability).to receive(:allowed?).with(user, :read_cross_project).at_least(:once).and_return(false) expect(service.referenced_merge_requests(issue)).not_to include(closing_mr_other_project) expect(service.referenced_merge_requests(issue)).not_to include(referencing_mr_other_project) end context 'performance' do it 'does not run a query for each note author', 
:use_clean_rails_memory_store_caching do service.referenced_merge_requests(issue) # warm cache control_count = ActiveRecord::QueryRecorder.new { service.referenced_merge_requests(issue) }.count create(:note, project: project, noteable: issue, author: create(:user)) service.referenced_merge_requests(issue) # warm cache expect { service.referenced_merge_requests(issue) }.not_to exceed_query_limit(control_count) end end end describe '#closed_by_merge_requests' do let(:closed_issue) { build(:issue, :closed, project: project) } it 'returns the open merge requests that close this issue' do create_closing_mr(source_project: project, state: 'closed') expect(service.closed_by_merge_requests(issue)).to match_array([closing_mr, closing_mr_other_project]) end it 'returns an empty array when the current issue is closed already' do expect(service.closed_by_merge_requests(closed_issue)).to eq([]) end context 'performance' do it 'does not run a query for each note author', :use_clean_rails_memory_store_caching do service.closed_by_merge_requests(issue) # warm cache control_count = ActiveRecord::QueryRecorder.new { service.closed_by_merge_requests(issue) }.count create(:note, :system, project: project, noteable: issue, author: create(:user)) service.closed_by_merge_requests(issue) # warm cache expect { service.closed_by_merge_requests(issue) }.not_to exceed_query_limit(control_count) end end end end
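The `execute` method in the service above relies on `ActiveRecord::Associations::Preloader` so that reading each merge request's pipeline project path does not issue per-record route queries, which is what the "preloads the head pipeline" example checks. As a reading aid only, here is that preloading call in isolation, using the same nested `associations` hash; `merge_requests` stands for an already-loaded array of records and is an assumption of the sketch.

```ruby
# Sketch only: one preloading pass for each merge request's head pipeline, the
# pipeline's project, and the routes needed to compute full paths.
ActiveRecord::Associations::Preloader.new(
  records: merge_requests, # assumed: already-loaded MergeRequest objects
  associations: { head_pipeline: { project: [:route, { namespace: :route }] } }
).call

# After the preload, this map runs without additional route queries per record.
merge_requests.map { |mr| mr.head_pipeline&.project&.full_path }
```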
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true # This service fetches all branches containing the current issue's ID, except for # those with a merge request open referencing the current issue. module Issues class RelatedBranchesService < Issues::BaseService def execute(issue) branch_names_with_mrs = branches_with_merge_request_for(issue) branches = branches_with_iid_of(issue).reject { |b| branch_names_with_mrs.include?(b[:name]) } branches.map { |branch| branch_data(branch) } end private def branch_data(branch) { name: branch[:name], pipeline_status: pipeline_status(branch) } end def pipeline_status(branch) pipeline = project.latest_pipeline(branch[:name], branch[:target]) pipeline.detailed_status(current_user) if can?(current_user, :read_pipeline, pipeline) end def branches_with_merge_request_for(issue) Issues::ReferencedMergeRequestsService .new(container: project, current_user: current_user) .referenced_merge_requests(issue) .map(&:source_branch) end def branches_with_iid_of(issue) branch_ref_regex = /\A#{Gitlab::Git::BRANCH_REF_PREFIX}#{issue.iid}-(?!\d+-stable)/i return [] unless project.repository.exists? project.repository.list_refs( [Gitlab::Git::BRANCH_REF_PREFIX + "#{issue.iid}-*"] ).each_with_object([]) do |ref, results| if ref.name.match?(branch_ref_regex) results << { name: ref.name.delete_prefix(Gitlab::Git::BRANCH_REF_PREFIX), target: ref.target } end end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::RelatedBranchesService, feature_category: :team_planning do let_it_be(:project) { create(:project, :repository, :public, public_builds: false) } let_it_be(:developer) { create(:user) } let_it_be(:issue) { create(:issue, project: project) } let(:user) { developer } subject { described_class.new(container: project, current_user: user) } before_all do project.add_developer(developer) end describe '#execute' do let(:branch_info) { subject.execute(issue) } context 'branches are available' do let_it_be(:pipeline) { create(:ci_pipeline, :success, project: project, ref: issue.to_branch_name) } before_all do project.repository.create_branch(issue.to_branch_name, pipeline.sha) project.repository.create_branch("#{issue.iid}doesnt-match", project.repository.root_ref) project.repository.create_branch("#{issue.iid}-0-stable", project.repository.root_ref) project.repository.add_tag(developer, issue.to_branch_name, pipeline.sha) end context 'when user has access to pipelines' do it 'selects relevant branches, along with pipeline status' do expect(branch_info).to contain_exactly( { name: issue.to_branch_name, pipeline_status: an_instance_of(Gitlab::Ci::Status::Success) } ) end end context 'when user does not have access to pipelines' do let(:user) { create(:user) } it 'returns branches without pipeline status' do expect(branch_info).to contain_exactly( { name: issue.to_branch_name, pipeline_status: nil } ) end end it 'excludes branches referenced in merge requests' do merge_request = create(:merge_request, { description: "Closes #{issue.to_reference}", source_project: issue.project, source_branch: issue.to_branch_name }) merge_request.create_cross_references!(user) referenced_merge_requests = Issues::ReferencedMergeRequestsService .new(container: issue.project, current_user: user) .referenced_merge_requests(issue) expect(referenced_merge_requests).not_to be_empty expect(branch_info.pluck(:name)).not_to include(merge_request.source_branch) end end context 'no branches are available' do let(:project) { create(:project, :empty_repo) } it 'returns an empty array' do expect(branch_info).to be_empty end end end end
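The branch filtering in `branches_with_iid_of` rests on the negative lookahead in `branch_ref_regex`, which is exactly what the three `create_branch` calls in the spec set out to exercise. The following stand-alone illustration assumes `Gitlab::Git::BRANCH_REF_PREFIX` is `refs/heads/` and uses 123 as an example iid.

```ruby
# Illustration only: which refs the service keeps for issue iid 123.
prefix = 'refs/heads/' # assumed value of Gitlab::Git::BRANCH_REF_PREFIX
iid = 123
branch_ref_regex = /\A#{prefix}#{iid}-(?!\d+-stable)/i

branch_ref_regex.match?('refs/heads/123-fix-crash')    # => true, kept
branch_ref_regex.match?('refs/heads/123-0-stable')     # => false, stable branches skipped
branch_ref_regex.match?('refs/heads/123doesnt-match')  # => false, no hyphen after the iid
```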
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class ReopenService < Issues::BaseService def execute(issue, skip_authorization: false) return issue unless can_reopen?(issue, skip_authorization: skip_authorization) if issue.reopen event_service.reopen_issue(issue, current_user) if current_user.project_bot? log_audit_event(issue, current_user, "#{issue.issue_type}_reopened_by_project_bot", "Reopened #{issue.issue_type.humanize(capitalize: false)} #{issue.title}") end create_note(issue, 'reopened') notification_service.async.reopen_issue(issue, current_user) perform_incident_management_actions(issue) execute_hooks(issue, 'reopen') invalidate_cache_counts(issue, users: issue.assignees) issue.update_project_counter_caches Milestones::ClosedIssuesCountService.new(issue.milestone).delete_cache if issue.milestone track_incident_action(current_user, issue, :incident_reopened) end issue end private def can_reopen?(issue, skip_authorization: false) skip_authorization || can?(current_user, :reopen_issue, issue) end def perform_incident_management_actions(issue) return unless issue.work_item_type&.incident? create_timeline_event(issue) end def create_note(issue, state = issue.state) SystemNoteService.change_status(issue, issue.project, current_user, state, nil) end def create_timeline_event(issue) IncidentManagement::TimelineEvents::CreateService.reopen_incident(issue, current_user) end end end Issues::ReopenService.prepend_mod_with('Issues::ReopenService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::ReopenService, feature_category: :team_planning do let(:project) { create(:project) } let(:issue) { create(:issue, :closed, project: project) } describe '#execute' do context 'when user is not authorized to reopen issue' do it 'does not reopen the issue' do guest = create(:user) project.add_guest(guest) described_class.new(container: project, current_user: guest).execute(issue) expect(issue).to be_closed end context 'when skip_authorization is true' do it 'does close the issue even if user is not authorized' do non_authorized_user = create(:user) service = described_class.new(container: project, current_user: non_authorized_user) expect do service.execute(issue, skip_authorization: true) end.to change { issue.reload.state }.from('closed').to('opened') end end end context 'when user is authorized to reopen issue' do let(:user) { create(:user) } subject(:execute) { described_class.new(container: project, current_user: user).execute(issue) } before do project.add_maintainer(user) end it 'invalidates counter cache for assignees' do issue.assignees << user expect_any_instance_of(User).to receive(:invalidate_issue_cache_counts) execute end it 'refreshes the number of opened issues' do expect do execute BatchLoader::Executor.clear_current end.to change { project.open_issues_count }.from(0).to(1) end it 'deletes milestone issue counters cache' do issue.update!(milestone: create(:milestone, project: project)) expect_next_instance_of(Milestones::ClosedIssuesCountService, issue.milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end execute end it 'does not create timeline event' do expect { execute }.not_to change { issue.incident_management_timeline_events.count } end it 'does not call GroupMentionWorker' do expect(Integrations::GroupMentionWorker).not_to receive(:perform_async) issue end context 'issue is incident type' do let(:issue) { create(:incident, :closed, project: project) } let(:current_user) { user } it_behaves_like 'an incident management tracked event', :incident_management_incident_reopened it_behaves_like 'Snowplow event tracking with RedisHLL context' do let(:namespace) { issue.namespace } let(:category) { described_class.to_s } let(:action) { 'incident_management_incident_reopened' } let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' } end it 'creates a timeline event' do expect(IncidentManagement::TimelineEvents::CreateService) .to receive(:reopen_incident) .with(issue, current_user) .and_call_original expect { execute }.to change { issue.incident_management_timeline_events.count }.by(1) end end context 'when issue is not confidential' do let(:expected_payload) do include( event_type: 'issue', object_kind: 'issue', changes: { closed_at: { current: nil, previous: kind_of(Time) }, state_id: { current: 1, previous: 2 }, updated_at: { current: kind_of(Time), previous: kind_of(Time) } }, object_attributes: include( state: 'opened', action: 'reopen' ) ) end it 'executes issue hooks' do expect(project.project_namespace).to receive(:execute_hooks).with(expected_payload, :issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(expected_payload, :issue_hooks) execute end end context 'when issue is confidential' do let(:issue) { create(:issue, :confidential, :closed, project: project) } it 'executes confidential issue hooks' do issue_hooks = :confidential_issue_hooks expect(project.project_namespace).to 
receive(:execute_hooks).with(an_instance_of(Hash), issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), issue_hooks) execute end end end end end
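The `state_id` values in the hook payload expectation above come from the issue state change itself: reopening moves `state_id` from 2 (closed) back to 1 (opened) and clears `closed_at`. If a direct assertion were wanted, a sketch like the one below would fit inside the authorized-user context, reusing its `issue` and `execute` helpers; it assumes the `:closed` trait sets `closed_at`, as the payload expectation implies.

```ruby
# Sketch only: assert the transition the webhook payload above describes.
it 'flips the state back to opened and clears closed_at' do
  expect { execute }
    .to change { issue.reload.state_id }.from(2).to(1)
    .and change { issue.reload.closed_at }.to(nil)
end
```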
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class DuplicateService < Issues::BaseService def execute(duplicate_issue, canonical_issue) return if canonical_issue == duplicate_issue return unless can?(current_user, :update_issue, duplicate_issue) return unless can?(current_user, :create_note, canonical_issue) create_issue_duplicate_note(duplicate_issue, canonical_issue) create_issue_canonical_note(canonical_issue, duplicate_issue) close_service.new(container: project, current_user: current_user).execute(duplicate_issue) duplicate_issue.update(duplicated_to: canonical_issue) relate_two_issues(duplicate_issue, canonical_issue) end private def create_issue_duplicate_note(duplicate_issue, canonical_issue) SystemNoteService.mark_duplicate_issue(duplicate_issue, duplicate_issue.project, current_user, canonical_issue) end def create_issue_canonical_note(canonical_issue, duplicate_issue) SystemNoteService.mark_canonical_issue_of_duplicate(canonical_issue, canonical_issue.project, current_user, duplicate_issue) end def relate_two_issues(duplicate_issue, canonical_issue) params = { target_issuable: canonical_issue } IssueLinks::CreateService.new(duplicate_issue, current_user, params).execute end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::DuplicateService, feature_category: :team_planning do let(:user) { create(:user) } let(:canonical_project) { create(:project) } let(:duplicate_project) { create(:project) } let(:canonical_issue) { create(:issue, project: canonical_project) } let(:duplicate_issue) { create(:issue, project: duplicate_project) } subject { described_class.new(container: duplicate_project, current_user: user) } describe '#execute' do context 'when the issues passed are the same' do it 'does nothing' do expect(subject).not_to receive(:close_service) expect(SystemNoteService).not_to receive(:mark_duplicate_issue) expect(SystemNoteService).not_to receive(:mark_canonical_issue_of_duplicate) subject.execute(duplicate_issue, duplicate_issue) end end context 'when the user cannot update the duplicate issue' do before do canonical_project.add_reporter(user) end it 'does nothing' do expect(subject).not_to receive(:close_service) expect(SystemNoteService).not_to receive(:mark_duplicate_issue) expect(SystemNoteService).not_to receive(:mark_canonical_issue_of_duplicate) subject.execute(duplicate_issue, canonical_issue) end end context 'when the user cannot comment on the canonical issue' do before do duplicate_project.add_reporter(user) end it 'does nothing' do expect(subject).not_to receive(:close_service) expect(SystemNoteService).not_to receive(:mark_duplicate_issue) expect(SystemNoteService).not_to receive(:mark_canonical_issue_of_duplicate) subject.execute(duplicate_issue, canonical_issue) end end context 'when the user can mark the issue as a duplicate' do before do canonical_project.add_reporter(user) duplicate_project.add_reporter(user) end it 'closes the duplicate issue' do subject.execute(duplicate_issue, canonical_issue) expect(duplicate_issue.reload).to be_closed expect(canonical_issue.reload).to be_open end it 'adds a system note to the duplicate issue' do expect(SystemNoteService) .to receive(:mark_duplicate_issue).with(duplicate_issue, duplicate_project, user, canonical_issue) subject.execute(duplicate_issue, canonical_issue) end it 'adds a system note to the canonical issue' do expect(SystemNoteService) .to receive(:mark_canonical_issue_of_duplicate).with(canonical_issue, canonical_project, user, duplicate_issue) subject.execute(duplicate_issue, canonical_issue) end it 'updates duplicate issue with canonical issue id' do subject.execute(duplicate_issue, canonical_issue) expect(duplicate_issue.reload.duplicated_to).to eq(canonical_issue) end it 'relates the duplicate issues' do canonical_project.add_reporter(user) duplicate_project.add_reporter(user) subject.execute(duplicate_issue, canonical_issue) issue_link = IssueLink.last expect(issue_link.source).to eq(duplicate_issue) expect(issue_link.target).to eq(canonical_issue) end end end end
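For a compact cross-check, the individual happy-path expectations in the "user can mark the issue as a duplicate" context could be rolled into one aggregated example. This is only a sketch that reuses that context's setup (`subject`, `duplicate_issue`, `canonical_issue`), not something the service requires.

```ruby
# Sketch only: the end state after marking a duplicate, asserted in one example.
it 'closes the duplicate and links it to the canonical issue', :aggregate_failures do
  subject.execute(duplicate_issue, canonical_issue)

  expect(duplicate_issue.reload).to be_closed
  expect(duplicate_issue.duplicated_to).to eq(canonical_issue)
  expect(IssueLink.last).to have_attributes(source: duplicate_issue, target: canonical_issue)
end
```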
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class SetCrmContactsService < ::BaseProjectService MAX_ADDITIONAL_CONTACTS = 6 # Replacing contacts by email is not currently supported def execute(issue) @issue = issue @errors = [] return error_no_permissions unless allowed? return error_invalid_params unless valid_params? @existing_ids = issue.customer_relations_contact_ids determine_changes if set_present? return error_too_many if too_many? @added_count = 0 @removed_count = 0 add if params[:add_ids].present? remove if params[:remove_ids].present? add_by_email if params[:add_emails].present? remove_by_email if params[:remove_emails].present? if issue.valid? GraphqlTriggers.issue_crm_contacts_updated(issue) issue.touch create_system_note ServiceResponse.success(payload: issue) else # The default error isn't very helpful: "Issue customer relations contacts is invalid" issue.errors.delete(:issue_customer_relations_contacts) issue.errors.add(:issue_customer_relations_contacts, errors.to_sentence) ServiceResponse.error(payload: issue, message: issue.errors.full_messages.to_sentence) end end private attr_accessor :issue, :errors, :existing_ids, :added_count, :removed_count def determine_changes params[:add_ids] = params[:replace_ids] - existing_ids params[:remove_ids] = existing_ids - params[:replace_ids] end def add add_by_id(params[:add_ids]) end def add_by_email contact_ids = ::CustomerRelations::Contact.find_ids_by_emails(project_group.root_ancestor, emails(:add_emails)) add_by_id(contact_ids) end def emails(key) params[key].map do |email| extract_email_from_request_param(email) end end def add_by_id(contact_ids) contact_ids -= existing_ids contact_ids.uniq.each do |contact_id| issue_contact = issue.issue_customer_relations_contacts.create(contact_id: contact_id) if issue_contact.persisted? @added_count += 1 else # The validation ensures that the id exists and the user has permission errors << "#{contact_id}: The resource that you are attempting to access does not exist or you don't have permission to perform this action" end end end def remove remove_by_id(params[:remove_ids]) end def remove_by_email contact_ids = ::CustomerRelations::IssueContact.find_contact_ids_by_emails(issue.id, emails(:remove_emails)) remove_by_id(contact_ids) end def remove_by_id(contact_ids) contact_ids &= existing_ids @removed_count += issue.issue_customer_relations_contacts .where(contact_id: contact_ids) # rubocop: disable CodeReuse/ActiveRecord .delete_all end def extract_email_from_request_param(email_param) email_param.delete_prefix(::CustomerRelations::Contact.reference_prefix_quoted) .delete_prefix(::CustomerRelations::Contact.reference_prefix) .delete_suffix(::CustomerRelations::Contact.reference_postfix) .tr('"', '') end def allowed? current_user&.can?(:set_issue_crm_contacts, issue) end def valid_params? set_present? ^ add_or_remove_present? end def set_present? !params[:replace_ids].nil? end def add_or_remove_present? add_present? || remove_present? end def add_present? params[:add_ids].present? || params[:add_emails].present? end def remove_present? params[:remove_ids].present? || params[:remove_emails].present? end def too_many? too_many_ids? || too_many_emails? end def too_many_ids? params[:add_ids] && params[:add_ids].length > MAX_ADDITIONAL_CONTACTS end def too_many_emails? 
params[:add_emails] && params[:add_emails].length > MAX_ADDITIONAL_CONTACTS end def create_system_note SystemNoteService.change_issuable_contacts( issue, issue.project, current_user, added_count, removed_count) end def error_no_permissions ServiceResponse.error(message: _('You have insufficient permissions to set customer relations contacts for this issue')) end def error_invalid_params ServiceResponse.error(message: _('You cannot combine replace_ids with add_ids or remove_ids')) end def error_too_many ServiceResponse.error(payload: issue, message: _("You can only add up to %{max_contacts} contacts at one time" % { max_contacts: MAX_ADDITIONAL_CONTACTS })) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::SetCrmContactsService, feature_category: :team_planning do let_it_be(:user) { create(:user) } let_it_be(:group) { create(:group, :crm_enabled) } let_it_be(:project) { create(:project, group: create(:group, :crm_enabled, parent: group)) } let_it_be(:contacts) { create_list(:contact, 4, group: group) } let_it_be(:issue, reload: true) { create(:issue, project: project) } let_it_be(:issue_contact_1) do create(:issue_customer_relations_contact, issue: issue, contact: contacts[0]).contact end let_it_be(:issue_contact_2) do create(:issue_customer_relations_contact, issue: issue, contact: contacts[1]).contact end let(:does_not_exist_or_no_permission) { "The resource that you are attempting to access does not exist or you don't have permission to perform this action" } subject(:set_crm_contacts) do described_class.new(project: project, current_user: user, params: params).execute(issue) end describe '#execute' do shared_examples 'setting contacts' do it 'updates the issue with correct contacts' do response = set_crm_contacts expect(response).to be_success expect(issue.customer_relations_contacts).to match_array(expected_contacts) end end shared_examples 'adds system note' do |added_count, removed_count| it 'calls SystemNoteService.change_issuable_contacts with correct counts' do expect(SystemNoteService) .to receive(:change_issuable_contacts) .with(issue, project, user, added_count, removed_count) set_crm_contacts end end context 'when the user has no permission' do let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } } it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error expect(response.message).to eq('You have insufficient permissions to set customer relations contacts for this issue') end end context 'when user has permission' do before do group.add_reporter(user) end context 'but the crm setting is disabled' do let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } } let(:subgroup_with_crm_disabled) { create(:group, parent: group) } let(:project_with_crm_disabled) { create(:project, group: subgroup_with_crm_disabled) } let(:issue_with_crm_disabled) { create(:issue, project: project_with_crm_disabled) } it 'returns expected error response' do response = described_class.new(project: project_with_crm_disabled, current_user: user, params: params).execute(issue_with_crm_disabled) expect(response).to be_error expect(response.message).to eq('You have insufficient permissions to set customer relations contacts for this issue') end end context 'when the contact does not exist' do let(:params) { { replace_ids: [non_existing_record_id] } } it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error expect(response.message).to eq("Issue customer relations contacts #{non_existing_record_id}: #{does_not_exist_or_no_permission}") end end context 'when the contact belongs to a different group' do let(:group2) { create(:group) } let(:contact) { create(:contact, group: group2) } let(:params) { { replace_ids: [contact.id] } } before do group2.add_reporter(user) end it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error expect(response.message).to eq("Issue customer relations contacts #{contact.id}: #{does_not_exist_or_no_permission}") end end context 'replace' do let(:params) { { replace_ids: [contacts[1].id, contacts[2].id] } } let(:expected_contacts) { [contacts[1], contacts[2]] } it_behaves_like 
'setting contacts' it_behaves_like 'adds system note', 1, 1 context 'with empty list' do let(:params) { { replace_ids: [] } } let(:expected_contacts) { [] } it_behaves_like 'setting contacts' it_behaves_like 'adds system note', 0, 2 end end context 'add' do let(:added_contact) { contacts[3] } let(:params) { { add_ids: [added_contact.id] } } let(:expected_contacts) { [issue_contact_1, issue_contact_2, added_contact] } it_behaves_like 'setting contacts' it_behaves_like 'adds system note', 1, 0 end context 'add by email' do let(:added_contact) { contacts[3] } let(:expected_contacts) { [issue_contact_1, issue_contact_2, added_contact] } context 'with pure emails in params' do let(:params) { { add_emails: [contacts[3].email] } } it_behaves_like 'setting contacts' it_behaves_like 'adds system note', 1, 0 end context 'with autocomplete prefix emails in params' do let(:params) { { add_emails: ["[\"contact:\"#{contacts[3].email}\"]"] } } it_behaves_like 'setting contacts' it_behaves_like 'adds system note', 1, 0 end end context 'remove' do let(:params) { { remove_ids: [contacts[0].id] } } let(:expected_contacts) { [contacts[1]] } it_behaves_like 'setting contacts' it_behaves_like 'adds system note', 0, 1 end context 'remove by email' do let(:expected_contacts) { [contacts[1]] } context 'with pure email in params' do let(:params) { { remove_emails: [contacts[0].email] } } it_behaves_like 'setting contacts' it_behaves_like 'adds system note', 0, 1 end context 'with autocomplete prefix and suffix email in params' do let(:params) { { remove_emails: ["[contact:#{contacts[0].email}]"] } } it_behaves_like 'setting contacts' it_behaves_like 'adds system note', 0, 1 end end context 'when attempting to add more than 6' do let(:id) { contacts[0].id } let(:params) { { add_ids: [id, id, id, id, id, id, id] } } it 'returns expected error message' do response = set_crm_contacts expect(response).to be_error expect(response.message).to eq('You can only add up to 6 contacts at one time') end end context 'when trying to remove non-existent contact' do let(:params) { { remove_ids: [non_existing_record_id] } } it 'returns expected error message' do response = set_crm_contacts expect(response).to be_success expect(response.message).to be_nil end end context 'when combining params' do let(:error_invalid_params) { 'You cannot combine replace_ids with add_ids or remove_ids' } let(:expected_contacts) { [contacts[0], contacts[3]] } context 'add and remove' do context 'with contact ids' do let(:params) { { remove_ids: [contacts[1].id], add_ids: [contacts[3].id] } } it_behaves_like 'setting contacts' end context 'with contact emails' do let(:params) { { remove_emails: [contacts[1].email], add_emails: ["[\"contact:#{contacts[3].email}]"] } } it_behaves_like 'setting contacts' end end context 'replace and remove' do let(:params) { { replace_ids: [contacts[3].id], remove_ids: [contacts[0].id] } } it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error expect(response.message).to eq(error_invalid_params) end end context 'replace and add' do let(:params) { { replace_ids: [contacts[3].id], add_ids: [contacts[1].id] } } it 'returns expected error response' do response = set_crm_contacts expect(response).to be_error expect(response.message).to eq(error_invalid_params) end end end context 'when trying to add an existing issue contact' do let(:params) { { add_ids: [contacts[0].id] } } it 'does not return an error' do response = set_crm_contacts expect(response).to be_success end end context 'when 
trying to add the same contact twice' do let(:params) { { add_ids: [contacts[3].id, contacts[3].id] } } it 'does not return an error' do response = set_crm_contacts expect(response).to be_success end end context 'when trying to remove a contact not attached to the issue' do let(:params) { { remove_ids: [contacts[3].id] } } it 'does not return an error' do response = set_crm_contacts expect(response).to be_success end end end end end
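The autocomplete-style params in the specs above (for example `[contact:[email protected]]`) are reduced to plain emails by `extract_email_from_request_param`. The sketch below shows that normalisation with the `[contact:` and `]` markers taken from the spec params; the real method additionally strips a quoted prefix via `reference_prefix_quoted`, which is left out here.

```ruby
# Illustration only: reduce an autocomplete reference to a plain email address.
def normalise(email_param)
  email_param
    .delete_prefix('[contact:') # reference_prefix, assumed from the spec params
    .delete_suffix(']')         # reference_postfix, assumed from the spec params
    .tr('"', '')                # drop stray quotes from quoted references
end

normalise('[contact:[email protected]]') # => "[email protected]"
normalise('[email protected]')            # => "[email protected]"
```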
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class RelativePositionRebalancingService UPDATE_BATCH_SIZE = 100 PREFETCH_ISSUES_BATCH_SIZE = 10_000 SMALLEST_BATCH_SIZE = 5 RETRIES_LIMIT = 3 TooManyConcurrentRebalances = Class.new(StandardError) def initialize(projects) @projects_collection = (projects.is_a?(Array) ? Project.id_in(projects) : projects).select(:id).projects_order_id_asc @root_namespace = @projects_collection.select(:namespace_id).reorder(nil).take.root_namespace # rubocop:disable CodeReuse/ActiveRecord @caching = ::Gitlab::Issues::Rebalancing::State.new(@root_namespace, @projects_collection) end def execute # Given can_start_rebalance? and track_new_running_rebalance are not atomic # it can happen that we end up with more than Rebalancing::State::MAX_NUMBER_OF_CONCURRENT_REBALANCES running. # Considering the number of allowed Rebalancing::State::MAX_NUMBER_OF_CONCURRENT_REBALANCES is small we should be ok, # but should be something to consider if we'd want to scale this up. error_message = "#{caching.concurrent_running_rebalances_count} concurrent re-balances currently running" raise TooManyConcurrentRebalances, error_message unless caching.can_start_rebalance? block_issue_repositioning! unless root_namespace.issue_repositioning_disabled? caching.track_new_running_rebalance index = caching.get_current_index loop do issue_ids = get_issue_ids(index, PREFETCH_ISSUES_BATCH_SIZE) pairs_with_index = assign_indexes(issue_ids, index) pairs_with_index.each_slice(UPDATE_BATCH_SIZE) do |pairs_batch| update_positions_with_retry(pairs_batch, 're-balance issue positions in batches ordered by position') end index = caching.get_current_index break if index >= caching.issue_count - 1 end caching.cleanup_cache unblock_issue_repositioning! end private attr_reader :root_namespace, :projects_collection, :caching def block_issue_repositioning! Feature.enable(:block_issue_repositioning, root_namespace) end def unblock_issue_repositioning! Feature.disable(:block_issue_repositioning, root_namespace) end def get_issue_ids(index, limit) issue_ids = caching.get_cached_issue_ids(index, limit) # if we have a list of cached issues and no current project id cached, # then we successfully cached issues for all projects return issue_ids if issue_ids.any? && caching.get_current_project_id.blank? # if we got no issue ids at the start of re-balancing then we did not cache any issue ids yet preload_issue_ids caching.get_cached_issue_ids(index, limit) end # rubocop: disable CodeReuse/ActiveRecord def preload_issue_ids index = 0 cached_project_id = caching.get_current_project_id collection = projects_collection collection = projects_collection.where(Project.arel_table[:id].gteq(cached_project_id.to_i)) if cached_project_id.present? collection.each do |project| caching.cache_current_project_id(project.id) index += 1 scope = Issue.in_projects(project).order_by_relative_position.with_non_null_relative_position.select(:id, :relative_position) with_retry(PREFETCH_ISSUES_BATCH_SIZE, 100) do |batch_size| Gitlab::Pagination::Keyset::Iterator.new(scope: scope).each_batch(of: batch_size) do |batch| caching.cache_issue_ids(batch) end end end caching.remove_current_project_id_cache end # rubocop: enable CodeReuse/ActiveRecord def assign_indexes(ids, start_index) ids.each_with_index.map do |id, idx| [id, start_index + idx] end end # The method runs in a loop where we try for RETRIES_LIMIT=3 times, to run the update statement on # a number of records(batch size). 
Method gets an array of (id, value) pairs as argument that is used # to build the update query matching by id and updating relative_position = value. If we get a statement # timeout, we split the batch size in half and try(for 3 times again) to batch update on a smaller number of records. # On success, because we know the batch size and we always pick from the beginning of the array param, # we can remove first batch_size number of items from array and continue with the successful batch_size for next batches. # On failures we continue to split batch size to a SMALLEST_BATCH_SIZE limit, which is now set at 5. # # e.g. # 0. items | previous batch size|new batch size | comment # 1. 100 | 100 | 100 | 3 failures -> split the batch size in half # 2. 100 | 100 | 50 | 3 failures -> split the batch size in half again # 3. 100 | 50 | 25 | 3 succeed -> so we drop 25 items 3 times, 4th fails -> split the batch size in half again # 5. 25 | 25 | 12 | 3 failures -> split the batch size in half # 6. 25 | 12 | 6 | 3 failures -> we exit because smallest batch size is 5 and we'll be at 3 if we split again def update_positions_with_retry(pairs_with_index, query_name) retry_batch_size = pairs_with_index.size until pairs_with_index.empty? with_retry(retry_batch_size, SMALLEST_BATCH_SIZE) do |batch_size| retry_batch_size = batch_size update_positions(pairs_with_index.first(batch_size), query_name) # pairs_with_index[batch_size - 1] - can be nil for last batch # if last batch is smaller than batch_size, so we just get the last pair. last_pair_in_batch = pairs_with_index[batch_size - 1] || pairs_with_index.last caching.cache_current_index(last_pair_in_batch.last + 1) pairs_with_index = pairs_with_index.drop(batch_size) end end end def update_positions(pairs_with_position, query_name) values = pairs_with_position.map do |id, index| "(#{id}, #{start_position + (index * gap_size)})" end.join(', ') run_update_query(values, query_name) end def run_update_query(values, query_name) Issue.connection.exec_query(<<~SQL, query_name) WITH cte(cte_id, new_pos) AS MATERIALIZED ( SELECT * FROM (VALUES #{values}) as t (id, pos) ) UPDATE #{Issue.table_name} SET relative_position = cte.new_pos FROM cte WHERE cte_id = id SQL end def gaps caching.issue_count - 1 end def gap_size RelativePositioning::MAX_GAP end def start_position @start_position ||= (RelativePositioning::START_POSITION - (gaps / 2) * gap_size).to_i end def with_retry(initial_batch_size, exit_batch_size) retries = 0 batch_size = initial_batch_size begin yield batch_size retries = 0 rescue ActiveRecord::StatementTimeout, ActiveRecord::QueryCanceled => ex raise ex if batch_size < exit_batch_size if (retries += 1) == RETRIES_LIMIT # shrink the batch size in half when RETRIES limit is reached and update still fails perhaps because batch size is still too big batch_size = (batch_size / 2).to_i retries = 0 end retry end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::RelativePositionRebalancingService, :clean_gitlab_redis_shared_state, feature_category: :team_planning do let_it_be(:project, reload: true) { create(:project, :repository_disabled, skip_disk_validation: true) } let_it_be(:user) { project.creator } let_it_be(:start) { RelativePositioning::START_POSITION } let_it_be(:max_pos) { RelativePositioning::MAX_POSITION } let_it_be(:min_pos) { RelativePositioning::MIN_POSITION } let_it_be(:clump_size) { 300 } let_it_be(:unclumped, reload: true) do (1..clump_size).to_a.map do |i| create(:issue, project: project, author: user, relative_position: start + (1024 * i)) end end let_it_be(:end_clump, reload: true) do (1..clump_size).to_a.map do |i| create(:issue, project: project, author: user, relative_position: max_pos - i) end end let_it_be(:start_clump, reload: true) do (1..clump_size).to_a.map do |i| create(:issue, project: project, author: user, relative_position: min_pos + i) end end let_it_be(:nil_clump, reload: true) do (1..100).to_a.map do |i| create(:issue, project: project, author: user, relative_position: nil) end end def issues_in_position_order project.reload.issues.order_by_relative_position.to_a end subject(:service) { described_class.new(Project.id_in(project)) } context 'execute' do it 're-balances a set of issues with clumps at the end and start' do all_issues = start_clump + unclumped + end_clump.reverse expect { service.execute }.not_to change { issues_in_position_order.map(&:id) } caching = service.send(:caching) all_issues.each(&:reset) gaps = all_issues.take(all_issues.count - 1).zip(all_issues.drop(1)).map do |a, b| b.relative_position - a.relative_position end expect(caching.issue_count).to eq(900) expect(gaps).to all(be > RelativePositioning::MIN_GAP) expect(all_issues.first.relative_position).to be > (RelativePositioning::MIN_POSITION * 0.9999) expect(all_issues.last.relative_position).to be < (RelativePositioning::MAX_POSITION * 0.9999) expect(project.root_namespace.issue_repositioning_disabled?).to be false expect(project.issues.with_null_relative_position.count).to eq(100) end it 'is idempotent' do expect do service.execute service.execute end.not_to change { issues_in_position_order.map(&:id) } end it 'acts if the flag is enabled for the root namespace' do issue = create(:issue, project: project, author: user, relative_position: max_pos) expect { service.execute }.to change { issue.reload.relative_position } end it 'acts if the flag is enabled for the group' do issue = create(:issue, project: project, author: user, relative_position: max_pos) project.update!(group: create(:group)) expect { service.execute }.to change { issue.reload.relative_position } end it 'aborts if there are too many rebalances running' do caching = service.send(:caching) allow(caching).to receive(:rebalance_in_progress?).and_return(false) allow(caching).to receive(:concurrent_running_rebalances_count).and_return(10) allow(service).to receive(:caching).and_return(caching) expect { service.execute }.to raise_error(Issues::RelativePositionRebalancingService::TooManyConcurrentRebalances) expect(project.root_namespace.issue_repositioning_disabled?).to be false end it 'resumes a started rebalance even if there are already too many rebalances running' do Gitlab::Redis::SharedState.with do |redis| redis.sadd("gitlab:issues-position-rebalances:running_rebalances", [ "#{::Gitlab::Issues::Rebalancing::State::PROJECT}/#{project.id}", "1/100" ] ) end caching = service.send(:caching) 
allow(caching).to receive(:concurrent_running_rebalances_count).and_return(10) allow(service).to receive(:caching).and_return(caching) expect { service.execute }.not_to raise_error end context 're-balancing is retried on statement timeout exceptions' do subject { service } it 'retries update statement' do call_count = 0 allow(subject).to receive(:run_update_query) do call_count += 1 if call_count < 13 raise(ActiveRecord::QueryCanceled) else call_count = 0 if call_count == 13 + 16 # 16 = 17 sub-batches - 1 call that succeeded as part of 5th batch true end end # call math: # batches start at 100 and are split in half after every 3 retries if ActiveRecord::StatementTimeout exception is raised. # We raise ActiveRecord::StatementTimeout exception for 13 calls: # 1. 100 => 3 calls # 2. 100/2=50 => 3 calls + 3 above = 6 calls, raise ActiveRecord::StatementTimeout # 3. 50/2=25 => 3 calls + 6 above = 9 calls, raise ActiveRecord::StatementTimeout # 4. 25/2=12 => 3 calls + 9 above = 12 calls, raise ActiveRecord::StatementTimeout # 5. 12/2=6 => 1 call + 12 above = 13 calls, run successfully # # so out of 100 elements we created batches of 6 items => 100/6 = 17 sub-batches of 6 or less elements # # project.issues.count: 900 issues, so 9 batches of 100 => 9 * (13+16) = 261 expect(subject).to receive(:update_positions).exactly(261).times.and_call_original subject.execute end end context 'when resuming a stopped rebalance' do before do service.send(:preload_issue_ids) expect(service.send(:caching).get_cached_issue_ids(0, 300)).not_to be_empty # simulate we already rebalanced half the issues index = clump_size * 3 / 2 + 1 service.send(:caching).cache_current_index(index) end it 'rebalances the other half of issues' do expect(subject).to receive(:update_positions_with_retry).exactly(5).and_call_original subject.execute end end shared_examples 'no-op on the retried job' do it 'does not update positions in the 2nd .execute' do original_order = issues_in_position_order.map(&:id) # preloads issue ids on both runs expect(service).to receive(:preload_issue_ids).twice.and_call_original # 1st run performs rebalancing expect(service).to receive(:update_positions_with_retry).exactly(9).times.and_call_original expect { service.execute }.to raise_error(StandardError) # 2nd run is a no-op expect(service).not_to receive(:update_positions_with_retry) expect { service.execute }.to raise_error(StandardError) # order is preserved expect(original_order).to match_array(issues_in_position_order.map(&:id)) end end context 'when error is raised in cache cleanup step' do let_it_be(:root_namespace_id) { project.root_namespace.id } context 'when srem fails' do before do Gitlab::Redis::SharedState.with do |redis| allow(redis).to receive(:srem?).and_raise(StandardError) end end it_behaves_like 'no-op on the retried job' end context 'when delete issues ids sorted set fails' do before do Gitlab::Redis::SharedState.with do |redis| allow(redis).to receive(:del).and_call_original allow(redis).to receive(:del) .with("#{Gitlab::Issues::Rebalancing::State::REDIS_KEY_PREFIX}:#{root_namespace_id}") .and_raise(StandardError) end end it_behaves_like 'no-op on the retried job' end context 'when delete current_index_key fails' do before do Gitlab::Redis::SharedState.with do |redis| allow(redis).to receive(:del).and_call_original allow(redis).to receive(:del) .with("#{Gitlab::Issues::Rebalancing::State::REDIS_KEY_PREFIX}:#{root_namespace_id}:current_index") .and_raise(StandardError) end end it_behaves_like 'no-op on the retried job' end context 
'when setting recently finished key fails' do before do Gitlab::Redis::SharedState.with do |redis| allow(redis).to receive(:set).and_call_original allow(redis).to receive(:set) .with( "#{Gitlab::Issues::Rebalancing::State::RECENTLY_FINISHED_REBALANCE_PREFIX}:2:#{project.id}", anything, anything ) .and_raise(StandardError) end end it 'reruns the next job in full' do original_order = issues_in_position_order.map(&:id) # preloads issue ids on both runs expect(service).to receive(:preload_issue_ids).twice.and_call_original # 1st run performs rebalancing expect(service).to receive(:update_positions_with_retry).exactly(9).times.and_call_original expect { service.execute }.to raise_error(StandardError) # 2nd run performs rebalancing in full expect(service).to receive(:update_positions_with_retry).exactly(9).times.and_call_original expect { service.execute }.to raise_error(StandardError) # order is preserved expect(original_order).to match_array(issues_in_position_order.map(&:id)) end end end end end
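The "call math" comment in the retry example follows directly from the halving rule in `with_retry`: three consecutive timeouts halve the batch, and once the halved size falls below `SMALLEST_BATCH_SIZE` the next timeout is re-raised. The arithmetic can be reproduced with a few lines of plain Ruby, shown only to make the progression explicit.

```ruby
# The batch sizes at which full retry rounds happen, starting from
# UPDATE_BATCH_SIZE (100) with SMALLEST_BATCH_SIZE (5) as the floor.
sizes = [100]
sizes << sizes.last / 2 while sizes.last / 2 >= 5

sizes # => [100, 50, 25, 12, 6], matching the 13-failure setup in the spec
```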
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class BaseService < ::IssuableBaseService extend ::Gitlab::Utils::Override include IncidentManagement::UsageData include IssueTypeHelpers def hook_data(issue, action, old_associations: {}) hook_data = issue.to_hook_data(current_user, old_associations: old_associations) hook_data[:object_attributes][:action] = action hook_data end def reopen_service Issues::ReopenService end def close_service Issues::CloseService end NO_REBALANCING_NEEDED = ((RelativePositioning::MIN_POSITION * 0.9999)..(RelativePositioning::MAX_POSITION * 0.9999)) def rebalance_if_needed(issue) return unless issue return if issue.relative_position.nil? return if NO_REBALANCING_NEEDED.cover?(issue.relative_position) Issues::RebalancingWorker.perform_async(nil, *issue.project.self_or_root_group_ids) end private # overriding this because IssuableBaseService#constructor_container_arg returns { project: value } # Issues::ReopenService constructor signature is different now, it takes container instead of project also # IssuableBaseService#change_state dynamically picks one of the `Issues::ReopenService`, `Epics::ReopenService` or # MergeRequests::ReopenService, so we need this method to return { }container: value } for Issues::ReopenService def self.constructor_container_arg(value) { container: value } end def find_work_item_type_id(issue_type) work_item_type = WorkItems::Type.default_by_type(issue_type) work_item_type ||= WorkItems::Type.default_issue_type work_item_type.id end def filter_params(issue) super params.delete(:issue_type) unless create_issue_type_allowed?(issue, params[:issue_type]) if params[:work_item_type].present? && !create_issue_type_allowed?(project, params[:work_item_type].base_type) params.delete(:work_item_type) end moved_issue = params.delete(:moved_issue) # Setting created_at, updated_at and iid is allowed only for admins and owners or # when moving an issue as we preserve the original issue attributes except id and iid. params.delete(:iid) unless current_user.can?(:set_issue_iid, project) params.delete(:created_at) unless moved_issue || current_user.can?(:set_issue_created_at, project) params.delete(:updated_at) unless moved_issue || current_user.can?(:set_issue_updated_at, project) # Only users with permission to handle error data can add it to issues params.delete(:sentry_issue_attributes) unless current_user.can?(:update_sentry_issue, project) issue.system_note_timestamp = params[:created_at] || params[:updated_at] end override :handle_move_between_ids def handle_move_between_ids(issue) issue.check_repositioning_allowed! if params[:move_between_ids] super rebalance_if_needed(issue) end def handle_escalation_status_change(issue) return unless issue.supports_escalation? if issue.escalation_status ::IncidentManagement::IssuableEscalationStatuses::AfterUpdateService.new( issue, current_user ).execute else ::IncidentManagement::IssuableEscalationStatuses::CreateService.new(issue).execute end end def issuable_for_positioning(id, positioning_scope) return unless id positioning_scope.find(id) end def create_assignee_note(issue, old_assignees) SystemNoteService.change_issuable_assignees( issue, issue.project, current_user, old_assignees) end def execute_hooks(issue, action = 'open', old_associations: {}) issue_data = Gitlab::Lazy.new { hook_data(issue, action, old_associations: old_associations) } hooks_scope = issue.confidential? ? 
:confidential_issue_hooks : :issue_hooks issue.namespace.execute_hooks(issue_data, hooks_scope) issue.namespace.execute_integrations(issue_data, hooks_scope) execute_incident_hooks(issue, issue_data) if issue.work_item_type&.incident? execute_group_mention_hooks(issue, issue_data) if action == 'open' end # We can remove this code after proposal in # https://gitlab.com/gitlab-org/gitlab/-/issues/367550#proposal is updated. def execute_incident_hooks(issue, issue_data) issue_data[:object_kind] = 'incident' issue_data[:event_type] = 'incident' issue.namespace.execute_integrations(issue_data, :incident_hooks) end def execute_group_mention_hooks(issue, issue_data) return unless issue.instance_of?(Issue) args = { mentionable_type: 'Issue', mentionable_id: issue.id, hook_data: issue_data, is_confidential: issue.confidential? } issue.run_after_commit_or_now do Integrations::GroupMentionWorker.perform_async(args) end end def update_project_counter_caches?(issue) super || issue.confidential_changed? end def log_audit_event(issue, user, event_type, message) # defined in EE end end end Issues::BaseService.prepend_mod_with('Issues::BaseService') ```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Issues::BaseService, feature_category: :team_planning do
  describe '#constructor_container_arg' do
    it { expect(described_class.constructor_container_arg("some-value")).to eq({ container: "some-value" }) }
  end
end
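The spec above only pins down `constructor_container_arg`. If `rebalance_if_needed` were covered in the same file, a sketch along these lines would match the class body; the `project` and `user` factories and the service constructor arguments are assumptions borrowed from the sibling specs, not part of this spec.

```ruby
# Sketch only: positions inside NO_REBALANCING_NEEDED never schedule a rebalance.
describe '#rebalance_if_needed' do
  let(:project) { create(:project) }
  let(:user)    { create(:user) }
  let(:service) { described_class.new(container: project, current_user: user) }

  it 'does not schedule a rebalance for a position well inside the allowed range' do
    issue = build_stubbed(:issue, project: project, relative_position: RelativePositioning::START_POSITION)

    expect(Issues::RebalancingWorker).not_to receive(:perform_async)

    service.rebalance_if_needed(issue)
  end
end
```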
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class MoveService < Issuable::Clone::BaseService extend ::Gitlab::Utils::Override MoveError = Class.new(StandardError) def execute(issue, target_project) @target_project = target_project verify_can_move_issue!(issue, target_project) super notify_participants # Updates old issue sent notifications allowing # to receive service desk emails on the new moved issue. update_service_desk_sent_notifications copy_email_participants queue_copy_designs new_entity end private attr_reader :target_project def verify_can_move_issue!(issue, target_project) unless issue.supports_move_and_clone? raise MoveError, s_('MoveIssue|Cannot move issues of \'%{issue_type}\' type.') % { issue_type: issue.issue_type } end unless issue.can_move?(current_user, @target_project) raise MoveError, s_('MoveIssue|Cannot move issue due to insufficient permissions!') end if @project == @target_project raise MoveError, s_('MoveIssue|Cannot move issue to project it originates from!') end end def update_service_desk_sent_notifications return unless original_entity.from_service_desk? original_entity .sent_notifications.update_all(project_id: new_entity.project_id, noteable_id: new_entity.id) end def copy_email_participants new_attributes = { id: nil, issue_id: new_entity.id } new_participants = original_entity.issue_email_participants.dup new_participants.each do |participant| participant.assign_attributes(new_attributes) end IssueEmailParticipant.bulk_insert!(new_participants) end override :update_old_entity def update_old_entity super rewrite_related_issues mark_as_moved end override :update_new_entity def update_new_entity super copy_contacts end def create_new_entity new_params = { id: nil, iid: nil, relative_position: relative_position, project: target_project, author: original_entity.author, assignee_ids: original_entity.assignee_ids, moved_issue: true } new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params) new_params = new_params.merge(rewritten_old_entity_attributes) # spam checking is not necessary, as no new content is being created. # Skip creation of system notes for existing attributes of the issue. The system notes of the old # issue are copied over so we don't want to end up with duplicate notes. create_result = CreateService.new( container: @target_project, current_user: @current_user, params: new_params, perform_spam_check: false ).execute(skip_system_notes: true) raise MoveError, create_result.errors.join(', ') if create_result.error? && create_result[:issue].blank? create_result[:issue] end def queue_copy_designs return unless original_entity.designs.present? response = DesignManagement::CopyDesignCollection::QueueService.new( current_user, original_entity, new_entity ).execute log_error(response.message) if response.error? 
end def mark_as_moved original_entity.update(moved_to: new_entity) end def rewrite_related_issues source_issue_links = IssueLink.for_source(original_entity) source_issue_links.update_all(source_id: new_entity.id) target_issue_links = IssueLink.for_target(original_entity) target_issue_links.update_all(target_id: new_entity.id) end def copy_contacts return unless original_entity.project.root_ancestor == new_entity.project.root_ancestor new_entity.customer_relations_contacts = original_entity.customer_relations_contacts end def notify_participants notification_service.async.issue_moved(original_entity, new_entity, @current_user) end def add_note_from SystemNoteService.noteable_moved( new_entity, target_project, original_entity, current_user, direction: :from ) end def add_note_to SystemNoteService.noteable_moved( original_entity, old_project, new_entity, current_user, direction: :to ) end end end Issues::MoveService.prepend_mod_with('Issues::MoveService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::MoveService, feature_category: :team_planning do include DesignManagementTestHelpers let_it_be(:user) { create(:user) } let_it_be(:author) { create(:user) } let_it_be(:title) { 'Some issue' } let_it_be(:description) { "Some issue description with mention to #{user.to_reference}" } let_it_be(:group) { create(:group, :private) } let_it_be(:sub_group_1) { create(:group, :private, parent: group) } let_it_be(:sub_group_2) { create(:group, :private, parent: group) } let_it_be(:old_project) { create(:project, namespace: sub_group_1) } let_it_be(:new_project) { create(:project, namespace: sub_group_2) } let(:old_issue) do create(:issue, title: title, description: description, project: old_project, author: author, created_at: 1.day.ago, updated_at: 1.day.ago) end subject(:move_service) do described_class.new(container: old_project, current_user: user) end shared_context 'user can move issue' do before do old_project.add_reporter(user) new_project.add_reporter(user) end end describe '#execute' do shared_context 'issue move executed' do let!(:new_issue) { move_service.execute(old_issue, new_project) } end context 'when issue creation fails' do include_context 'user can move issue' before do allow_next_instance_of(Issues::CreateService) do |create_service| allow(create_service).to receive(:execute).and_return(ServiceResponse.error(message: 'some error')) end end it 'raises a move error' do expect { move_service.execute(old_issue, new_project) }.to raise_error( Issues::MoveService::MoveError, 'some error' ) end end context 'issue movable' do include_context 'user can move issue' it 'creates resource state event' do expect { move_service.execute(old_issue, new_project) }.to change(ResourceStateEvent.where(issue_id: old_issue), :count).by(1) end context 'generic issue' do include_context 'issue move executed' it 'creates a new issue in a new project' do expect(new_issue.project).to eq new_project expect(new_issue.namespace_id).to eq new_project.project_namespace_id end it 'copies issue title' do expect(new_issue.title).to eq title end it 'copies issue description' do expect(new_issue.description).to eq description end it 'adds system note to old issue at the end' do expect(old_issue.notes.last.note).to start_with 'moved to' end it 'adds system note to new issue at the end', :freeze_time do system_note = new_issue.notes.last expect(system_note.note).to start_with 'moved from' expect(system_note.created_at).to be_like_time(Time.current) end it 'closes old issue' do expect(old_issue.closed?).to be true end it 'persists new issue' do expect(new_issue.persisted?).to be true end it 'persists all changes' do expect(old_issue.changed?).to be false expect(new_issue.changed?).to be false end it 'preserves author' do expect(new_issue.author).to eq author end it 'creates a new internal id for issue' do expect(new_issue.iid).to be 1 end it 'marks issue as moved' do expect(old_issue.moved?).to eq true expect(old_issue.moved_to).to eq new_issue end it 'marks issue as closed' do expect(old_issue.closed?).to eq true end it 'preserves create time' do expect(old_issue.created_at).to eq new_issue.created_at end end context 'issue with award emoji' do let!(:award_emoji) { create(:award_emoji, awardable: old_issue) } it 'copies the award emoji' do old_issue.reload new_issue = move_service.execute(old_issue, new_project) expect(old_issue.award_emoji.first.name).to eq new_issue.reload.award_emoji.first.name end end context 'issue with milestone' do 
let(:milestone) { create(:milestone, group: sub_group_1) } let(:new_project) { create(:project, namespace: sub_group_1) } let(:old_issue) do create(:issue, title: title, description: description, project: old_project, author: author, milestone: milestone) end before do create(:resource_milestone_event, issue: old_issue, milestone: milestone, action: :add) end it 'does not create extra milestone events' do new_issue = move_service.execute(old_issue, new_project) expect(new_issue.resource_milestone_events.count).to eq(old_issue.resource_milestone_events.count) end end context 'issue with due date' do let(:old_issue) do create(:issue, title: title, description: description, project: old_project, author: author, due_date: '2020-01-10') end before do old_issue.update!(due_date: Date.today) SystemNoteService.change_start_date_or_due_date(old_issue, old_project, author, old_issue.previous_changes.slice('due_date')) end it 'does not create extra system notes' do new_issue = move_service.execute(old_issue, new_project) expect(new_issue.notes.count).to eq(old_issue.notes.count) end end context 'issue with assignee' do let_it_be(:assignee) { create(:user) } before do old_issue.assignees = [assignee] end it 'preserves assignee with access to the new issue' do new_project.add_reporter(assignee) new_issue = move_service.execute(old_issue, new_project) expect(new_issue.assignees).to eq([assignee]) end it 'ignores assignee without access to the new issue' do new_issue = move_service.execute(old_issue, new_project) expect(new_issue.assignees).to be_empty end end context 'issue with contacts' do let_it_be(:contacts) { create_list(:contact, 2, group: group) } before do old_issue.customer_relations_contacts = contacts end it 'preserves contacts' do new_issue = move_service.execute(old_issue, new_project) expect(new_issue.customer_relations_contacts).to eq(contacts) end context 'when moving to another root group' do let(:another_project) { create(:project, namespace: create(:group)) } before do another_project.add_reporter(user) end it 'does not preserve contacts' do new_issue = move_service.execute(old_issue, another_project) expect(new_issue.customer_relations_contacts).to be_empty end end end context 'moving to same project' do let(:new_project) { old_project } it 'raises error' do expect { move_service.execute(old_issue, new_project) } .to raise_error(StandardError, /Cannot move issue/) end end context 'project issue hooks' do let_it_be(:old_project_hook) { create(:project_hook, project: old_project, issues_events: true) } let_it_be(:new_project_hook) { create(:project_hook, project: new_project, issues_events: true) } let(:expected_new_project_hook_payload) do hash_including( event_type: 'issue', object_kind: 'issue', object_attributes: include( project_id: new_project.id, state: 'opened', action: 'open' ) ) end let(:expected_old_project_hook_payload) do hash_including( event_type: 'issue', object_kind: 'issue', changes: { state_id: { current: 2, previous: 1 }, closed_at: { current: kind_of(Time), previous: nil }, updated_at: { current: kind_of(Time), previous: kind_of(Time) } }, object_attributes: include( id: old_issue.id, closed_at: kind_of(Time), state: 'closed', action: 'close' ) ) end it 'executes project issue hooks for both projects' do expect_next_instance_of(WebHookService, new_project_hook, expected_new_project_hook_payload, 'issue_hooks') do |service| expect(service).to receive(:async_execute).once end expect_next_instance_of(WebHookService, old_project_hook, expected_old_project_hook_payload, 
'issue_hooks') do |service| expect(service).to receive(:async_execute).once end move_service.execute(old_issue, new_project) end end # These tests verify that notes are copied. More thorough tests are in # the unit test for Notes::CopyService. context 'issue with notes' do let!(:notes) do [ create(:note, noteable: old_issue, project: old_project, created_at: 2.weeks.ago, updated_at: 1.week.ago), create(:note, noteable: old_issue, project: old_project) ] end let(:copied_notes) { new_issue.notes.limit(notes.size) } # Remove the system note added by the copy itself include_context 'issue move executed' it 'copies existing notes in order' do expect(copied_notes.order('id ASC').pluck(:note)).to eq(notes.map(&:note)) end end context 'issue with a design', :clean_gitlab_redis_shared_state do let_it_be(:new_project) { create(:project) } let!(:design) { create(:design, :with_lfs_file, issue: old_issue) } let!(:note) { create(:diff_note_on_design, noteable: design, issue: old_issue, project: old_issue.project) } let(:subject) { move_service.execute(old_issue, new_project) } before do enable_design_management end it 'calls CopyDesignCollection::QueueService' do expect(DesignManagement::CopyDesignCollection::QueueService).to receive(:new) .with(user, old_issue, kind_of(Issue)) .and_call_original subject end it 'logs if QueueService returns an error', :aggregate_failures do error_message = 'error' expect_next_instance_of(DesignManagement::CopyDesignCollection::QueueService) do |service| expect(service).to receive(:execute).and_return( ServiceResponse.error(message: error_message) ) end expect(Gitlab::AppLogger).to receive(:error).with(error_message) subject end # Perform a small integration test to ensure the services and worker # can correctly create designs. it 'copies the design and its notes', :sidekiq_inline, :aggregate_failures do new_issue = subject expect(new_issue.designs.size).to eq(1) expect(new_issue.designs.first.notes.size).to eq(1) end end context 'issue relative position' do let(:subject) { move_service.execute(old_issue, new_project) } it_behaves_like 'copy or reset relative position' end context 'issue with escalation status' do it 'keeps the escalation status' do escalation_status = create(:incident_management_issuable_escalation_status, issue: old_issue) move_service.execute(old_issue, new_project) expect(escalation_status.reload.issue).to eq(old_issue) end end end describe 'move permissions' do let(:move) { move_service.execute(old_issue, new_project) } context 'user is reporter in both projects' do include_context 'user can move issue' it { expect { move }.not_to raise_error } end context 'user is reporter only in new project' do before do new_project.add_reporter(user) end it { expect { move }.to raise_error(StandardError, /permissions/) } end context 'user is reporter only in old project' do before do old_project.add_reporter(user) end it { expect { move }.to raise_error(StandardError, /permissions/) } end context 'user is reporter in one project and guest in another' do before do new_project.add_guest(user) old_project.add_reporter(user) end it { expect { move }.to raise_error(StandardError, /permissions/) } end context 'issue has already been moved' do include_context 'user can move issue' let(:moved_to_issue) { create(:issue) } let(:old_issue) do create(:issue, project: old_project, author: author, moved_to: moved_to_issue) end it { expect { move }.to raise_error(StandardError, /permissions/) } end context 'issue is not persisted' do include_context 'user can move issue' 
let(:old_issue) { build(:issue, project: old_project, author: author) } it { expect { move }.to raise_error(StandardError, /permissions/) } end end end describe '#rewrite_related_issues' do include_context 'user can move issue' let(:admin) { create(:admin) } let(:authorized_project) { create(:project) } let(:authorized_project2) { create(:project) } let(:unauthorized_project) { create(:project) } let(:authorized_issue_b) { create(:issue, project: authorized_project) } let(:authorized_issue_c) { create(:issue, project: authorized_project2) } let(:authorized_issue_d) { create(:issue, project: authorized_project2) } let(:unauthorized_issue) { create(:issue, project: unauthorized_project) } let!(:issue_link_a) { create(:issue_link, source: old_issue, target: authorized_issue_b) } let!(:issue_link_b) { create(:issue_link, source: old_issue, target: unauthorized_issue) } let!(:issue_link_c) { create(:issue_link, source: old_issue, target: authorized_issue_c) } let!(:issue_link_d) { create(:issue_link, source: authorized_issue_d, target: old_issue) } before do authorized_project.add_developer(user) authorized_project.add_developer(admin) authorized_project2.add_developer(user) authorized_project2.add_developer(admin) end context 'multiple related issues' do context 'when admin mode is enabled', :enable_admin_mode do it 'moves all related issues and retains permissions' do new_issue = move_service.execute(old_issue, new_project) expect(new_issue.related_issues(admin)) .to match_array([authorized_issue_b, authorized_issue_c, authorized_issue_d, unauthorized_issue]) expect(new_issue.related_issues(user)) .to match_array([authorized_issue_b, authorized_issue_c, authorized_issue_d]) expect(authorized_issue_d.related_issues(user)) .to match_array([new_issue]) end end context 'when admin mode is disabled' do it 'moves all related issues and retains permissions' do new_issue = move_service.execute(old_issue, new_project) expect(new_issue.related_issues(admin)) .to match_array([authorized_issue_b, authorized_issue_c, authorized_issue_d]) expect(new_issue.related_issues(user)) .to match_array([authorized_issue_b, authorized_issue_c, authorized_issue_d]) expect(authorized_issue_d.related_issues(user)) .to match_array([new_issue]) end end end end context 'updating sent notifications' do let!(:old_issue_notification_1) { create(:sent_notification, project: old_issue.project, noteable: old_issue) } let!(:old_issue_notification_2) { create(:sent_notification, project: old_issue.project, noteable: old_issue) } let!(:other_issue_notification) { create(:sent_notification, project: old_issue.project) } include_context 'user can move issue' context 'when issue is from service desk' do before do allow(old_issue).to receive(:from_service_desk?).and_return(true) end it 'updates moved issue sent notifications' do new_issue = move_service.execute(old_issue, new_project) old_issue_notification_1.reload old_issue_notification_2.reload expect(old_issue_notification_1.project_id).to eq(new_issue.project_id) expect(old_issue_notification_1.noteable_id).to eq(new_issue.id) expect(old_issue_notification_2.project_id).to eq(new_issue.project_id) expect(old_issue_notification_2.noteable_id).to eq(new_issue.id) end it 'does not update other issues sent notifications' do expect do move_service.execute(old_issue, new_project) other_issue_notification.reload end.not_to change { other_issue_notification.noteable_id } end end context 'when issue is not from service desk' do it 'does not update sent notifications' do 
move_service.execute(old_issue, new_project) old_issue_notification_1.reload old_issue_notification_2.reload expect(old_issue_notification_1.project_id).to eq(old_issue.project_id) expect(old_issue_notification_1.noteable_id).to eq(old_issue.id) expect(old_issue_notification_2.project_id).to eq(old_issue.project_id) expect(old_issue_notification_2.noteable_id).to eq(old_issue.id) end end end context 'copying email participants' do let!(:participant1) { create(:issue_email_participant, email: '[email protected]', issue: old_issue) } let!(:participant2) { create(:issue_email_participant, email: '[email protected]', issue: old_issue) } let!(:participant3) { create(:issue_email_participant, email: '[email protected]') } include_context 'user can move issue' subject(:new_issue) do move_service.execute(old_issue, new_project) end it 'copies moved issue email participants' do new_issue expect(participant1.reload.issue).to eq(old_issue) expect(participant2.reload.issue).to eq(old_issue) expect(new_issue.issue_email_participants.pluck(:email)) .to match_array([participant1.email, participant2.email]) end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class CloneService < Issuable::Clone::BaseService CloneError = Class.new(StandardError) def execute(issue, target_project, with_notes: false) @target_project = target_project @with_notes = with_notes verify_can_clone_issue!(issue, target_project) super(issue, target_project) notify_participants queue_copy_designs new_entity end private attr_reader :target_project attr_reader :with_notes def verify_can_clone_issue!(issue, target_project) unless issue.supports_move_and_clone? raise CloneError, s_('CloneIssue|Cannot clone issues of \'%{issue_type}\' type.') % { issue_type: issue.issue_type } end unless issue.can_clone?(current_user, target_project) raise CloneError, s_('CloneIssue|Cannot clone issue due to insufficient permissions!') end if target_project.pending_delete? raise CloneError, s_('CloneIssue|Cannot clone issue to target project as it is pending deletion.') end end def update_new_entity # we don't call `super` because we want to be able to decide whether or not to copy all comments over. update_new_entity_description if with_notes copy_notes copy_resource_events end end def update_old_entity # no-op # The base_service closes the old issue, we don't want that, so we override here so nothing happens. end def create_new_entity new_params = { id: nil, iid: nil, relative_position: relative_position, project: target_project, author: current_user, assignee_ids: original_entity.assignee_ids } new_params = original_entity.serializable_hash.symbolize_keys.merge(new_params) new_params = new_params.merge(rewritten_old_entity_attributes) new_params.delete(:created_at) new_params.delete(:updated_at) # spam checking is not necessary, as no new content is being created. # Skip creation of system notes for existing attributes of the issue when cloning with notes. # The system notes of the old issue are copied over so we don't want to end up with duplicate notes. # When cloning without notes, we want to generate system notes for the attributes that were copied. create_result = CreateService.new( container: target_project, current_user: current_user, params: new_params, perform_spam_check: false ).execute(skip_system_notes: with_notes) raise CloneError, create_result.errors.join(', ') if create_result.error? && create_result[:issue].blank? create_result[:issue] end def queue_copy_designs return unless original_entity.designs.present? response = DesignManagement::CopyDesignCollection::QueueService.new( current_user, original_entity, new_entity ).execute log_error(response.message) if response.error? end def notify_participants notification_service.async.issue_cloned(original_entity, new_entity, current_user) end def add_note_from SystemNoteService.noteable_cloned( new_entity, target_project, original_entity, current_user, direction: :from, created_at: new_entity.created_at ) end def add_note_to SystemNoteService.noteable_cloned(original_entity, old_project, new_entity, current_user, direction: :to) end end end Issues::CloneService.prepend_mod_with('Issues::CloneService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::CloneService, feature_category: :team_planning do include DesignManagementTestHelpers let_it_be(:user) { create(:user) } let_it_be(:author) { create(:user) } let_it_be(:title) { 'Some issue' } let_it_be(:description) { "Some issue description with mention to #{user.to_reference}" } let_it_be(:group) { create(:group, :private) } let_it_be(:sub_group_1) { create(:group, :private, parent: group) } let_it_be(:sub_group_2) { create(:group, :private, parent: group) } let_it_be(:old_project) { create(:project, namespace: sub_group_1) } let_it_be(:new_project) { create(:project, namespace: sub_group_2) } let_it_be(:old_issue, reload: true) do create(:issue, title: title, description: description, project: old_project, author: author) end let(:with_notes) { false } subject(:clone_service) do described_class.new(container: old_project, current_user: user) end shared_context 'user can clone issue' do before do old_project.add_reporter(user) new_project.add_reporter(user) end end describe '#execute' do context 'issue movable' do include_context 'user can clone issue' context 'when issue creation fails' do before do allow_next_instance_of(Issues::CreateService) do |create_service| allow(create_service).to receive(:execute).and_return(ServiceResponse.error(message: 'some error')) end end it 'raises a clone error' do expect { clone_service.execute(old_issue, new_project) }.to raise_error( Issues::CloneService::CloneError, 'some error' ) end end context 'generic issue' do let!(:new_issue) { clone_service.execute(old_issue, new_project, with_notes: with_notes) } it 'creates a new issue in the selected project' do expect do clone_service.execute(old_issue, new_project) end.to change { new_project.issues.count }.by(1) end it 'copies issue title' do expect(new_issue.title).to eq title end it 'copies issue description' do expect(new_issue.description).to eq description end it 'adds system note to old issue at the end' do expect(old_issue.notes.last.note).to start_with 'cloned to' end it 'adds system note to new issue at the start' do # We set an assignee so an assignee system note will be generated and # we can assert that the "cloned from" note is the first one assignee = create(:user) new_project.add_developer(assignee) old_issue.assignees = [assignee] new_issue = clone_service.execute(old_issue, new_project) expect(new_issue.notes.size).to eq(2) cloned_from_note = new_issue.notes.last expect(cloned_from_note.note).to start_with 'cloned from' expect(new_issue.notes.fresh.first).to eq(cloned_from_note) end it 'keeps old issue open' do expect(old_issue.open?).to be true end it 'persists new issue' do expect(new_issue.persisted?).to be true end it 'persists all changes' do expect(old_issue.changed?).to be false expect(new_issue.changed?).to be false end it 'sets the current user as author' do expect(new_issue.author).to eq user end it 'creates a new internal id for issue' do expect(new_issue.iid).to be_present end it 'sets created_at of new issue to the time of clone' do future_time = 5.days.from_now travel_to(future_time) do new_issue = clone_service.execute(old_issue, new_project, with_notes: with_notes) expect(new_issue.created_at).to be_like_time(future_time) end end it 'does not set moved_issue' do expect(old_issue.moved?).to eq(false) end context 'when copying comments' do let(:with_notes) { true } it 'does not create extra system notes' do new_issue = clone_service.execute(old_issue, new_project, with_notes: with_notes) 
expect(new_issue.notes.count).to eq(old_issue.notes.count) end end end context 'issue with system notes and resource events' do before do create(:note, :system, noteable: old_issue, project: old_project) create(:resource_label_event, label: create(:label, project: old_project), issue: old_issue) create(:resource_state_event, issue: old_issue, state: :reopened) create(:resource_milestone_event, issue: old_issue, action: 'remove', milestone_id: nil) end it 'does not copy system notes and resource events' do new_issue = clone_service.execute(old_issue, new_project) # 1 here is for the "cloned from" system note expect(new_issue.notes.count).to eq(1) expect(new_issue.resource_state_events).to be_empty expect(new_issue.resource_milestone_events).to be_empty end end context 'issue with award emoji' do let!(:award_emoji) { create(:award_emoji, awardable: old_issue) } it 'does not copy the award emoji' do old_issue.reload new_issue = clone_service.execute(old_issue, new_project) expect(new_issue.reload.award_emoji).to be_empty end end context 'issue with milestone' do let(:milestone) { create(:milestone, group: sub_group_1) } let(:new_project) { create(:project, namespace: sub_group_1) } let(:old_issue) do create(:issue, title: title, description: description, project: old_project, author: author, milestone: milestone) end it 'copies the milestone and creates a resource_milestone_event' do new_issue = clone_service.execute(old_issue, new_project) expect(new_issue.milestone).to eq(milestone) expect(new_issue.resource_milestone_events.count).to eq(1) end end context 'issue with label' do let(:label) { create(:group_label, group: sub_group_1) } let(:new_project) { create(:project, namespace: sub_group_1) } let(:old_issue) do create(:issue, project: old_project, labels: [label]) end it 'copies the label and creates a resource_label_event' do new_issue = clone_service.execute(old_issue, new_project) expect(new_issue.labels).to contain_exactly(label) expect(new_issue.resource_label_events.count).to eq(1) end end context 'issue with due date' do let(:date) { Date.parse('2020-01-10') } let(:new_date) { date + 1.week } let(:old_issue) do create(:issue, title: title, description: description, project: old_project, author: author, due_date: date) end before do old_issue.update!(due_date: new_date) SystemNoteService.change_start_date_or_due_date(old_issue, old_project, author, old_issue.previous_changes.slice('due_date')) end it 'keeps the same due date' do new_issue = clone_service.execute(old_issue, new_project) expect(new_issue.due_date).to eq(old_issue.due_date) end end context 'issue with assignee' do let_it_be(:assignee) { create(:user) } before do old_issue.assignees = [assignee] end it 'preserves assignee with access to the new issue' do new_project.add_reporter(assignee) new_issue = clone_service.execute(old_issue, new_project) expect(new_issue.assignees).to eq([assignee]) end it 'ignores assignee without access to the new issue' do new_issue = clone_service.execute(old_issue, new_project) expect(new_issue.assignees).to be_empty end end context 'issue is confidential' do before do old_issue.update_columns(confidential: true) end it 'preserves the confidential flag' do new_issue = clone_service.execute(old_issue, new_project) expect(new_issue.confidential).to be true end end context 'moving to same project' do it 'also works' do new_issue = clone_service.execute(old_issue, old_project) expect(new_issue.project).to eq(old_project) expect(new_issue.iid).not_to eq(old_issue.iid) end end context 'project 
issue hooks' do let!(:hook) { create(:project_hook, project: old_project, issues_events: true) } it 'executes project issue hooks' do allow_next_instance_of(WebHookService) do |instance| allow(instance).to receive(:execute) end # Ideally, we'd test that `WebHookWorker.jobs.size` increased by 1, # but since the entire spec run takes place in a transaction, we never # actually get to the `after_commit` hook that queues these jobs. expect { clone_service.execute(old_issue, new_project) } .not_to raise_error # Sidekiq::Worker::EnqueueFromTransactionError end end # These tests verify that notes are copied. More thorough tests are in # the unit test for Notes::CopyService. context 'issue with notes' do let_it_be(:notes) do [ create(:note, noteable: old_issue, project: old_project, created_at: 2.weeks.ago, updated_at: 1.week.ago), create(:note, noteable: old_issue, project: old_project) ] end let(:new_issue) { clone_service.execute(old_issue, new_project, with_notes: with_notes) } let(:copied_notes) { new_issue.notes.limit(notes.size) } # Remove the system note added by the copy itself it 'does not copy notes' do # only the system note expect(copied_notes.order('id ASC').pluck(:note).size).to eq(1) end context 'when copying comments' do let(:with_notes) { true } it 'copies existing notes in order' do expect(copied_notes.order('id ASC').pluck(:note)).to eq(notes.map(&:note)) end end end context 'issue with a design', :clean_gitlab_redis_shared_state do let_it_be(:new_project) { create(:project) } let!(:design) { create(:design, :with_lfs_file, issue: old_issue) } let!(:note) { create(:diff_note_on_design, noteable: design, issue: old_issue, project: old_issue.project) } let(:subject) { clone_service.execute(old_issue, new_project) } before do enable_design_management end it 'calls CopyDesignCollection::QueueService' do expect(DesignManagement::CopyDesignCollection::QueueService).to receive(:new) .with(user, old_issue, kind_of(Issue)) .and_call_original subject end it 'logs if QueueService returns an error', :aggregate_failures do error_message = 'error' expect_next_instance_of(DesignManagement::CopyDesignCollection::QueueService) do |service| expect(service).to receive(:execute).and_return( ServiceResponse.error(message: error_message) ) end expect(Gitlab::AppLogger).to receive(:error).with(error_message) subject end # Perform a small integration test to ensure the services and worker # can correctly create designs. 
it 'copies the design and its notes', :sidekiq_inline, :aggregate_failures do new_issue = subject expect(new_issue.designs.size).to eq(1) expect(new_issue.designs.first.notes.size).to eq(1) end end context 'issue relative position' do let(:subject) { clone_service.execute(old_issue, new_project) } it_behaves_like 'copy or reset relative position' end end describe 'clone permissions' do let(:clone) { clone_service.execute(old_issue, new_project) } context 'target project is pending deletion' do include_context 'user can clone issue' before do new_project.update_columns(pending_delete: true) end after do new_project.update_columns(pending_delete: false) end it { expect { clone }.to raise_error(Issues::CloneService::CloneError, /pending deletion/) } end context 'user is reporter in both projects' do include_context 'user can clone issue' it { expect { clone }.not_to raise_error } end context 'user is reporter only in new project' do before do new_project.add_reporter(user) end it { expect { clone }.to raise_error(StandardError, /permissions/) } end context 'user is reporter only in old project' do before do old_project.add_reporter(user) end it { expect { clone }.to raise_error(StandardError, /permissions/) } end context 'user is reporter in one project and guest in another' do before do new_project.add_guest(user) old_project.add_reporter(user) end it { expect { clone }.to raise_error(StandardError, /permissions/) } end context 'issue is not persisted' do include_context 'user can clone issue' let(:old_issue) { build(:issue, project: old_project, author: author) } it { expect { clone }.to raise_error(StandardError, /permissions/) } end end end end
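# Illustration only, not part of the spec above: a minimal usage sketch of
# Issues::CloneService as exercised by these examples, assuming FactoryBot
# factories from the suite and a user with reporter access to both projects.
# Unlike a move, cloning leaves the original issue open and does not set moved_to.
user        = create(:user)
old_project = create(:project)
new_project = create(:project)
[old_project, new_project].each { |project| project.add_reporter(user) }
old_issue   = create(:issue, project: old_project)

clone_service = Issues::CloneService.new(container: old_project, current_user: user)
new_issue = clone_service.execute(old_issue, new_project, with_notes: true)
old_issue.open?      # => true
new_issue.persisted? # => true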
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true

module Issues
  class ReorderService < Issues::BaseService
    include Gitlab::Utils::StrongMemoize

    def execute(issue)
      return false unless can?(current_user, :update_issue, issue)
      return false unless move_between_ids

      update(issue, { move_between_ids: move_between_ids })
    end

    private

    def update(issue, attrs)
      ::Issues::UpdateService.new(container: project, current_user: current_user, params: attrs).execute(issue)
    rescue ActiveRecord::RecordNotFound
      false
    end

    def move_between_ids
      strong_memoize(:move_between_ids) do
        ids = [params[:move_before_id], params[:move_after_id]]
          .map(&:to_i)
          .map { |m| m > 0 ? m : nil }

        ids.any? ? ids : nil
      end
    end
  end
end
```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Issues::ReorderService, feature_category: :team_planning do
  let_it_be(:user) { create_default(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project, reload: true) { create(:project, namespace: group) }

  shared_examples 'issues reorder service' do
    context 'when reordering issues' do
      it 'returns false with no params' do
        expect(service({}).execute(issue1)).to be_falsey
      end

      it 'returns false with both invalid params' do
        params = { move_after_id: nil, move_before_id: non_existing_record_id }

        expect(service(params).execute(issue1)).to be_falsey
      end

      it 'sorts issues' do
        params = { move_after_id: issue2.id, move_before_id: issue3.id }

        service(params).execute(issue1)

        expect(issue1.relative_position)
          .to be_between(issue2.relative_position, issue3.relative_position)
      end

      it 'sorts issues if only given one neighbour, on the left' do
        params = { move_before_id: issue3.id }

        service(params).execute(issue1)

        expect(issue1.relative_position).to be > issue3.relative_position
      end

      it 'sorts issues if only given one neighbour, on the right' do
        params = { move_after_id: issue1.id }

        service(params).execute(issue3)

        expect(issue3.relative_position).to be < issue1.relative_position
      end
    end
  end

  describe '#execute' do
    let_it_be(:issue1, reload: true) { create(:issue, project: project, relative_position: 10) }
    let_it_be(:issue2) { create(:issue, project: project, relative_position: 20) }
    let_it_be(:issue3, reload: true) { create(:issue, project: project, relative_position: 30) }

    context 'when ordering issues in a project' do
      before do
        project.add_developer(user)
      end

      it_behaves_like 'issues reorder service'
    end

    context 'when ordering issues in a group' do
      before do
        group.add_developer(user)
      end

      it_behaves_like 'issues reorder service'

      context 'when ordering in a group issue list' do
        let(:params) { { move_after_id: issue2.id, move_before_id: issue3.id } }

        subject { service(params) }

        it 'sorts issues' do
          project2 = create(:project, namespace: group)
          issue4 = create(:issue, project: project2)

          subject.execute(issue4)

          expect(issue4.relative_position)
            .to be_between(issue2.relative_position, issue3.relative_position)
        end
      end
    end
  end

  def service(params)
    described_class.new(container: project, current_user: user, params: params)
  end
end
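# Not part of the spec above: a self-contained sketch of the id parsing that
# Issues::ReorderService#move_between_ids performs (the helper name below is
# hypothetical; the real logic lives in the private method shown in the class).
# String ids are cast to integers, non-positive or missing values become nil,
# and the pair is dropped entirely when both ends are blank.
def parse_move_between_ids(params)
  ids = [params[:move_before_id], params[:move_after_id]]
    .map(&:to_i)
    .map { |m| m > 0 ? m : nil }

  ids.any? ? ids : nil
end

parse_move_between_ids(move_before_id: '0', move_after_id: '42') # => [nil, 42]
parse_move_between_ids(move_before_id: nil, move_after_id: nil)  # => nil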
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true

module Issues
  class PrepareImportCsvService < Import::PrepareService
    extend ::Gitlab::Utils::Override

    private

    override :worker
    def worker
      ImportIssuesCsvWorker
    end

    override :success_message
    def success_message
      _("Your issues are being imported. Once finished, you'll get a confirmation email.")
    end
  end
end
```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Issues::PrepareImportCsvService, feature_category: :team_planning do
  let_it_be(:project) { create(:project) }
  let_it_be(:user) { create(:user) }

  let(:file) { double }
  let(:upload_service) { double }
  let(:uploader) { double }
  let(:upload) { double }

  let(:subject) do
    described_class.new(project, user, file: file).execute
  end

  context 'when file is uploaded correctly' do
    let(:upload_id) { 99 }

    before do
      mock_upload
    end

    it 'returns a success message' do
      result = subject

      expect(result[:status]).to eq(:success)
      expect(result[:message]).to eq("Your issues are being imported. Once finished, you'll get a confirmation email.")
    end

    it 'enqueues the ImportIssuesCsvWorker' do
      expect(ImportIssuesCsvWorker).to receive(:perform_async).with(user.id, project.id, upload_id)

      subject
    end
  end

  context 'when file upload fails' do
    before do
      mock_upload(false)
    end

    it 'returns an error message' do
      result = subject

      expect(result[:status]).to eq(:error)
      expect(result[:message]).to eq('File upload error.')
    end
  end
end
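# The spec above relies on a shared `mock_upload` helper defined elsewhere in the
# suite. Purely as an illustration of what it needs to provide (the wiring below
# is an assumption, not the real helper): a successful upload must yield an
# upload whose id matches `upload_id`, and a failed upload must make the upload
# service return nil.
def mock_upload(success = true)
  allow(UploadService).to receive(:new).and_return(upload_service)

  if success
    allow(upload_service).to receive(:execute).and_return(uploader)
    allow(uploader).to receive(:upload).and_return(upload)
    allow(upload).to receive(:id).and_return(upload_id)
  else
    allow(upload_service).to receive(:execute).and_return(nil)
  end
end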
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true

module Issues
  class ImportCsvService < Issuable::ImportCsv::BaseService
    def execute
      record_import_attempt

      super
    end

    def email_results_to_user
      Notify.import_issues_csv_email(user.id, project.id, results).deliver_later
    end

    private

    def create_object(attributes)
      super[:issue]
    end

    def create_object_class
      Issues::CreateService
    end

    def extra_create_service_params
      { perform_spam_check: perform_spam_check? }
    end

    def perform_spam_check?
      !user.can_admin_all_resources?
    end

    def record_import_attempt
      Issues::CsvImport.create!(user: user, project: project)
    end
  end
end

Issues::ImportCsvService.prepend_mod
```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Issues::ImportCsvService, feature_category: :team_planning do
  let(:project) { create(:project) }
  let(:user) { create(:user) }
  let(:assignee) { create(:user, username: 'csv_assignee') }
  let(:file) { fixture_file_upload('spec/fixtures/csv_complex.csv') }

  let(:service) do
    uploader = FileUploader.new(project)
    uploader.store!(file)

    described_class.new(user, project, uploader)
  end

  let!(:test_milestone) { create(:milestone, project: project, title: '15.10') }

  include_examples 'issuable import csv service', 'issue' do
    let(:issuables) { project.issues }
    let(:email_method) { :import_issues_csv_email }
  end

  describe '#execute' do
    subject { service.execute }

    it_behaves_like 'performs a spam check', true

    it 'sets all issuable attributes and executes quick actions' do
      project.add_developer(user)
      project.add_developer(assignee)

      expect { subject }.to change { issuables.count }.by 3

      expect(issuables.reload).to include(
        have_attributes(
          title: 'Title with quote"',
          description: 'Description',
          time_estimate: 3600,
          assignees: include(assignee),
          due_date: Date.new(2022, 6, 28),
          milestone_id: test_milestone.id
        )
      )
    end

    context 'when user is an admin' do
      before do
        allow(user).to receive(:can_admin_all_resources?).and_return(true)
      end

      it_behaves_like 'performs a spam check', false
    end
  end
end
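# Hypothetical sketch, not the real spec/fixtures/csv_complex.csv: a fixture along
# these lines would satisfy the assertions above, using quick actions inside the
# description column to set the time estimate, assignee, due date and milestone.
CSV_SKETCH = <<~CSV
  title,description
  "Title with quote""","Description
  /estimate 1h
  /assign @csv_assignee
  /due 2022-06-28
  /milestone %15.10"
CSV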
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class BuildService < Issues::BaseService include ResolveDiscussions def execute(initialize_callbacks: true) filter_resolve_discussion_params container_param = case container when Project { project: project } when Namespaces::ProjectNamespace { project: container.project } else { namespace: container } end @issue = model_klass.new(issue_params.merge(container_param)).tap do |issue| set_work_item_type(issue) initialize_callbacks!(issue) if initialize_callbacks end end def issue_params_with_info_from_discussions return {} unless merge_request_to_resolve_discussions_of { title: title_from_merge_request, description: description_for_discussions } end def title_from_merge_request "Follow-up from \"#{merge_request_to_resolve_discussions_of.title}\"" end def description_for_discussions if discussions_to_resolve.empty? return "There are no unresolved discussions. "\ "Review the conversation in #{merge_request_to_resolve_discussions_of.to_reference}" end description = "The following #{'discussion'.pluralize(discussions_to_resolve.size)} "\ "from #{merge_request_to_resolve_discussions_of.to_reference} "\ "should be addressed:" [description, *items_for_discussions].join("\n\n") end def items_for_discussions discussions_to_resolve.map { |discussion| item_for_discussion(discussion) } end def item_for_discussion(discussion) first_note_to_resolve = discussion.first_note_to_resolve || discussion.first_note is_very_first_note = first_note_to_resolve == discussion.first_note action = is_very_first_note ? "started" : "commented on" note_url = Gitlab::UrlBuilder.build(first_note_to_resolve) other_note_count = discussion.notes.size - 1 discussion_info = ["- [ ] #{first_note_to_resolve.author.to_reference} #{action} a [discussion](#{note_url}): "] discussion_info << "(+#{other_note_count} #{'comment'.pluralize(other_note_count)})" if other_note_count > 0 note_without_block_quotes = Banzai::Filter::BlockquoteFenceFilter.new(first_note_to_resolve.note).call spaces = ' ' * 4 quote = note_without_block_quotes.lines.map { |line| "#{spaces}> #{line}" }.join [discussion_info.join(' '), quote].join("\n\n") end def issue_params @issue_params ||= build_issue_params end private def set_work_item_type(issue) work_item_type = if params[:work_item_type_id].present? params.delete(:work_item_type) WorkItems::Type.find_by(id: params.delete(:work_item_type_id)) # rubocop: disable CodeReuse/ActiveRecord else params.delete(:work_item_type) end # We need to support the legacy input params[:issue_type] even if we don't have the issue_type column anymore. # In the future only params[:work_item_type] should be provided base_type = work_item_type&.base_type || params[:issue_type] issue.work_item_type = if create_issue_type_allowed?(container, base_type) work_item_type || WorkItems::Type.default_by_type(base_type) else # If no work item type was provided or not allowed, we need to set it to # the default issue_type WorkItems::Type.default_by_type(::Issue::DEFAULT_ISSUE_TYPE) end end def model_klass ::Issue end def public_params # Additional params may be assigned later (in a CreateService for example) public_issue_params = [ :title, :description, :confidential ] params.slice(*public_issue_params) end def build_issue_params { author: current_user } .merge(issue_params_with_info_from_discussions) .merge(public_params) .with_indifferent_access end end end Issues::BuildService.prepend_mod_with('Issues::BuildService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::BuildService, feature_category: :team_planning do using RSpec::Parameterized::TableSyntax let_it_be(:project) { create(:project, :repository) } let_it_be(:developer) { create(:user) } let_it_be(:reporter) { create(:user) } let_it_be(:guest) { create(:user) } let(:user) { developer } before_all do project.add_developer(developer) project.add_reporter(reporter) project.add_guest(guest) end def build_issue(issue_params = {}) described_class.new(container: project, current_user: user, params: issue_params).execute end context 'for a single discussion' do describe '#execute' do let(:merge_request) { create(:merge_request, title: "Hello world", source_project: project) } let(:discussion) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, note: "Almost done").to_discussion } subject { build_issue(merge_request_to_resolve_discussions_of: merge_request.iid, discussion_to_resolve: discussion.id) } it 'references the noteable title in the issue title' do expect(subject.title).to include('Hello world') end it 'adds the note content to the description' do expect(subject.description).to include('Almost done') end end end context 'for discussions in a merge request' do let(:merge_request) { create(:merge_request_with_diff_notes, source_project: project) } describe '#items_for_discussions' do it 'has an item for each discussion' do create(:diff_note_on_merge_request, noteable: merge_request, project: merge_request.source_project, line_number: 13) service = described_class.new(container: project, current_user: user, params: { merge_request_to_resolve_discussions_of: merge_request.iid }) service.execute expect(service.items_for_discussions.size).to eq(2) end end describe '#item_for_discussion' do let(:service) { described_class.new(container: project, current_user: user, params: { merge_request_to_resolve_discussions_of: merge_request.iid }) } it 'mentions the author of the note' do discussion = create(:diff_note_on_merge_request, author: create(:user, username: 'author')).to_discussion expect(service.item_for_discussion(discussion)).to include('@author') end it 'wraps the note in a blockquote' do note_text = "This is a string\n"\ "\n"\ ">>>\n"\ "with a blockquote\n"\ "> That has a quote\n"\ ">>>\n" note_result = " > This is a string\n"\ " > \n"\ " > \n"\ " > > with a blockquote\n"\ " > > > That has a quote\n"\ " > \n" discussion = create(:diff_note_on_merge_request, note: note_text).to_discussion expect(service.item_for_discussion(discussion)).to include(note_result) end end describe '#execute' do let(:base_params) { { merge_request_to_resolve_discussions_of: merge_request.iid } } context 'without additional params' do subject { build_issue(base_params) } it 'has the merge request reference in the title' do expect(subject.title).to include(merge_request.title) end it 'has the reference of the merge request in the description' do expect(subject.description).to include(merge_request.to_reference) end end it 'uses provided title if title param given' do issue = build_issue(base_params.merge(title: 'What an issue')) expect(issue.title).to eq('What an issue') end it 'uses provided description if description param given' do issue = build_issue(base_params.merge(description: 'Fix at your earliest convenience')) expect(issue.description).to eq('Fix at your earliest convenience') end describe 'with multiple discussions' do let!(:diff_note) { create(:diff_note_on_merge_request, noteable: merge_request, project: 
merge_request.target_project, line_number: 15) } it 'mentions all the authors in the description' do authors = merge_request.resolvable_discussions.map(&:author) expect(build_issue(base_params).description).to include(*authors.map(&:to_reference)) end it 'has a link for each unresolved discussion in the description' do notes = merge_request.resolvable_discussions.map(&:first_note) links = notes.map { |note| Gitlab::UrlBuilder.build(note) } expect(build_issue(base_params).description).to include(*links) end it 'mentions additional notes' do create_list(:diff_note_on_merge_request, 2, noteable: merge_request, project: merge_request.target_project, in_reply_to: diff_note) expect(build_issue(base_params).description).to include('(+2 comments)') end end end end context 'For a merge request without discussions' do let(:merge_request) { create(:merge_request, source_project: project) } describe '#execute' do it 'mentions the merge request in the description' do issue = build_issue(merge_request_to_resolve_discussions_of: merge_request.iid) expect(issue.description).to include("Review the conversation in #{merge_request.to_reference}") end end end describe '#execute' do describe 'setting milestone' do context 'when developer' do it 'builds a new issues with given params' do milestone = create(:milestone, project: project) issue = build_issue(milestone_id: milestone.id) expect(issue.milestone).to eq(milestone) end it 'sets milestone to nil if it is not available for the project' do milestone = create(:milestone, project: create(:project)) issue = build_issue(milestone_id: milestone.id) expect(issue.milestone).to be_nil end end context 'when user is not a project member' do let(:user) { create(:user) } it 'cannot set milestone' do milestone = create(:milestone, project: project) issue = build_issue(milestone_id: milestone.id) expect(issue.milestone).to be_nil end end end describe 'setting issue type' do context 'with a corresponding WorkItems::Type' do let_it_be(:type_task) { WorkItems::Type.default_by_type(:task) } let_it_be(:type_task_id) { type_task.id } let_it_be(:type_issue_id) { WorkItems::Type.default_issue_type.id } let_it_be(:type_incident_id) { WorkItems::Type.default_by_type(:incident).id } let(:combined_params) { { work_item_type: type_task, issue_type: 'issue' } } let(:work_item_params) { { work_item_type_id: type_task_id } } where(:issue_params, :current_user, :work_item_type_id, :resulting_issue_type) do { issue_type: nil } | ref(:guest) | ref(:type_issue_id) | 'issue' { issue_type: 'issue' } | ref(:guest) | ref(:type_issue_id) | 'issue' { issue_type: 'incident' } | ref(:guest) | ref(:type_issue_id) | 'issue' { issue_type: 'incident' } | ref(:reporter) | ref(:type_incident_id) | 'incident' ref(:combined_params) | ref(:reporter) | ref(:type_task_id) | 'task' ref(:work_item_params) | ref(:reporter) | ref(:type_task_id) | 'task' # update once support for test_case is enabled { issue_type: 'test_case' } | ref(:guest) | ref(:type_issue_id) | 'issue' # update once support for requirement is enabled { issue_type: 'requirement' } | ref(:guest) | ref(:type_issue_id) | 'issue' { issue_type: 'invalid' } | ref(:guest) | ref(:type_issue_id) | 'issue' # ensure that we don't set a value which has a permission check but is an invalid issue type { issue_type: 'project' } | ref(:guest) | ref(:type_issue_id) | 'issue' end with_them do let(:user) { current_user } it 'builds an issue' do issue = build_issue(**issue_params) expect(issue.work_item_type_id).to eq(work_item_type_id) end end end end end end
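# Illustration only, mirroring Issues::BuildService#item_for_discussion from the
# class above: each line of the note (after block-quote fences are stripped) is
# indented four spaces and prefixed with "> " so it renders as a nested quote in
# the generated issue description.
note  = "This is a string\nwith a blockquote\n"
quote = note.lines.map { |line| "    > #{line}" }.join
# => "    > This is a string\n    > with a blockquote\n"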
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class CloseService < Issues::BaseService # Closes the supplied issue if the current user is able to do so. def execute(issue, commit: nil, notifications: true, system_note: true, skip_authorization: false) return issue unless can_close?(issue, skip_authorization: skip_authorization) close_issue( issue, closed_via: commit, notifications: notifications, system_note: system_note ) end # Closes the supplied issue without checking if the user is authorized to # do so. # # The code calling this method is responsible for ensuring that a user is # allowed to close the given issue. def close_issue(issue, closed_via: nil, notifications: true, system_note: true) if issue.is_a?(ExternalIssue) close_external_issue(issue, closed_via) return issue end if issue.close(current_user) event_service.close_issue(issue, current_user) create_note(issue, closed_via) if system_note if current_user.project_bot? log_audit_event(issue, current_user, "#{issue.issue_type}_closed_by_project_bot", "Closed #{issue.issue_type.humanize(capitalize: false)} #{issue.title}") end closed_via = _("commit %{commit_id}") % { commit_id: closed_via.id } if closed_via.is_a?(Commit) notification_service.async.close_issue(issue, current_user, { closed_via: closed_via }) if notifications todo_service.close_issue(issue, current_user) perform_incident_management_actions(issue) execute_hooks(issue, 'close') invalidate_cache_counts(issue, users: issue.assignees) issue.update_project_counter_caches track_incident_action(current_user, issue, :incident_closed) if closed_via.is_a?(MergeRequest) store_first_mentioned_in_commit_at(issue, closed_via) Onboarding::ProgressService.new(project.namespace).execute(action: :issue_auto_closed) end Milestones::ClosedIssuesCountService.new(issue.milestone).delete_cache if issue.milestone end issue end private def can_close?(issue, skip_authorization: false) skip_authorization || can?(current_user, :update_issue, issue) || issue.is_a?(ExternalIssue) end def perform_incident_management_actions(issue) resolve_alerts(issue) resolve_incident(issue) end def close_external_issue(issue, closed_via) return unless project.external_issue_tracker&.support_close_issue? project.external_issue_tracker.close_issue(closed_via, issue, current_user) todo_service.close_issue(issue, current_user) end def create_note(issue, current_commit) SystemNoteService.change_status(issue, issue.project, current_user, issue.state, current_commit) end def resolve_alerts(issue) issue.alert_management_alerts.each { |alert| resolve_alert(alert) } end def resolve_alert(alert) return if alert.resolved? issue = alert.issue if alert.resolve SystemNoteService.change_alert_status(alert, Users::Internal.alert_bot, " because #{current_user.to_reference} closed incident #{issue.to_reference(project)}") else Gitlab::AppLogger.warn( message: 'Cannot resolve an associated Alert Management alert', issue_id: issue.id, alert_id: alert.id, alert_errors: alert.errors.messages ) end end def resolve_incident(issue) return unless issue.work_item_type&.incident? 
status = issue.incident_management_issuable_escalation_status || issue.build_incident_management_issuable_escalation_status return unless status.resolve SystemNoteService.change_incident_status(issue, current_user, ' by closing the incident') IncidentManagement::TimelineEvents::CreateService.resolve_incident(issue, current_user) end def store_first_mentioned_in_commit_at(issue, merge_request, max_commit_lookup: 100) metrics = issue.metrics return if metrics.nil? || metrics.first_mentioned_in_commit_at first_commit_timestamp = merge_request.commits(limit: max_commit_lookup).last.try(:authored_date) return unless first_commit_timestamp metrics.update!(first_mentioned_in_commit_at: first_commit_timestamp) end end end Issues::CloseService.prepend_mod_with('Issues::CloseService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::CloseService, feature_category: :team_planning do let(:project) { create(:project, :repository) } let(:delegated_project) { project.project_namespace.project } let(:user) { create(:user, email: "[email protected]") } let(:user2) { create(:user, email: "[email protected]") } let(:guest) { create(:user) } let(:issue) { create(:issue, title: "My issue", project: project, assignees: [user2], author: create(:user)) } let(:external_issue) { ExternalIssue.new('JIRA-123', project) } let(:closing_merge_request) { create(:merge_request, source_project: project) } let(:closing_commit) { create(:commit, project: project) } let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) } before do project.add_maintainer(user) project.add_developer(user2) project.add_guest(guest) end describe '#execute' do let(:service) { described_class.new(container: project, current_user: user) } context 'when skip_authorization is true' do it 'does close the issue even if user is not authorized' do non_authorized_user = create(:user) service = described_class.new(container: project, current_user: non_authorized_user) expect do service.execute(issue, skip_authorization: true) end.to change { issue.reload.state }.from('opened').to('closed') end end it 'checks if the user is authorized to update the issue' do expect(service).to receive(:can?).with(user, :update_issue, issue) .and_call_original service.execute(issue) end it 'does not close the issue when the user is not authorized to do so' do allow(service).to receive(:can?).with(user, :update_issue, issue) .and_return(false) expect(service).not_to receive(:close_issue) expect(service.execute(issue)).to eq(issue) end it 'closes the external issue even when the user is not authorized to do so' do allow(service).to receive(:can?).with(user, :update_issue, external_issue) .and_return(false) expect(service).to receive(:close_issue) .with(external_issue, closed_via: nil, notifications: true, system_note: true) service.execute(external_issue) end it 'closes the issue when the user is authorized to do so' do allow(service).to receive(:can?).with(user, :update_issue, issue) .and_return(true) expect(service).to receive(:close_issue) .with(issue, closed_via: nil, notifications: true, system_note: true) service.execute(issue) end it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do expect do service.execute(issue) BatchLoader::Executor.clear_current end.to change { project.open_issues_count }.from(1).to(0) end it 'invalidates counter cache for assignees' do expect_any_instance_of(User).to receive(:invalidate_issue_cache_counts) service.execute(issue) end it 'does not change escalation status' do resolved = IncidentManagement::Escalatable::STATUSES[:resolved] expect { service.execute(issue) } .to not_change { IncidentManagement::IssuableEscalationStatus.where(issue: issue).count } .and not_change { IncidentManagement::IssuableEscalationStatus.where(status: resolved).count } end context 'issue is incident type' do let(:issue) { create(:incident, project: project) } let(:current_user) { user } subject { service.execute(issue) } it_behaves_like 'an incident management tracked event', :incident_management_incident_closed it_behaves_like 'Snowplow event tracking with RedisHLL context' do let(:namespace) { issue.namespace } let(:category) { described_class.to_s } let(:action) { 'incident_management_incident_closed' } let(:label) { 
'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' } end it 'creates a new escalation resolved escalation status', :aggregate_failures do expect { service.execute(issue) }.to change { IncidentManagement::IssuableEscalationStatus.where(issue: issue).count }.by(1) expect(issue.incident_management_issuable_escalation_status).to be_resolved end context 'when there is an escalation status' do before do create(:incident_management_issuable_escalation_status, issue: issue) end it 'changes escalations status to resolved' do expect { service.execute(issue) }.to change { issue.incident_management_issuable_escalation_status.reload.resolved? }.to(true) end it 'adds a system note', :aggregate_failures do expect { service.execute(issue) }.to change { issue.notes.count }.by(1) new_note = issue.notes.last expect(new_note.note).to eq('changed the incident status to **Resolved** by closing the incident') expect(new_note.author).to eq(user) end it 'adds a timeline event', :aggregate_failures do expect(IncidentManagement::TimelineEvents::CreateService) .to receive(:resolve_incident) .with(issue, user) .and_call_original expect { service.execute(issue) }.to change { issue.incident_management_timeline_events.count }.by(1) end context 'when the escalation status did not change to resolved' do let(:escalation_status) { instance_double('IncidentManagement::IssuableEscalationStatus', resolve: false, status_name: 'acknowledged') } before do allow(issue).to receive(:incident_management_issuable_escalation_status).and_return(escalation_status) end it 'does not create a system note' do expect { service.execute(issue) }.not_to change { issue.notes.count } end it 'does not create a timeline event' do expect { service.execute(issue) }.not_to change { issue.incident_management_timeline_events.count } end end end end end describe '#close_issue' do context 'with external issue' do context 'with an active external issue tracker supporting close_issue' do let!(:external_issue_tracker) { create(:jira_integration, project: project) } it 'closes the issue on the external issue tracker' do project.reload expect(project.external_issue_tracker).to receive(:close_issue) described_class.new(container: project, current_user: user).close_issue(external_issue) end end context 'with inactive external issue tracker supporting close_issue' do let!(:external_issue_tracker) { create(:jira_integration, project: project, active: false) } it 'does not close the issue on the external issue tracker' do project.reload expect(project.external_issue_tracker).not_to receive(:close_issue) described_class.new(container: project, current_user: user).close_issue(external_issue) end end context 'with an active external issue tracker not supporting close_issue' do let!(:external_issue_tracker) { create(:bugzilla_integration, project: project) } it 'does not close the issue on the external issue tracker' do project.reload expect(project.external_issue_tracker).not_to receive(:close_issue) described_class.new(container: project, current_user: user).close_issue(external_issue) end end end context "closed by a merge request" do subject(:close_issue) do perform_enqueued_jobs do described_class.new(container: project, current_user: user).close_issue(issue, closed_via: closing_merge_request) end end it 'mentions closure via a merge request' do expect_next_instance_of(NotificationService::Async) do |service| expect(service).to receive(:close_issue).with(issue, user, { closed_via: closing_merge_request }) end close_issue end 
it_behaves_like 'records an onboarding progress action', :issue_auto_closed do let(:namespace) { project.namespace } end context 'updating `metrics.first_mentioned_in_commit_at`' do context 'when `metrics.first_mentioned_in_commit_at` is not set' do it 'uses the first commit authored timestamp' do expected = closing_merge_request.commits.take(100).last.authored_date close_issue expect(issue.metrics.first_mentioned_in_commit_at).to eq(expected) end end context 'when `metrics.first_mentioned_in_commit_at` is already set' do before do issue.metrics.update!(first_mentioned_in_commit_at: Time.current) end it 'does not update the metrics' do expect { close_issue }.not_to change { issue.metrics.first_mentioned_in_commit_at } end end context 'when merge request has no commits' do let(:closing_merge_request) { create(:merge_request, :without_diffs, source_project: project) } it 'does not update the metrics' do close_issue expect(issue.metrics.first_mentioned_in_commit_at).to be_nil end end end end context "closed by a commit", :sidekiq_might_not_need_inline do it 'mentions closure via a commit' do expect_next_instance_of(NotificationService::Async) do |service| expect(service).to receive(:close_issue).with(issue, user, { closed_via: "commit #{closing_commit.id}" }) end described_class.new(container: project, current_user: user).close_issue(issue, closed_via: closing_commit) end end context "valid params" do subject(:close_issue) do perform_enqueued_jobs do described_class.new(container: project, current_user: user).close_issue(issue) end end it 'verifies the number of queries' do recorded = ActiveRecord::QueryRecorder.new { close_issue } expected_queries = 30 expect(recorded.count).to be <= expected_queries expect(recorded.cached_count).to eq(0) end it 'closes the issue' do close_issue expect(issue).to be_valid expect(issue).to be_closed end it 'records closed user' do close_issue expect(issue.reload.closed_by_id).to be(user.id) end it 'sends notification', :sidekiq_might_not_need_inline do expect_next_instance_of(NotificationService::Async) do |service| expect(service).to receive(:close_issue).with(issue, user, { closed_via: nil }) end close_issue end it 'creates resource state event about the issue being closed' do close_issue event = issue.resource_state_events.last expect(event.state).to eq('closed') end it 'marks todos as done' do close_issue expect(todo.reload).to be_done end context 'when closing the issue fails' do it 'does not assign a closed_by value for the issue' do allow(issue).to receive(:close).and_return(false) close_issue expect(issue.closed_by_id).to be_nil end end context 'when there is an associated Alert Management Alert' do context 'when alert can be resolved' do it 'resolves an alert and sends a system note' do alert = create(:alert_management_alert, issue: issue, project: project) expect(SystemNoteService).to receive(:change_alert_status) .with(alert, Users::Internal.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}") close_issue expect(alert.reload).to be_resolved end end context 'when alert cannot be resolved' do before do allow(Gitlab::AppLogger).to receive(:warn).and_call_original end it 'writes a warning into the log' do alert = create(:alert_management_alert, :with_validation_errors, issue: issue, project: project) close_issue expect(Gitlab::AppLogger).to have_received(:warn).with( message: 'Cannot resolve an associated Alert Management alert', issue_id: issue.id, alert_id: alert.id, alert_errors: { hosts: ['hosts array is over 
255 chars'] } ) end end end context 'when there are several associated Alert Management Alerts' do context 'when alerts can be resolved' do it 'resolves an alert and sends a system note', :aggregate_failures do alerts = create_list(:alert_management_alert, 2, issue: issue, project: project) alerts.each do |alert| expect(SystemNoteService).to receive(:change_alert_status) .with(alert, Users::Internal.alert_bot, " because #{user.to_reference} closed incident #{issue.to_reference(project)}") end close_issue expect(alerts.map(&:reload)).to all(be_resolved) end end end it 'deletes milestone issue counters cache' do issue.update!(milestone: create(:milestone, project: project)) expect_next_instance_of(Milestones::ClosedIssuesCountService, issue.milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end close_issue end it_behaves_like 'does not record an onboarding progress action' end context 'when issue is not confidential' do let(:expected_payload) do include( event_type: 'issue', object_kind: 'issue', changes: { closed_at: { current: kind_of(Time), previous: nil }, state_id: { current: 2, previous: 1 }, updated_at: { current: kind_of(Time), previous: kind_of(Time) } }, object_attributes: include( closed_at: kind_of(Time), state: 'closed', action: 'close' ) ) end it 'executes issue hooks' do expect(delegated_project).to receive(:execute_hooks).with(expected_payload, :issue_hooks) expect(delegated_project).to receive(:execute_integrations).with(expected_payload, :issue_hooks) described_class.new(container: delegated_project, current_user: user).close_issue(issue) end end context 'when issue is confidential' do it 'executes confidential issue hooks' do issue = create(:issue, :confidential, project: project) expect(delegated_project).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks) expect(delegated_project).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks) described_class.new(container: project, current_user: user).close_issue(issue) end end context 'internal issues disabled' do before do project.issues_enabled = false project.save! end it 'does not close the issue' do expect(issue).to be_valid expect(issue).to be_opened expect(todo.reload).to be_pending end end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true

module Issues
  class AfterCreateService < Issues::BaseService
    def execute(issue)
      todo_service.new_issue(issue, current_user)
      track_incident_action(current_user, issue, :incident_created)
    end
  end
end

Issues::AfterCreateService.prepend_mod
```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Issues::AfterCreateService, feature_category: :team_planning do
  include AfterNextHelpers

  let_it_be(:project) { create(:project) }
  let_it_be(:current_user) { create(:user) }
  let_it_be(:assignee) { create(:user) }
  let_it_be(:milestone) { create(:milestone, project: project) }
  let_it_be(:issue) { create(:issue, project: project, author: current_user, milestone: milestone, assignee_ids: [assignee.id]) }

  subject(:after_create_service) { described_class.new(container: project, current_user: current_user) }

  describe '#execute' do
    it 'creates a pending todo for new assignee' do
      attributes = {
        project: project,
        author: current_user,
        user: assignee,
        target_id: issue.id,
        target_type: issue.class.name,
        action: Todo::ASSIGNED,
        state: :pending
      }

      expect { after_create_service.execute(issue) }.to change { Todo.where(attributes).count }.by(1)
    end

    context 'with a regular issue' do
      it_behaves_like 'does not track incident management event', :incident_management_incident_created do
        subject { after_create_service.execute(issue) }
      end
    end

    context 'with an incident issue' do
      let(:issue) { create(:issue, :incident, project: project, author: current_user) }

      it_behaves_like 'an incident management tracked event', :incident_management_incident_created do
        subject { after_create_service.execute(issue) }
      end
    end
  end
end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class ZoomLinkService < Issues::BaseService def initialize(container:, current_user:, params:) super @issue = params.fetch(:issue) @added_meeting = ZoomMeeting.canonical_meeting(@issue) end def add_link(link) if can_add_link? && (link = parse_link(link)) begin add_zoom_meeting(link) rescue ActiveRecord::RecordNotUnique error(message: _('Failed to add a Zoom meeting')) end else error(message: _('Failed to add a Zoom meeting')) end end def remove_link if can_remove_link? remove_zoom_meeting success(message: _('Zoom meeting removed')) else error(message: _('Failed to remove a Zoom meeting')) end end def can_add_link? can_change_link? && !@added_meeting end def can_remove_link? can_change_link? && @issue.persisted? && !!@added_meeting end def parse_link(link) Gitlab::ZoomLinkExtractor.new(link).links.last end private attr_reader :issue def track_meeting_added_event ::Gitlab::Tracking.event('IncidentManagement::ZoomIntegration', 'add_zoom_meeting', label: 'Issue ID', value: issue.id, user: current_user, project: @project, namespace: @project.namespace) end def track_meeting_removed_event ::Gitlab::Tracking.event('IncidentManagement::ZoomIntegration', 'remove_zoom_meeting', label: 'Issue ID', value: issue.id, user: current_user, project: @project, namespace: @project.namespace) end def add_zoom_meeting(link) zoom_meeting = new_zoom_meeting(link) response = if @issue.persisted? # Save the meeting directly since we only want to update one meeting, not all zoom_meeting.save track_incident_action(current_user, issue, :incident_zoom_meeting) success(message: _('Zoom meeting added')) else success(message: _('Zoom meeting added'), payload: { zoom_meetings: [zoom_meeting] }) end track_meeting_added_event SystemNoteService.zoom_link_added(@issue, @project, current_user) response end def new_zoom_meeting(link) ZoomMeeting.new( issue: @issue, project: @project, issue_status: :added, url: link ) end def remove_zoom_meeting @added_meeting.update(issue_status: :removed) track_meeting_removed_event SystemNoteService.zoom_link_removed(@issue, @project, current_user) end def success(message:, payload: nil) ServiceResponse.success(message: message, payload: payload) end def error(message:) ServiceResponse.error(message: message) end def can_change_link? if @issue.persisted? can?(current_user, :update_issue, @project) else can?(current_user, :create_issue, @project) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::ZoomLinkService, feature_category: :team_planning do let_it_be(:user) { create(:user) } let_it_be(:issue) { create(:issue) } let(:project) { issue.project } let(:service) { described_class.new(container: project, current_user: user, params: { issue: issue }) } let(:zoom_link) { 'https://zoom.us/j/123456789' } before do project.add_reporter(user) end shared_context '"added" Zoom meeting' do before do create(:zoom_meeting, issue: issue) end end shared_context '"removed" zoom meetings' do before do create(:zoom_meeting, issue: issue, issue_status: :removed) create(:zoom_meeting, issue: issue, issue_status: :removed) end end shared_context 'insufficient issue update permissions' do before do project.add_guest(user) end end shared_context 'insufficient issue create permissions' do before do expect(service).to receive(:can?).with(user, :create_issue, project).and_return(false) end end describe '#add_link' do shared_examples 'can add meeting' do it 'appends the new meeting to zoom_meetings' do expect(result).to be_success expect(ZoomMeeting.canonical_meeting_url(issue)).to eq(zoom_link) end it 'tracks the add event', :snowplow do result expect_snowplow_event( category: 'IncidentManagement::ZoomIntegration', action: 'add_zoom_meeting', label: 'Issue ID', value: issue.id, user: user, project: project, namespace: project.namespace ) end it 'creates a zoom_link_added notification' do expect(SystemNoteService).to receive(:zoom_link_added).with(issue, project, user) expect(SystemNoteService).not_to receive(:zoom_link_removed) result end end shared_examples 'cannot add meeting' do it 'cannot add the meeting' do expect(result).to be_error expect(result.message).to eq('Failed to add a Zoom meeting') end it 'creates no notification' do expect(SystemNoteService).not_to receive(:zoom_link_added) expect(SystemNoteService).not_to receive(:zoom_link_removed) result end end subject(:result) { service.add_link(zoom_link) } context 'without existing Zoom meeting' do context 'when updating an issue' do before do allow(issue).to receive(:persisted?).and_return(true) end include_examples 'can add meeting' context 'issue is incident type' do let(:issue) { create(:incident) } let(:current_user) { user } it_behaves_like 'an incident management tracked event', :incident_management_incident_zoom_meeting it_behaves_like 'Snowplow event tracking with RedisHLL context' do let(:namespace) { issue.namespace } let(:category) { described_class.to_s } let(:action) { 'incident_management_incident_zoom_meeting' } let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' } end end context 'with insufficient issue update permissions' do include_context 'insufficient issue update permissions' include_examples 'cannot add meeting' end end context 'when creating an issue' do before do allow(issue).to receive(:persisted?).and_return(false) end it 'creates a new zoom meeting' do expect(result).to be_success expect(result.payload[:zoom_meetings][0].url).to eq(zoom_link) end context 'with insufficient issue create permissions' do include_context 'insufficient issue create permissions' include_examples 'cannot add meeting' end end context 'with invalid Zoom url' do let(:zoom_link) { 'https://not-zoom.link' } include_examples 'cannot add meeting' end end context 'with "added" Zoom meeting' do include_context '"added" Zoom meeting' include_examples 'cannot add meeting' end context 'with "added" Zoom meeting and race 
condition' do include_context '"added" Zoom meeting' before do allow(service).to receive(:can_add_link?).and_return(true) allow(issue).to receive(:persisted?).and_return(true) end include_examples 'cannot add meeting' end end describe '#can_add_link?' do subject { service.can_add_link? } context 'without "added" zoom meeting' do it { is_expected.to eq(true) } context 'with insufficient issue update permissions' do include_context 'insufficient issue update permissions' it { is_expected.to eq(false) } end end context 'with Zoom meeting in the issue description' do include_context '"added" Zoom meeting' it { is_expected.to eq(false) } end end describe '#remove_link' do shared_examples 'cannot remove meeting' do it 'cannot remove the meeting' do expect(result).to be_error expect(result.message).to eq('Failed to remove a Zoom meeting') end it 'creates no notification' do expect(SystemNoteService).not_to receive(:zoom_link_added) expect(SystemNoteService).not_to receive(:zoom_link_removed) result end end shared_examples 'can remove meeting' do it 'creates no notification' do expect(SystemNoteService).not_to receive(:zoom_link_added).with(issue, project, user) expect(SystemNoteService).to receive(:zoom_link_removed) result end it 'can remove the meeting' do expect(result).to be_success expect(ZoomMeeting.canonical_meeting_url(issue)).to eq(nil) end it 'tracks the remove event', :snowplow do result expect_snowplow_event( category: 'IncidentManagement::ZoomIntegration', action: 'remove_zoom_meeting', label: 'Issue ID', value: issue.id, user: user, project: project, namespace: project.namespace ) end end subject(:result) { service.remove_link } context 'with Zoom meeting' do include_context '"added" Zoom meeting' context 'with existing issue' do before do allow(issue).to receive(:persisted?).and_return(true) end include_examples 'can remove meeting' end context 'without existing issue' do before do allow(issue).to receive(:persisted?).and_return(false) end include_examples 'cannot remove meeting' end context 'with insufficient issue update permissions' do include_context 'insufficient issue update permissions' include_examples 'cannot remove meeting' end end context 'without "added" Zoom meeting' do include_context '"removed" zoom meetings' include_examples 'cannot remove meeting' end end describe '#can_remove_link?' do subject { service.can_remove_link? 
} context 'without Zoom meeting' do it { is_expected.to eq(false) } end context 'with only "removed" zoom meetings' do include_context '"removed" zoom meetings' it { is_expected.to eq(false) } end context 'with "added" Zoom meeting' do include_context '"added" Zoom meeting' it { is_expected.to eq(true) } context 'with "removed" zoom meetings' do include_context '"removed" zoom meetings' it { is_expected.to eq(true) } end context 'with insufficient issue update permissions' do include_context 'insufficient issue update permissions' it { is_expected.to eq(false) } end end end describe '#parse_link' do subject { service.parse_link(description) } context 'with valid Zoom links' do where(:description) do [ 'Some text https://zoom.us/j/123456789 more text', 'Mixed https://zoom.us/j/123456789 http://example.com', 'Multiple link https://zoom.us/my/name https://zoom.us/j/123456789' ] end with_them do it { is_expected.to eq('https://zoom.us/j/123456789') } end end context 'with invalid Zoom links' do where(:description) do [ nil, '', 'Text only', 'Non-Zoom http://example.com', 'Almost Zoom http://zoom.us' ] end with_them do it { is_expected.to eq(nil) } end end end end
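To complement the spec, a brief usage sketch of `Issues::ZoomLinkService` follows. It only calls the public methods defined in the class shown earlier; the `issue` and `user` records and the Zoom URL are assumptions for illustration.

```ruby
# Sketch only: assumes `issue` and `user` exist and the user can update the
# issue's project (see #can_change_link? in the class above).
service = Issues::ZoomLinkService.new(
  container: issue.project,
  current_user: user,
  params: { issue: issue }
)

if service.can_add_link?
  result = service.add_link('https://zoom.us/j/123456789') # returns a ServiceResponse
  result.message # => "Zoom meeting added" or "Failed to add a Zoom meeting"
end

# Marks the canonical meeting as removed when one exists and the user may edit.
service.remove_link if service.can_remove_link?
```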
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class UpdateService < Issues::BaseService # NOTE: For Issues::UpdateService, we default perform_spam_check to false, because spam_checking is not # necessary in many cases, and we don't want to require every caller to explicitly pass it # to disable spam checking. def initialize(container:, current_user: nil, params: {}, perform_spam_check: false) super(container: container, current_user: current_user, params: params) @perform_spam_check = perform_spam_check end def execute(issue) handle_move_between_ids(issue) change_issue_duplicate(issue) move_issue_to_new_project(issue) || clone_issue(issue) || update_task_event(issue) || update(issue) end def update(issue) create_merge_request_from_quick_action super end def before_update(issue, skip_spam_check: false) change_work_item_type(issue) return if skip_spam_check || !perform_spam_check issue.check_for_spam(user: current_user, action: :update) end def change_work_item_type(issue) return unless params[:issue_type].present? type_id = find_work_item_type_id(params[:issue_type]) issue.work_item_type_id = type_id end def handle_changes(issue, options) super old_associations = options.fetch(:old_associations, {}) old_labels = old_associations.fetch(:labels, []) old_mentioned_users = old_associations.fetch(:mentioned_users, []) old_assignees = old_associations.fetch(:assignees, []) old_severity = old_associations[:severity] if has_changes?(issue, old_labels: old_labels, old_assignees: old_assignees) todo_service.resolve_todos_for_target(issue, current_user) end if issue.previous_changes.include?('title') || issue.previous_changes.include?('description') todo_service.update_issue(issue, current_user, old_mentioned_users) end handle_assignee_changes(issue, old_assignees) handle_confidential_change(issue) handle_added_labels(issue, old_labels) handle_added_mentions(issue, old_mentioned_users) handle_severity_change(issue, old_severity) handle_escalation_status_change(issue) handle_issue_type_change(issue) handle_date_changes(issue) end def handle_assignee_changes(issue, old_assignees) return if issue.assignees == old_assignees create_assignee_note(issue, old_assignees) Gitlab::ResourceEvents::AssignmentEventRecorder.new(parent: issue, old_assignees: old_assignees).record notification_service.async.reassigned_issue(issue, current_user, old_assignees) todo_service.reassigned_assignable(issue, current_user, old_assignees) track_incident_action(current_user, issue, :incident_assigned) GraphqlTriggers.issuable_assignees_updated(issue) end def handle_task_changes(issuable) todo_service.resolve_todos_for_target(issuable, current_user) todo_service.update_issue(issuable, current_user) end # rubocop: disable CodeReuse/ActiveRecord def change_issue_duplicate(issue) canonical_issue_id = params.delete(:canonical_issue_id) return unless canonical_issue_id canonical_issue = IssuesFinder.new(current_user).find_by(id: canonical_issue_id) if canonical_issue Issues::DuplicateService.new(container: project, current_user: current_user).execute(issue, canonical_issue) end end # rubocop: enable CodeReuse/ActiveRecord def move_issue_to_new_project(issue) target_project = params.delete(:target_project) return unless target_project && issue.can_move?(current_user, target_project) && target_project != issue.project update(issue) Issues::MoveService.new(container: project, current_user: current_user).execute(issue, target_project) end private attr_reader :perform_spam_check override 
:after_update def after_update(issue, _old_associations) super GraphqlTriggers.work_item_updated(issue) end def handle_date_changes(issue) return unless issue.previous_changes.slice('due_date', 'start_date').any? GraphqlTriggers.issuable_dates_updated(issue) end def clone_issue(issue) target_project = params.delete(:target_clone_project) with_notes = params.delete(:clone_with_notes) return unless target_project && issue.can_clone?(current_user, target_project) # we've pre-empted this from running in #execute, so let's go ahead and update the Issue now. update(issue) Issues::CloneService.new(container: project, current_user: current_user).execute(issue, target_project, with_notes: with_notes) end def create_merge_request_from_quick_action create_merge_request_params = params.delete(:create_merge_request) return unless create_merge_request_params MergeRequests::CreateFromIssueService.new(project: project, current_user: current_user, mr_params: create_merge_request_params).execute end def handle_confidential_change(issue) if issue.previous_changes.include?('confidential') # don't enqueue immediately to prevent todos removal in case of a mistake TodosDestroyer::ConfidentialIssueWorker.perform_in(Todo::WAIT_FOR_DELETE, issue.id) if issue.confidential? create_confidentiality_note(issue) track_incident_action(current_user, issue, :incident_change_confidential) end end def handle_added_labels(issue, old_labels) added_labels = issue.labels - old_labels if added_labels.present? notification_service.async.relabeled_issue(issue, added_labels, current_user) end end def handle_added_mentions(issue, old_mentioned_users) added_mentions = issue.mentioned_users(current_user) - old_mentioned_users if added_mentions.present? notification_service.async.new_mentions_in_issue(issue, added_mentions, current_user) end end def handle_severity_change(issue, old_severity) return unless old_severity && issue.severity != old_severity ::IncidentManagement::AddSeveritySystemNoteWorker.perform_async(issue.id, current_user.id) end def create_confidentiality_note(issue) SystemNoteService.change_issue_confidentiality(issue, issue.project, current_user) end def handle_issue_type_change(issue) return unless issue.previous_changes.include?('work_item_type_id') do_handle_issue_type_change(issue) end def do_handle_issue_type_change(issue) old_work_item_type = ::WorkItems::Type.find(issue.work_item_type_id_before_last_save).base_type SystemNoteService.change_issue_type(issue, current_user, old_work_item_type) ::IncidentManagement::IssuableEscalationStatuses::CreateService.new(issue).execute if issue.supports_escalation? end override :allowed_update_params def allowed_update_params(params) super.except(:issue_type) end end end Issues::UpdateService.prepend_mod_with('Issues::UpdateService') ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::UpdateService, :mailer, feature_category: :team_planning do let_it_be(:user) { create(:user) } let_it_be(:user2) { create(:user) } let_it_be(:user3) { create(:user) } let_it_be(:guest) { create(:user) } let_it_be(:group) { create(:group, :public, :crm_enabled) } let_it_be(:project, reload: true) { create(:project, :repository, group: group) } let_it_be(:label) { create(:label, title: 'a', project: project) } let_it_be(:label2) { create(:label, title: 'b', project: project) } let_it_be(:label3) { create(:label, title: 'c', project: project) } let_it_be(:milestone) { create(:milestone, project: project) } let(:container) { project } let(:issue) do create( :issue, title: 'Old title', description: "for #{user2.to_reference}", assignee_ids: [user3.id], project: project, author: create(:user) ) end before_all do group.add_maintainer(user) group.add_developer(user2) group.add_developer(user3) group.add_guest(guest) end describe 'execute' do let_it_be(:contact) { create(:contact, group: group) } def find_note(starting_with) issue.notes.find do |note| note && note.note.start_with?(starting_with) end end def find_notes(action) issue .notes .joins(:system_note_metadata) .where(system_note_metadata: { action: action }) end def update_issue(opts) described_class.new(container: container, current_user: user, params: opts).execute(issue) end it_behaves_like 'issuable update service updating last_edited_at values' do let(:issuable) { issue } subject(:update_issuable) { update_issue(update_params) } end context 'valid params' do let(:opts) do { title: 'New title', description: 'Also please fix', assignee_ids: [user2.id], state_event: 'close', label_ids: [label&.id], due_date: Date.tomorrow, discussion_locked: true, severity: 'low', milestone_id: milestone.id, add_contacts: [contact.email] } end context 'when an unauthorized project_id is provided' do let(:unauthorized_project) { create(:project) } before do opts[:project_id] = unauthorized_project.id end it 'ignores the project_id param and does not update the issue\'s project' do expect do update_issue(opts) unauthorized_project.reload end.to not_change { unauthorized_project.issues.count } expect(issue.project).to eq(project) end end it 'updates the issue with the given params' do expect(TodosDestroyer::ConfidentialIssueWorker).not_to receive(:perform_in) update_issue(opts) expect(issue).to be_valid expect(issue.title).to eq 'New title' expect(issue.description).to eq 'Also please fix' expect(issue.assignees).to match_array([user2]) expect(issue).to be_closed expect(issue.labels).to match_array [label] expect(issue.due_date).to eq Date.tomorrow expect(issue.discussion_locked).to be_truthy expect(issue.confidential).to be_falsey expect(issue.milestone).to eq milestone expect(issue.issue_customer_relations_contacts.last.contact).to eq contact end it_behaves_like 'update service that triggers GraphQL work_item_updated subscription' do subject(:execute_service) { update_issue(opts) } end context 'when updating milestone' do before do update_issue({ milestone_id: nil }) end it 'updates issue milestone when passing `milestone` param' do expect { update_issue({ milestone_id: milestone.id }) } .to change(issue, :milestone).to(milestone).from(nil) end it "triggers 'issuableMilestoneUpdated'" do expect(GraphqlTriggers).to receive(:issuable_milestone_updated).with(issue).and_call_original update_issue({ milestone_id: milestone.id }) end context 'when milestone remains unchanged' do before do 
update_issue({ title: 'abc', milestone_id: milestone.id }) end it "does not trigger 'issuableMilestoneUpdated'" do expect(GraphqlTriggers).not_to receive(:issuable_milestone_updated) update_issue({ milestone_id: milestone.id }) end end end context 'when sentry identifier is given' do before do sentry_attributes = { sentry_issue_attributes: { sentry_issue_identifier: 42 } } opts.merge!(sentry_attributes) end it 'assigns the sentry error' do update_issue(opts) expect(issue.sentry_issue).to be_kind_of(SentryIssue) end context 'user is a guest' do let(:user) { guest } it 'does not assign the sentry error' do update_issue(opts) expect(issue.sentry_issue).to eq(nil) end end end context 'when issue type is not incident' do before do update_issue(opts) end it_behaves_like 'not an incident issue' context 'when confidentiality is changed' do subject { update_issue(confidential: true) } it_behaves_like 'does not track incident management event' end end context 'when issue type is incident' do let(:issue) { create(:incident, project: project) } before do update_issue(opts) end it_behaves_like 'incident issue' it 'does not add an incident label' do expect(issue.labels).to match_array [label] end context 'when confidentiality is changed' do let(:current_user) { user } subject { update_issue(confidential: true) } it_behaves_like 'an incident management tracked event', :incident_management_incident_change_confidential it_behaves_like 'Snowplow event tracking with RedisHLL context' do let(:namespace) { issue.namespace } let(:category) { described_class.to_s } let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' } let(:action) { 'incident_management_incident_change_confidential' } let(:opts) do { title: 'New title', description: 'Also please fix', assignee_ids: [user2.id], state_event: 'close', due_date: Date.tomorrow, discussion_locked: true, severity: 'low', milestone_id: milestone.id, add_contacts: [contact.email] } end end end end it 'refreshes the number of open issues when the issue is made confidential', :use_clean_rails_memory_store_caching do issue # make sure the issue is created first so our counts are correct. 
expect do update_issue(confidential: true) BatchLoader::Executor.clear_current end.to change { project.open_issues_count }.from(1).to(0) end it 'enqueues ConfidentialIssueWorker when an issue is made confidential' do expect(TodosDestroyer::ConfidentialIssueWorker).to receive(:perform_in).with(Todo::WAIT_FOR_DELETE, issue.id) update_issue(confidential: true) expect(issue.confidential).to be_truthy end it 'does not enqueue ConfidentialIssueWorker when an issue is made non confidential' do # set confidentiality to true before the actual update issue.update!(confidential: true) expect(TodosDestroyer::ConfidentialIssueWorker).not_to receive(:perform_in) update_issue(confidential: false) expect(issue.confidential).to be_falsey end context 'changing issue_type' do let!(:label_1) { create(:label, project: project, title: 'incident') } let!(:label_2) { create(:label, project: project, title: 'missed-sla') } before do stub_licensed_features(quality_management: true) end context 'from issue to incident' do it_behaves_like 'incident issue' do before do update_issue(**opts, issue_type: 'incident') end end it 'creates system note about issue type' do update_issue(issue_type: 'incident') note = find_note('changed type from issue to incident') expect(note).not_to eq(nil) end it 'creates an escalation status' do expect { update_issue(issue_type: 'incident') } .to change { issue.reload.incident_management_issuable_escalation_status } .from(nil) .to(a_kind_of(IncidentManagement::IssuableEscalationStatus)) end context 'for an issue with multiple labels' do let(:issue) { create(:incident, project: project, labels: [label_1]) } before do update_issue(issue_type: 'incident') end it 'does not add an `incident` label if one already exist' do expect(issue.labels).to eq([label_1]) end end end context 'from incident to issue' do let(:issue) { create(:incident, project: project) } it 'changed from an incident to an issue type' do expect { update_issue(issue_type: 'issue') } .to change(issue, :issue_type).from('incident').to('issue') .and(change { issue.work_item_type.base_type }.from('incident').to('issue')) end context 'for an incident with multiple labels' do let(:issue) { create(:incident, project: project, labels: [label_1, label_2]) } it 'does not remove an `incident` label if one exists on the incident' do expect { update_issue(issue_type: 'issue') }.to not_change(issue, :label_ids) end end context 'filtering the incident label' do let(:issue) { create(:incident, project: project, labels: [label_1, label_2]) } let(:params) { { label_ids: [label_1.id, label_2.id], remove_label_ids: [] } } it 'does not add an incident label id to remove_label_ids for it to be removed' do expect { update_issue(issue_type: 'issue') }.to not_change(issue, :label_ids) end end end context 'from issue to restricted issue types' do context 'without sufficient permissions' do let(:user) { guest } it 'does nothing to the labels' do expect { update_issue(issue_type: 'issue') }.not_to change(issue.labels, :count) expect(issue.reload.labels).to eq([]) end end end end it 'updates open issue counter for assignees when issue is reassigned' do update_issue(assignee_ids: [user2.id]) expect(user3.assigned_open_issues_count).to eq 0 expect(user2.assigned_open_issues_count).to eq 1 end context 'when changing relative position' do let(:issue1) { create(:issue, project: project, assignees: [user3]) } let(:issue2) { create(:issue, project: project, assignees: [user3]) } before do [issue, issue1, issue2].each do |issue| issue.move_to_end issue.save! 
end end it 'sorts issues as specified by parameters' do opts[:move_between_ids] = [issue1.id, issue2.id] update_issue(opts) expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) end context 'when block_issue_positioning flag is enabled' do before do stub_feature_flags(block_issue_repositioning: true) end it 'raises error' do old_position = issue.relative_position opts[:move_between_ids] = [issue1.id, issue2.id] expect { update_issue(opts) }.to raise_error(::Gitlab::RelativePositioning::IssuePositioningDisabled) expect(issue.reload.relative_position).to eq(old_position) end end end it 'rebalances if needed on the left' do range = described_class::NO_REBALANCING_NEEDED issue1 = create(:issue, project: project, relative_position: range.first - 100) issue2 = create(:issue, project: project, relative_position: range.first) issue.update!(relative_position: RelativePositioning::START_POSITION) opts[:move_between_ids] = [issue1.id, issue2.id] expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id) update_issue(opts) expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) end it 'rebalances if needed on the right' do range = described_class::NO_REBALANCING_NEEDED issue1 = create(:issue, project: project, relative_position: range.last) issue2 = create(:issue, project: project, relative_position: range.last + 100) issue.update!(relative_position: RelativePositioning::START_POSITION) opts[:move_between_ids] = [issue1.id, issue2.id] expect(Issues::RebalancingWorker).to receive(:perform_async).with(nil, nil, project.root_namespace.id) update_issue(opts) expect(issue.relative_position).to be_between(issue1.relative_position, issue2.relative_position) end context 'when moving issue between issues from different projects' do let(:group) { create(:group) } let(:subgroup) { create(:group, parent: group) } let(:project_1) { create(:project, namespace: group) } let(:project_2) { create(:project, namespace: group) } let(:project_3) { create(:project, namespace: subgroup) } let(:issue_1) { create(:issue, project: project_1) } let(:issue_2) { create(:issue, project: project_2) } let(:issue_3) { create(:issue, project: project_3) } before do group.add_developer(user) end it 'sorts issues as specified by parameters' do # Moving all issues to end here like the last example won't work since # all projects only have the same issue count # so their relative_position will be the same. 
issue_1.move_to_end issue_2.move_after(issue_1) issue_3.move_after(issue_2) [issue_1, issue_2, issue_3].map(&:save) opts[:move_between_ids] = [issue_1.id, issue_2.id] described_class.new(container: issue_3.project, current_user: user, params: opts).execute(issue_3) expect(issue_2.relative_position).to be_between(issue_1.relative_position, issue_2.relative_position) end end context 'when current user cannot admin issues in the project' do it 'filters out params that cannot be set without the :admin_issue permission' do described_class.new( container: project, current_user: guest, params: opts.merge( confidential: true, issue_type: 'test_case' ) ).execute(issue) expect(issue).to be_valid expect(issue.title).to eq 'New title' expect(issue.description).to eq 'Also please fix' expect(issue.assignees).to match_array [user3] expect(issue.labels).to be_empty expect(issue.milestone).to be_nil expect(issue.due_date).to be_nil expect(issue.discussion_locked).to be_falsey expect(issue.confidential).to be_falsey expect(issue.issue_type).to eql('issue') end end context 'with background jobs processed', :sidekiq_might_not_need_inline do before do perform_enqueued_jobs do update_issue(opts) end end it 'sends email to user2 about assign of new issue and email to user3 about issue unassignment' do deliveries = ActionMailer::Base.deliveries email = deliveries.last recipients = deliveries.last(2).flat_map(&:to) expect(recipients).to include(user2.email, user3.email) expect(email.subject).to include(issue.title) end it 'creates system note about issue reassign' do note = find_note('assigned to') expect(note.note).to include "assigned to #{user2.to_reference}" end it 'creates a resource label event' do event = issue.resource_label_events.last expect(event).not_to be_nil expect(event.label_id).to eq label.id expect(event.user_id).to eq user.id end it 'creates system note about title change' do note = find_note('changed title') expect(note.note).to eq 'changed title from **{-Old-} title** to **{+New+} title**' end it 'creates system note about discussion lock' do note = find_note('locked the discussion in this issue') expect(note.note).to eq 'locked the discussion in this issue' end end context 'after_save callback to store_mentions' do let(:issue) { create(:issue, title: 'Old title', description: "simple description", project: project, author: create(:user)) } let(:labels) { create_pair(:label, project: project) } let(:milestone) { create(:milestone, project: project) } context 'when mentionable attributes change' do let(:opts) { { description: "Description with #{user.to_reference}" } } it 'saves mentions' do expect(issue).to receive(:store_mentions!).and_call_original expect { update_issue(opts) }.to change { IssueUserMention.count }.by(1) expect(issue.referenced_users).to match_array([user]) end end context 'when mentionable attributes do not change' do let(:opts) { { label_ids: labels.map(&:id), milestone_id: milestone.id } } it 'does not call store_mentions' do expect(issue).not_to receive(:store_mentions!).and_call_original expect { update_issue(opts) }.not_to change { IssueUserMention.count } expect(issue.referenced_users).to be_empty end end context 'when save fails' do let(:opts) { { title: '', label_ids: labels.map(&:id), milestone_id: milestone.id } } it 'does not call store_mentions' do expect(issue).not_to receive(:store_mentions!).and_call_original expect { update_issue(opts) }.not_to change { IssueUserMention.count } expect(issue.referenced_users).to be_empty expect(issue.valid?).to be false end 
end end end context 'when description changed' do it 'creates system note about description change' do update_issue(description: 'Changed description') note = find_note('changed the description') expect(note.note).to eq('changed the description') end it 'triggers GraphQL description updated subscription' do expect(GraphqlTriggers).to receive(:issuable_description_updated).with(issue).and_call_original update_issue(description: 'Changed description') end end context 'when description is not changed' do it 'does not trigger GraphQL description updated subscription' do expect(GraphqlTriggers).not_to receive(:issuable_description_updated) update_issue(title: 'Changed title') end end context 'when issue turns confidential' do let(:opts) do { title: 'New title', description: 'Also please fix', assignee_ids: [user2], state_event: 'close', label_ids: [label.id], confidential: true } end it 'creates system note about confidentiality change' do update_issue(confidential: true) note = find_note('made the issue confidential') expect(note.note).to eq 'made the issue confidential' end it 'executes confidential issue hooks' do expect(project.project_namespace).to receive(:execute_hooks).with(an_instance_of(Hash), :confidential_issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :confidential_issue_hooks) update_issue(confidential: true) end it 'does not update assignee_id with unauthorized users' do project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC) update_issue(confidential: true) non_member = create(:user) original_assignees = issue.assignees update_issue(assignee_ids: [non_member.id]) expect(issue.reload.assignees).to eq(original_assignees) end end context 'todos' do let!(:todo) { create(:todo, :assigned, user: user, project: project, target: issue, author: user2) } context 'when the title change' do before do update_issue(title: 'New title') end it 'marks pending todos as done' do expect(todo.reload.done?).to eq true end it 'does not create any new todos' do expect(Todo.count).to eq(1) end end context 'when the description change' do before do update_issue(description: "Also please fix #{user2.to_reference} #{user3.to_reference}") end it 'marks todos as done' do expect(todo.reload.done?).to eq true end it 'creates only 1 new todo' do expect(Todo.count).to eq(2) end end context 'when is reassigned' do before do update_issue(assignees: [user2]) end it 'marks previous assignee todos as done' do expect(todo.reload.done?).to eq true end it 'creates a todo for new assignee' do attributes = { project: project, author: user, user: user2, target_id: issue.id, target_type: issue.class.name, action: Todo::ASSIGNED, state: :pending } expect(Todo.where(attributes).count).to eq 1 end end context 'when a new assignee added' do subject { update_issue(assignees: issue.assignees + [user2]) } it 'creates only 1 new todo' do expect { subject }.to change { Todo.count }.by(1) end it 'creates a todo for new assignee' do subject attributes = { project: project, author: user, user: user2, target_id: issue.id, target_type: issue.class.name, action: Todo::ASSIGNED, state: :pending } expect(Todo.where(attributes).count).to eq(1) end context 'issue is incident type' do let(:issue) { create(:incident, project: project) } let(:current_user) { user } it_behaves_like 'an incident management tracked event', :incident_management_incident_assigned it_behaves_like 'Snowplow event tracking with RedisHLL context' do let(:namespace) { issue.namespace } let(:category) { 
described_class.to_s } let(:label) { 'redis_hll_counters.incident_management.incident_management_total_unique_counts_monthly' } let(:action) { "incident_management_incident_assigned" } end end end context 'when the milestone is removed' do let!(:non_subscriber) { create(:user) } let!(:subscriber) do create(:user) do |u| issue.toggle_subscription(u, project) project.add_developer(u) end end it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do issue.milestone = create(:milestone, project: project) issue.save! perform_enqueued_jobs do update_issue(milestone_id: "") end should_email(subscriber) should_not_email(non_subscriber) end it 'clears milestone issue counters cache' do issue.milestone = create(:milestone, project: project) issue.save! expect_next_instance_of(Milestones::IssuesCountService, issue.milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end expect_next_instance_of(Milestones::ClosedIssuesCountService, issue.milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end update_issue(milestone_id: "") end end context 'when the milestone is assigned' do let!(:non_subscriber) { create(:user) } let!(:subscriber) do create(:user) do |u| issue.toggle_subscription(u, project) project.add_developer(u) end end it 'marks todos as done' do update_issue(milestone_id: create(:milestone, project: project).id) expect(todo.reload.done?).to eq true end it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do perform_enqueued_jobs do update_issue(milestone_id: create(:milestone, project: project).id) end should_email(subscriber) should_not_email(non_subscriber) end it 'deletes issue counters cache for the milestone' do milestone = create(:milestone, project: project) expect_next_instance_of(Milestones::IssuesCountService, milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end expect_next_instance_of(Milestones::ClosedIssuesCountService, milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end update_issue(milestone_id: milestone.id) end end context 'when the milestone is changed' do it 'deletes issue counters cache for both milestones' do old_milestone = create(:milestone, project: project) new_milestone = create(:milestone, project: project) issue.update!(milestone: old_milestone) expect_next_instance_of(Milestones::IssuesCountService, old_milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end expect_next_instance_of(Milestones::ClosedIssuesCountService, old_milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end expect_next_instance_of(Milestones::IssuesCountService, new_milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end expect_next_instance_of(Milestones::ClosedIssuesCountService, new_milestone) do |service| expect(service).to receive(:delete_cache).and_call_original end update_issue(milestone_id: new_milestone.id) end end context 'when the labels change' do before do travel_to(1.minute.from_now) do update_issue(label_ids: [label.id]) end end it 'marks todos as done' do expect(todo.reload.done?).to eq true end it 'updates updated_at' do expect(issue.reload.updated_at).to be_future end end end context 'when the issue is relabeled' do let!(:non_subscriber) { create(:user) } let!(:subscriber) do create(:user) do |u| label.toggle_subscription(u, project) project.add_developer(u) end end it 
'sends notifications for subscribers of newly added labels', :sidekiq_might_not_need_inline do opts = { label_ids: [label.id] } perform_enqueued_jobs do @issue = described_class.new(container: project, current_user: user, params: opts).execute(issue) end should_email(subscriber) should_not_email(non_subscriber) end context 'when issue has the `label` label' do before do issue.labels << label end it 'does not send notifications for existing labels' do opts = { label_ids: [label.id, label2.id] } perform_enqueued_jobs do @issue = described_class.new(container: project, current_user: user, params: opts).execute(issue) end should_not_email(subscriber) should_not_email(non_subscriber) end it 'does not send notifications for removed labels' do opts = { label_ids: [label2.id] } perform_enqueued_jobs do @issue = described_class.new(container: project, current_user: user, params: opts).execute(issue) end should_not_email(subscriber) should_not_email(non_subscriber) end end end context 'when issue has tasks' do before do update_issue(description: "- [ ] Task 1\n- [ ] Task 2") end it { expect(issue.tasks?).to eq(true) } it_behaves_like 'updating a single task' context 'when tasks are marked as completed' do before do update_issue(description: "- [x] Task 1\n- [X] Task 2") end it 'does not check for spam on task status change' do params = { update_task: { index: 1, checked: false, line_source: '- [x] Task 1', line_number: 1 } } service = described_class.new(container: project, current_user: user, params: params) expect(issue).not_to receive(:check_for_spam) service.execute(issue) end # At the moment of writting old associations are not necessary for update_task # and doing this will prevent fetching associations from the DB and comparing old and new labels it 'does not pass old_associations to the after_update method' do params = { update_task: { index: 1, checked: false, line_source: '- [x] Task 1', line_number: 1 } } service = described_class.new(container: project, current_user: user, params: params) expect(service).to receive(:after_update).with(issue, {}) service.execute(issue) end it 'creates system note about task status change' do note1 = find_note('marked the checklist item **Task 1** as completed') note2 = find_note('marked the checklist item **Task 2** as completed') expect(note1).not_to be_nil expect(note2).not_to be_nil description_notes = find_notes('description') expect(description_notes.length).to eq(1) end end context 'when tasks are marked as incomplete' do before do update_issue(description: "- [x] Task 1\n- [X] Task 2") update_issue(description: "- [ ] Task 1\n- [ ] Task 2") end it 'creates system note about task status change' do note1 = find_note('marked the checklist item **Task 1** as incomplete') note2 = find_note('marked the checklist item **Task 2** as incomplete') expect(note1).not_to be_nil expect(note2).not_to be_nil description_notes = find_notes('description') expect(description_notes.length).to eq(1) end end context 'when tasks position has been modified' do before do update_issue(description: "- [x] Task 1\n- [X] Task 2") update_issue(description: "- [x] Task 1\n- [ ] Task 3\n- [ ] Task 2") end it 'does not create a system note for the task' do task_note = find_note('marked the checklist item **Task 2** as incomplete') description_notes = find_notes('description') expect(task_note).to be_nil expect(description_notes.length).to eq(2) end end context 'when a Task list with a completed item is totally replaced' do before do update_issue(description: "- [ ] Task 1\n- [X] 
Task 2") update_issue(description: "- [ ] One\n- [ ] Two\n- [ ] Three") end it 'does not create a system note referencing the position the old item' do task_note = find_note('marked the checklist item **Two** as incomplete') description_notes = find_notes('description') expect(task_note).to be_nil expect(description_notes.length).to eq(2) end it 'does not generate a new note at all' do expect do update_issue(description: "- [ ] One\n- [ ] Two\n- [ ] Three") end.not_to change { Note.count } end end end context 'updating labels' do let(:label_a) { label } let(:label_b) { label2 } let(:label_c) { label3 } let(:label_locked) { create(:label, title: 'locked', project: project, lock_on_merge: true) } let(:issuable) { issue } it_behaves_like 'updating issuable labels' it_behaves_like 'keeps issuable labels sorted after update' it_behaves_like 'broadcasting issuable labels updates' context 'when the issue belongs directly to a group' do let(:container) { group } it_behaves_like 'updating issuable labels' end def update_issuable(update_params) update_issue(update_params) end end context 'updating dates' do subject(:result) { described_class.new(container: project, current_user: user, params: params).execute(issue) } let(:updated_date) { 1.week.from_now.to_date } shared_examples 'issue update service that triggers date updates' do it 'triggers graphql date updated subscription' do expect(GraphqlTriggers).to receive(:issuable_dates_updated).with(issue).and_call_original result end end shared_examples 'issue update service that does not trigger date updates' do it 'does not trigger date updated subscriptions' do expect(GraphqlTriggers).not_to receive(:issuable_dates_updated) result end end context 'when due_date is updated' do let(:params) { { due_date: updated_date } } it_behaves_like 'issue update service that triggers date updates' end context 'when start_date is updated' do let(:params) { { start_date: updated_date } } it_behaves_like 'issue update service that triggers date updates' end context 'when no date is updated' do let(:params) { { title: 'should not trigger date updates' } } it_behaves_like 'issue update service that does not trigger date updates' end context 'when update is not successful but date is provided' do let(:params) { { title: '', due_date: updated_date } } it_behaves_like 'issue update service that does not trigger date updates' end end context 'updating asssignee_id' do it 'changes assignee' do expect_next_instance_of(NotificationService::Async) do |service| expect(service).to receive(:reassigned_issue).with(issue, user, [user3]) end update_issue(assignee_ids: [user2.id]) expect(issue.reload.assignees).to eq([user2]) end it 'does not update assignee when assignee_id is invalid' do expect(NotificationService).not_to receive(:new) update_issue(assignee_ids: [-1]) expect(issue.reload.assignees).to eq([user3]) end it 'unassigns assignee when user id is 0' do expect_next_instance_of(NotificationService::Async) do |service| expect(service).to receive(:reassigned_issue).with(issue, user, [user3]) end update_issue(assignee_ids: [0]) expect(issue.reload.assignees).to be_empty end it 'does not update assignee_id when user cannot read issue' do expect(NotificationService).not_to receive(:new) update_issue(assignee_ids: [create(:user).id]) expect(issue.reload.assignees).to eq([user3]) end context "when issuable feature is private" do levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC] levels.each do |level| it "does not update with unauthorized assignee when 
project is #{Gitlab::VisibilityLevel.level_name(level)}" do expect(NotificationService).not_to receive(:new) assignee = create(:user) project.update!(visibility_level: level) feature_visibility_attr = :"#{issue.model_name.plural}_access_level" project.project_feature.update_attribute(feature_visibility_attr, ProjectFeature::PRIVATE) expect { update_issue(assignee_ids: [assignee.id]) }.not_to change { issue.assignees } end end end it 'tracks the assignment events' do original_assignee = issue.assignees.first! update_issue(assignee_ids: [user2.id]) update_issue(assignee_ids: []) update_issue(assignee_ids: [user3.id]) expected_events = [ have_attributes({ issue_id: issue.id, user_id: original_assignee.id, action: 'remove' }), have_attributes({ issue_id: issue.id, user_id: user2.id, action: 'add' }), have_attributes({ issue_id: issue.id, user_id: user2.id, action: 'remove' }), have_attributes({ issue_id: issue.id, user_id: user3.id, action: 'add' }) ] expect(issue.assignment_events).to match_array(expected_events) end end context 'updating mentions' do let(:mentionable) { issue } include_examples 'updating mentions', described_class end context 'updating severity' do let(:opts) { { severity: 'low' } } shared_examples 'updates the severity' do |expected_severity| it 'has correct value' do update_issue(opts) expect(issue.severity).to eq(expected_severity) end it 'creates a system note' do expect(::IncidentManagement::AddSeveritySystemNoteWorker).to receive(:perform_async).with(issue.id, user.id) update_issue(opts) end it 'triggers webhooks' do expect(project.project_namespace).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks) update_issue(opts) end end shared_examples 'does not change the severity' do it 'retains the original value' do expected_severity = issue.severity update_issue(opts) expect(issue.severity).to eq(expected_severity) end it 'does not trigger side-effects' do expect(::IncidentManagement::AddSeveritySystemNoteWorker).not_to receive(:perform_async) expect(project).not_to receive(:execute_hooks) expect(project).not_to receive(:execute_integrations) expect { update_issue(opts) }.not_to change(IssuableSeverity, :count) end end context 'on incidents' do let(:issue) { create(:incident, project: project) } context 'when severity has not been set previously' do it_behaves_like 'updates the severity', 'low' it 'creates a new record' do expect { update_issue(opts) }.to change(IssuableSeverity, :count).by(1) end context 'with unsupported severity value' do let(:opts) { { severity: 'unsupported-severity' } } it_behaves_like 'does not change the severity' end context 'with severity value defined but unchanged' do let(:opts) { { severity: IssuableSeverity::DEFAULT } } it_behaves_like 'does not change the severity' end context 'as guest' do let(:user) { guest } it_behaves_like 'does not change the severity' context 'and also author' do let(:issue) { create(:incident, project: project, author: user) } it_behaves_like 'does not change the severity' end context 'and also assignee' do let(:issue) { create(:incident, project: project, assignee_ids: [user.id]) } it_behaves_like 'does not change the severity' end end end context 'when severity has been set before' do before do create(:issuable_severity, issue: issue, severity: 'high') end it_behaves_like 'updates 
the severity', 'low' it 'does not create a new record' do expect { update_issue(opts) }.not_to change(IssuableSeverity, :count) end context 'with unsupported severity value' do let(:opts) { { severity: 'unsupported-severity' } } it_behaves_like 'updates the severity', IssuableSeverity::DEFAULT end context 'with severity value defined but unchanged' do let(:opts) { { severity: issue.severity } } it_behaves_like 'does not change the severity' end end end context 'when issue type is not incident' do it_behaves_like 'does not change the severity' end end context 'updating escalation status' do let(:opts) { { escalation_status: { status: 'acknowledged' } } } let(:escalation_update_class) { ::IncidentManagement::IssuableEscalationStatuses::AfterUpdateService } shared_examples 'updates the escalation status record' do |expected_status| let(:service_double) { instance_double(escalation_update_class) } it 'has correct value' do expect(escalation_update_class).to receive(:new).with(issue, user).and_return(service_double) expect(service_double).to receive(:execute) update_issue(opts) expect(issue.escalation_status.status_name).to eq(expected_status) end it 'triggers webhooks' do expect(project.project_namespace).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks) update_issue(opts) end end shared_examples 'does not change the status record' do it 'retains the original value' do expected_status = issue.escalation_status&.status_name update_issue(opts) expect(issue.escalation_status&.status_name).to eq(expected_status) end it 'does not trigger side-effects' do expect(project).not_to receive(:execute_hooks) expect(project).not_to receive(:execute_integrations) update_issue(opts) end end context 'when issue is an incident' do let(:issue) { create(:incident, project: project) } context 'with an escalation status record' do before do create(:incident_management_issuable_escalation_status, issue: issue) end it_behaves_like 'updates the escalation status record', :acknowledged context 'with unsupported status value' do let(:opts) { { escalation_status: { status: 'unsupported-status' } } } it_behaves_like 'does not change the status record' end context 'with status value defined but unchanged' do let(:opts) { { escalation_status: { status: issue.escalation_status.status_name } } } it_behaves_like 'does not change the status record' end end context 'without an escalation status record' do it 'creates a new record' do expect { update_issue(opts) }.to change(::IncidentManagement::IssuableEscalationStatus, :count).by(1) end it_behaves_like 'updates the escalation status record', :acknowledged end end context 'when issue type is not incident' do it_behaves_like 'does not change the status record' end end context 'duplicate issue' do let(:canonical_issue) { create(:issue, project: project) } context 'invalid canonical_issue_id' do it 'does not call the duplicate service' do expect(Issues::DuplicateService).not_to receive(:new) update_issue(canonical_issue_id: 123456789) end end context 'valid canonical_issue_id' do it 'calls the duplicate service with both issues' do expect_next_instance_of(Issues::DuplicateService) do |service| expect(service).to receive(:execute).with(issue, canonical_issue) end update_issue(canonical_issue_id: canonical_issue.id) end end end context 'move issue 
to another project' do let(:target_project) { create(:project) } context 'valid project' do before do target_project.add_maintainer(user) end it 'calls the move service with the proper issue and project' do move_stub = instance_double(Issues::MoveService) allow(Issues::MoveService).to receive(:new).and_return(move_stub) allow(move_stub).to receive(:execute).with(issue, target_project).and_return(issue) expect(move_stub).to receive(:execute).with(issue, target_project) update_issue(target_project: target_project) end end end context 'clone an issue' do context 'valid project' do let(:target_project) { create(:project) } before do target_project.add_maintainer(user) end it 'calls the move service with the proper issue and project' do clone_stub = instance_double(Issues::CloneService) allow(Issues::CloneService).to receive(:new).and_return(clone_stub) allow(clone_stub).to receive(:execute).with(issue, target_project, with_notes: nil).and_return(issue) expect(clone_stub).to receive(:execute).with(issue, target_project, with_notes: nil) update_issue(target_clone_project: target_project) end end end context 'clone an issue with notes' do context 'valid project' do let(:target_project) { create(:project) } before do target_project.add_maintainer(user) end it 'calls the move service with the proper issue and project' do clone_stub = instance_double(Issues::CloneService) allow(Issues::CloneService).to receive(:new).and_return(clone_stub) allow(clone_stub).to receive(:execute).with(issue, target_project, with_notes: true).and_return(issue) expect(clone_stub).to receive(:execute).with(issue, target_project, with_notes: true) update_issue(target_clone_project: target_project, clone_with_notes: true) end end end context 'when moving an issue ' do it 'raises an error for invalid move ids' do opts = { move_between_ids: [9000, non_existing_record_id] } expect { described_class.new(container: issue.project, current_user: user, params: opts).execute(issue) } .to raise_error(ActiveRecord::RecordNotFound) end end it_behaves_like 'issuable update service' do let(:open_issuable) { issue } let(:closed_issuable) { create(:closed_issue, project: project) } end context 'broadcasting issue assignee updates' do let(:update_params) { { assignee_ids: [user2.id] } } it 'triggers the GraphQL subscription' do expect(GraphqlTriggers).to receive(:issuable_assignees_updated).with(issue) update_issue(update_params) end context 'when assignee is not updated' do let(:update_params) { { title: 'Some other title' } } it 'does not trigger the GraphQL subscription' do expect(GraphqlTriggers).not_to receive(:issuable_assignees_updated).with(issue) update_issue(update_params) end end end it_behaves_like 'issuable record that supports quick actions' do let(:existing_issue) { create(:issue, project: project) } let(:issuable) { described_class.new(container: project, current_user: user, params: params).execute(existing_issue) } end end end
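To tie the spec back to the class, a minimal sketch of an update call is shown below. The params are a small subset of the `opts` exercised in the spec, and the `project`, `user`, and `issue` records are assumed to exist.

```ruby
# Sketch only: params mirror the spec's `opts`; records are assumed to exist.
params = { title: 'New title', assignee_ids: [user.id], confidential: true }

updated_issue = Issues::UpdateService
  .new(container: project, current_user: user, params: params)
  .execute(issue)

# Note: spam checking is off by default for updates (see the initializer);
# pass perform_spam_check: true to opt in.
```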
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Issues class CreateService < Issues::BaseService include ResolveDiscussions prepend RateLimitedService include ::Services::ReturnServiceResponses rate_limit key: :issues_create, opts: { scope: [:project, :current_user, :external_author] } def initialize(container:, current_user: nil, params: {}, build_service: nil, perform_spam_check: true) @extra_params = params.delete(:extra_params) || {} super(container: container, current_user: current_user, params: params) @perform_spam_check = perform_spam_check @build_service = build_service || BuildService.new(container: project, current_user: current_user, params: params) end def execute(skip_system_notes: false) return error(_('Operation not allowed'), 403) unless @current_user.can?(authorization_action, container) # We should not initialize the callback classes during the build service execution because these will be # initialized when we call #create below @issue = @build_service.execute(initialize_callbacks: false) # issue_type and work_item_type are set in BuildService, so we can delete it from params, in later phase # it can be set also from quick actions [:issue_type, :work_item_type, :work_item_type_id].each { |attribute| params.delete(attribute) } handle_move_between_ids(@issue) @add_related_issue ||= params.delete(:add_related_issue) filter_resolve_discussion_params issue = create(@issue, skip_system_notes: skip_system_notes) if issue.persisted? success(issue: issue) else error(issue.errors.full_messages, 422, pass_back: { issue: issue }) end end def external_author params[:external_author] # present when creating an issue using service desk (email: from) end def before_create(issue) issue.check_for_spam(user: current_user, action: :create) if perform_spam_check # current_user (defined in BaseService) is not available within run_after_commit block user = current_user assign_description_from_template(issue) issue.run_after_commit do NewIssueWorker.perform_async(issue.id, user.id, issue.class.to_s) Issues::PlacementWorker.perform_async(nil, issue.project_id) # issue.namespace_id can point to either a project through project namespace or a group. Onboarding::IssueCreatedWorker.perform_async(issue.namespace_id) end end # Add new items to Issues::AfterCreateService if they can be performed in Sidekiq def after_create(issue) user_agent_detail_service.create handle_add_related_issue(issue) resolve_discussions_with_issue(issue) handle_escalation_status_change(issue) create_timeline_event(issue) try_to_associate_contacts(issue) super end def handle_changes(issue, options) super old_associations = options.fetch(:old_associations, {}) old_assignees = old_associations.fetch(:assignees, []) handle_assignee_changes(issue, old_assignees) end def handle_assignee_changes(issue, old_assignees) return if issue.assignees == old_assignees create_assignee_note(issue, old_assignees) Gitlab::ResourceEvents::AssignmentEventRecorder.new(parent: issue, old_assignees: old_assignees).record end def resolve_discussions_with_issue(issue) return if discussions_to_resolve.empty? Discussions::ResolveService.new( project, current_user, one_or_more_discussions: discussions_to_resolve, follow_up_issue: issue ).execute end private def authorization_action :create_issue end attr_reader :perform_spam_check, :extra_params def create_timeline_event(issue) return unless issue.work_item_type&.incident? 
IncidentManagement::TimelineEvents::CreateService.create_incident(issue, current_user) end def user_agent_detail_service UserAgentDetailService.new(spammable: @issue, perform_spam_check: perform_spam_check) end def handle_add_related_issue(issue) return unless @add_related_issue IssueLinks::CreateService.new(issue, issue.author, { target_issuable: @add_related_issue }).execute end def try_to_associate_contacts(issue) return unless issue.external_author return unless current_user.can?(:set_issue_crm_contacts, issue) contacts = [issue.external_author] contacts.concat extra_params[:cc] unless extra_params[:cc].nil? set_crm_contacts(issue, contacts) end def assign_description_from_template(issue) return if issue.description.present? # Find the exact name for the default template (if the project has one). # Since there are multiple possibilities regarding the capitalization(s) that the # default template file name can have, getting the exact template name here will # allow us to extract the contents later, and bail early if the project does not have # a default template templates = TemplateFinder.all_template_names(project, :issues) template = templates.values.flatten.find { |tmpl| tmpl[:name].casecmp?('default') } return unless template begin default_template = TemplateFinder.build( :issues, issue.project, { name: template[:name], source_template_project_id: issue.project.id } ).execute rescue ::Gitlab::Template::Finders::RepoTemplateFinder::FileNotFoundError nil end issue.description = default_template.content if default_template.present? end end end Issues::CreateService.prepend_mod ```
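Before the spec that follows, a quick sketch of how this create service is typically driven. The record setup is assumed, and the response handling mirrors what the spec asserts (`result[:issue]`, `result.errors`).

```ruby
# Sketch only: assumes `project` and `user` exist and the user is allowed to
# create issues in the project (see the authorization check in #execute).
response = Issues::CreateService
  .new(container: project, current_user: user, params: { title: 'Awesome issue' })
  .execute

if response.success?
  issue = response[:issue] # the persisted Issue
else
  response.errors # e.g. ["Title can't be blank"] or ["Operation not allowed"]
end
```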
# frozen_string_literal: true require 'spec_helper' RSpec.describe Issues::CreateService, feature_category: :team_planning do include AfterNextHelpers let_it_be(:group) { create(:group, :crm_enabled) } let_it_be_with_reload(:project) { create(:project, :public, group: group) } let_it_be(:user) { create(:user) } let(:opts) { { title: 'title' } } let(:service) { described_class.new(container: project, current_user: user, params: opts) } it_behaves_like 'rate limited service' do let(:key) { :issues_create } let(:key_scope) { %i[project current_user external_author] } let(:application_limit_key) { :issues_create_limit } let(:created_model) { Issue } end describe '#execute' do let_it_be(:assignee) { create(:user) } let_it_be(:milestone) { create(:milestone, project: project) } let(:result) { service.execute } let(:issue) { result[:issue] } context 'when params are invalid' do let(:opts) { { title: '' } } before_all do project.add_guest(assignee) end it 'returns an error service response' do expect(result).to be_error expect(result.errors).to include("Title can't be blank") expect(issue).not_to be_persisted end end context 'when params are valid' do let_it_be(:labels) { create_pair(:label, project: project) } before_all do project.add_guest(user) project.add_guest(assignee) end let(:opts) do { title: 'Awesome issue', issue_type: :task, description: 'please fix', assignee_ids: [assignee.id], label_ids: labels.map(&:id), milestone_id: milestone.id, milestone: milestone, due_date: Date.tomorrow } end context 'when an unauthorized project_id is provided' do let(:unauthorized_project) { create(:project) } before do opts[:project_id] = unauthorized_project.id end it 'ignores the project_id param and creates issue in the given project' do expect(issue.project).to eq(project) expect(unauthorized_project.reload.issues.count).to eq(0) end end describe 'authorization' do let_it_be(:project) { create(:project, :private, group: group).tap { |project| project.add_guest(user) } } let(:opts) { { title: 'private issue', description: 'please fix' } } context 'when the user is authorized' do it 'allows the user to create an issue' do expect(result).to be_success expect(issue).to be_persisted end end context 'when the user is not authorized' do let(:user) { create(:user) } it 'does not allow the user to create an issue' do expect(result).to be_error expect(result.errors).to contain_exactly('Operation not allowed') expect(result.http_status).to eq(403) expect(issue).to be_nil end end end it 'works if base work item types were not created yet' do WorkItems::Type.delete_all expect do issue end.to change(Issue, :count).by(1) end it 'creates the issue with the given params' do expect(Issuable::CommonSystemNotesService).to receive_message_chain(:new, :execute) expect(result).to be_success expect(issue).to be_persisted expect(issue).to be_a(::Issue) expect(issue.title).to eq('Awesome issue') expect(issue.assignees).to eq([assignee]) expect(issue.labels).to match_array(labels) expect(issue.milestone).to eq(milestone) expect(issue.due_date).to eq(Date.tomorrow) expect(issue.work_item_type.base_type).to eq('task') expect(issue.issue_customer_relations_contacts).to be_empty end context 'with milestone' do it 'deletes milestone issues count cache' do expect_next(Milestones::IssuesCountService, milestone) .to receive(:delete_cache).and_call_original expect(result).to be_success end end context 'when the work item type is not allowed to create' do before do allow_next_instance_of(::Issues::BuildService) do |instance| 
allow(instance).to receive(:create_issue_type_allowed?).twice.and_return(false) end end it 'ignores the type and creates default issue' do expect(result).to be_success expect(issue).to be_persisted expect(issue).to be_a(::Issue) expect(issue.work_item_type.base_type).to eq('issue') end end it 'calls NewIssueWorker with correct arguments' do expect(NewIssueWorker).to receive(:perform_async).with(Integer, user.id, 'Issue') issue end it 'calls GroupMentionWorker' do expect(Integrations::GroupMentionWorker).to receive(:perform_async) issue end context 'when a build_service is provided' do let(:result) { described_class.new(container: project, current_user: user, params: opts, build_service: build_service).execute } let(:issue_from_builder) { build(:work_item, project: project, title: 'Issue from builder') } let(:build_service) { double(:build_service, execute: issue_from_builder) } it 'uses the provided service to build the issue' do expect(result).to be_success expect(issue).to be_persisted expect(issue).to be_a(WorkItem) end end context 'when issue template is provided' do let_it_be(:files) { { '.gitlab/issue_templates/Default.md' => 'Default template contents' } } let_it_be_with_reload(:project) { create(:project, :custom_repo, group: group, files: files).tap { |project| project.add_guest(user) } } context 'when description is blank' do it 'sets template contents as description when description is blank' do opts[:description] = '' expect(result).to be_success expect(issue).to be_persisted expect(issue).to have_attributes(description: 'Default template contents') end end context 'when description is not blank' do it 'does not apply template when description is not blank' do expect(result).to be_success expect(issue).to be_persisted expect(issue).to have_attributes(description: 'please fix') end end end context 'when skip_system_notes is true' do let(:issue) { described_class.new(container: project, current_user: user, params: opts).execute(skip_system_notes: true) } it 'does not call Issuable::CommonSystemNotesService' do expect(Issuable::CommonSystemNotesService).not_to receive(:new) issue end end context 'when setting a position' do let(:issue_before) { create(:issue, project: project, relative_position: 10) } let(:issue_after) { create(:issue, project: project, relative_position: 50) } before do opts.merge!(move_between_ids: [issue_before.id, issue_after.id]) end it 'sets the correct relative position' do expect(result).to be_success expect(issue).to be_persisted expect(issue.relative_position).to be_present expect(issue.relative_position).to be_between(issue_before.relative_position, issue_after.relative_position) end end it_behaves_like 'not an incident issue' context 'when issue is incident type' do before do opts.merge!(issue_type: 'incident') end let(:current_user) { user } subject { issue } context 'as reporter' do let_it_be(:reporter) { create(:user) } let(:user) { reporter } before_all do project.add_reporter(reporter) end it_behaves_like 'incident issue' it 'calls IncidentManagement::Incidents::CreateEscalationStatusService' do expect_next(::IncidentManagement::IssuableEscalationStatuses::CreateService, a_kind_of(Issue)) .to receive(:execute) issue end it 'calls IncidentManagement::TimelineEvents::CreateService.create_incident' do expect(IncidentManagement::TimelineEvents::CreateService) .to receive(:create_incident) .with(a_kind_of(Issue), reporter) issue end it 'calls NewIssueWorker with correct arguments' do expect(NewIssueWorker).to receive(:perform_async).with(Integer, 
reporter.id, 'Issue') issue end context 'when invalid' do before do opts.merge!(title: '') end it 'does not apply an incident label prematurely' do expect { subject }.to not_change(LabelLink, :count).and not_change(Issue, :count) end end end context 'as guest' do it_behaves_like 'not an incident issue' end end it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do expect do issue BatchLoader::Executor.clear_current end.to change { project.open_issues_count }.from(0).to(1) end context 'when current user cannot set issue metadata in the project' do let_it_be(:non_member) { create(:user) } it 'filters out params that cannot be set without the :set_issue_metadata permission' do result = described_class.new(container: project, current_user: non_member, params: opts).execute issue = result[:issue] expect(result).to be_success expect(issue).to be_persisted expect(issue.title).to eq('Awesome issue') expect(issue.description).to eq('please fix') expect(issue.assignees).to be_empty expect(issue.labels).to be_empty expect(issue.milestone).to be_nil expect(issue.due_date).to be_nil end it 'can create confidential issues' do result = described_class.new(container: project, current_user: non_member, params: opts.merge(confidential: true)).execute issue = result[:issue] expect(result).to be_success expect(issue.confidential).to be_truthy end end it 'moves the issue to the end, in an asynchronous worker' do expect(Issues::PlacementWorker).to receive(:perform_async).with(be_nil, Integer) described_class.new(container: project, current_user: user, params: opts).execute end context 'when label belongs to project group' do let(:group) { create(:group) } let(:group_labels) { create_pair(:group_label, group: group) } let(:opts) do { title: 'Title', description: 'Description', label_ids: group_labels.map(&:id) } end before do project.update!(group: group) end it 'assigns group labels' do expect(issue.labels).to match_array group_labels end end context 'when label belongs to different project' do let(:label) { create(:label) } let(:opts) do { title: 'Title', description: 'Description', label_ids: [label.id] } end it 'does not assign label' do expect(issue.labels).not_to include label end end context 'when labels is nil' do let(:opts) do { title: 'Title', description: 'Description', labels: nil } end it 'does not assign label' do expect(issue.labels).to be_empty end end context 'when labels is nil and label_ids is present' do let(:opts) do { title: 'Title', description: 'Description', labels: nil, label_ids: labels.map(&:id) } end it 'assigns group labels' do expect(issue.labels).to match_array labels end end context 'when milestone belongs to different project' do let(:milestone) { create(:milestone) } let(:opts) do { title: 'Title', description: 'Description', milestone_id: milestone.id } end it 'does not assign milestone' do expect(issue.milestone).not_to eq milestone end end context 'when assignee is set' do let(:opts) do { title: 'Title', description: 'Description', assignee_ids: [assignee.id] } end it 'invalidates open issues counter for assignees when issue is assigned' do project.add_maintainer(assignee) described_class.new(container: project, current_user: user, params: opts).execute expect(assignee.assigned_open_issues_count).to eq 1 end it 'records the assignee assignment event' do result = described_class.new(container: project, current_user: user, params: opts).execute issue = result.payload[:issue] expect(issue.assignment_events).to match([have_attributes(user_id: 
assignee.id, action: 'add')]) end end context 'when duplicate label titles are given' do let(:label) { create(:label, project: project) } let(:opts) do { title: 'Title', description: 'Description', labels: [label.title, label.title] } end it 'assigns the label once' do expect(issue.labels).to contain_exactly(label) end end context 'when sentry identifier is given' do before do sentry_attributes = { sentry_issue_attributes: { sentry_issue_identifier: 42 } } opts.merge!(sentry_attributes) end it 'does not assign the sentry error' do expect(issue.sentry_issue).to eq(nil) end context 'user is reporter or above' do before do project.add_developer(user) end it 'assigns the sentry error' do expect(issue.sentry_issue).to be_kind_of(SentryIssue) end end end describe 'executing hooks' do let(:opts) { { title: 'Title', description: 'Description' } } let(:expected_payload) do include( event_type: 'issue', object_kind: 'issue', changes: { author_id: { current: user.id, previous: nil }, created_at: { current: kind_of(Time), previous: nil }, description: { current: opts[:description], previous: nil }, id: { current: kind_of(Integer), previous: nil }, iid: { current: kind_of(Integer), previous: nil }, project_id: { current: project.id, previous: nil }, title: { current: opts[:title], previous: nil }, updated_at: { current: kind_of(Time), previous: nil }, time_estimate: { current: 0, previous: nil } }, object_attributes: include( opts.merge( author_id: user.id, project_id: project.id ) ) ) end it 'executes issue hooks' do expect(project.project_namespace).to receive(:execute_hooks).with(expected_payload, :issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(expected_payload, :issue_hooks) described_class.new(container: project, current_user: user, params: opts).execute end context 'when issue is confidential' do let(:expected_payload) do include( event_type: 'confidential_issue', object_kind: 'issue', changes: include( confidential: { current: true, previous: false } ), object_attributes: include(confidential: true) ) end before do opts[:confidential] = true end it 'executes confidential issue hooks' do expect(project.project_namespace).to receive(:execute_hooks).with(expected_payload, :confidential_issue_hooks) expect(project.project_namespace).to receive(:execute_integrations).with(expected_payload, :confidential_issue_hooks) described_class.new(container: project, current_user: user, params: opts).execute end end end context 'after_save callback to store_mentions' do context 'when mentionable attributes change' do let(:opts) { { title: 'Title', description: "Description with #{user.to_reference}" } } it 'saves mentions' do expect_next_instance_of(Issue) do |instance| expect(instance).to receive(:store_mentions!).and_call_original end expect(issue.user_mentions.count).to eq 1 end end context 'when save fails' do let(:opts) { { title: '', label_ids: labels.map(&:id), milestone_id: milestone.id } } it 'does not call store_mentions' do expect_next_instance_of(Issue) do |instance| expect(instance).not_to receive(:store_mentions!).and_call_original end expect(issue.valid?).to be false expect(issue.user_mentions.count).to eq 0 end end end it 'schedules a namespace onboarding create action worker' do expect(Onboarding::IssueCreatedWorker).to receive(:perform_async).with(project.project_namespace_id) issue end end context 'issue create service' do context 'assignees' do before_all do project.add_maintainer(user) end it 'removes assignee when user id is invalid' do opts = { title: 
'Title', description: 'Description', assignee_ids: [-1] } result = described_class.new(container: project, current_user: user, params: opts).execute issue = result[:issue] expect(result).to be_success expect(issue.assignees).to be_empty end it 'removes assignee when user id is 0' do opts = { title: 'Title', description: 'Description', assignee_ids: [0] } result = described_class.new(container: project, current_user: user, params: opts).execute issue = result[:issue] expect(result).to be_success expect(issue.assignees).to be_empty end it 'saves assignee when user id is valid' do project.add_maintainer(assignee) opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] } result = described_class.new(container: project, current_user: user, params: opts).execute issue = result[:issue] expect(result).to be_success expect(issue.assignees).to eq([assignee]) end context "when issuable feature is private" do before do project.project_feature.update!( issues_access_level: ProjectFeature::PRIVATE, merge_requests_access_level: ProjectFeature::PRIVATE ) end levels = [Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PUBLIC] levels.each do |level| it "removes not authorized assignee when project is #{Gitlab::VisibilityLevel.level_name(level)}" do project.update!(visibility_level: level) opts = { title: 'Title', description: 'Description', assignee_ids: [assignee.id] } result = described_class.new(container: project, current_user: user, params: opts).execute issue = result[:issue] expect(result).to be_success expect(issue.assignees).to be_empty end end end end end it_behaves_like 'issuable record that supports quick actions' do let(:issuable) { described_class.new(container: project, current_user: user, params: params).execute[:issue] } end context 'Quick actions' do context 'with assignee, milestone, and contact in params and command' do let_it_be(:contact) { create(:contact, group: group) } let(:opts) do { assignee_ids: [create(:user).id], milestone_id: 1, title: 'Title', description: %(/assign @#{assignee.username}\n/milestone %"#{milestone.name}"), add_contacts: [contact.email] } end before_all do group.add_maintainer(user) project.add_maintainer(assignee) end it 'assigns, sets milestone, and sets contact to issuable from command' do expect(result).to be_success expect(issue).to be_persisted expect(issue.assignees).to eq([assignee]) expect(issue.milestone).to eq(milestone) expect(issue.issue_customer_relations_contacts.last.contact).to eq(contact) end end context 'with external_author' do let_it_be(:contact) { create(:contact, group: group) } context 'when CRM contact exists with matching e-mail' do let(:opts) do { title: 'Title', external_author: contact.email } end context 'with permission' do it 'assigns contact to issue' do group.add_reporter(user) expect(result).to be_success expect(issue).to be_persisted expect(issue.issue_customer_relations_contacts.last.contact).to eq(contact) end end context 'without permission' do it 'does not assign contact to issue' do group.add_guest(user) expect(result).to be_success expect(issue).to be_persisted expect(issue.issue_customer_relations_contacts).to be_empty end end end context 'when no CRM contact exists with matching e-mail' do let(:opts) do { title: 'Title', external_author: '[email protected]' } end it 'does not assign contact to issue' do group.add_reporter(user) expect(issue).to be_persisted expect(issue.issue_customer_relations_contacts).to be_empty end end end context 'with alert bot author' do let_it_be(:user) { 
Users::Internal.alert_bot } let_it_be(:label) { create(:label, project: project) } let(:opts) do { title: 'Title', description: %(/label #{label.to_reference(format: :name)}") } end it 'can apply labels' do expect(result).to be_success expect(issue).to be_persisted expect(issue.labels).to eq([label]) end end context 'when using promote_to_incident' do let(:opts) { { title: 'Title', description: '/promote_to_incident' } } before do project.add_developer(user) end it 'creates an issue with the correct issue type' do expect { result }.to change(Issue, :count).by(1) created_issue = Issue.last expect(created_issue.work_item_type).to eq(WorkItems::Type.default_by_type('incident')) end end end context 'resolving discussions' do let_it_be(:discussion) { create(:diff_note_on_merge_request).to_discussion } let_it_be(:merge_request) { discussion.noteable } let_it_be(:project) { merge_request.source_project } before_all do project.add_maintainer(user) end describe 'for a single discussion' do let(:opts) { { discussion_to_resolve: discussion.id, merge_request_to_resolve_discussions_of: merge_request.iid } } it 'resolves the discussion' do described_class.new(container: project, current_user: user, params: opts).execute discussion.first_note.reload expect(discussion.resolved?).to be(true) end it 'added a system note to the discussion' do described_class.new(container: project, current_user: user, params: opts).execute reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first expect(reloaded_discussion.last_note.system).to eq(true) end it 'sets default title and description values if not provided' do result = described_class.new( container: project, current_user: user, params: opts ).execute issue = result[:issue] expect(result).to be_success expect(issue).to be_persisted expect(issue.title).to eq("Follow-up from \"#{merge_request.title}\"") expect(issue.description).to include("The following discussion from #{merge_request.to_reference} should be addressed") end it 'takes params from the request over the default values' do result = described_class.new( container: project, current_user: user, params: opts.merge( description: 'Custom issue description', title: 'My new issue' ) ).execute issue = result[:issue] expect(result).to be_success expect(issue).to be_persisted expect(issue.description).to eq('Custom issue description') expect(issue.title).to eq('My new issue') end end describe 'for a merge request' do let(:opts) { { merge_request_to_resolve_discussions_of: merge_request.iid } } it 'resolves the discussion' do described_class.new(container: project, current_user: user, params: opts).execute discussion.first_note.reload expect(discussion.resolved?).to be(true) end it 'added a system note to the discussion' do described_class.new(container: project, current_user: user, params: opts).execute reloaded_discussion = MergeRequest.find(merge_request.id).discussions.first expect(reloaded_discussion.last_note.system).to eq(true) end it 'sets default title and description values if not provided' do result = described_class.new( container: project, current_user: user, params: opts ).execute issue = result[:issue] expect(result).to be_success expect(issue).to be_persisted expect(issue.title).to eq("Follow-up from \"#{merge_request.title}\"") expect(issue.description).to include("The following discussion from #{merge_request.to_reference} should be addressed") end it 'takes params from the request over the default values' do result = described_class.new( container: project, current_user: user, 
params: opts.merge( description: 'Custom issue description', title: 'My new issue' ) ).execute issue = result[:issue] expect(result).to be_success expect(issue).to be_persisted expect(issue.description).to eq('Custom issue description') expect(issue.title).to eq('My new issue') end end end context 'add related issue' do let_it_be(:related_issue) { create(:issue, project: project) } let(:opts) do { title: 'A new issue', add_related_issue: related_issue } end it 'ignores related issue if not accessible' do expect { issue }.not_to change { IssueLink.count } expect(result).to be_success expect(issue).to be_persisted end context 'when user has access to the related issue' do before do project.add_developer(user) end it 'adds a link to the issue' do expect { issue }.to change { IssueLink.count }.by(1) expect(result).to be_success expect(issue).to be_persisted expect(issue.related_issues(user)).to eq([related_issue]) end end end context 'checking spam' do let(:params) do { title: 'Spam issue' } end let(:perform_spam_check) { true } subject do described_class.new(container: project, current_user: user, params: params, perform_spam_check: perform_spam_check) end it 'checks for spam' do expect_next_instance_of(Issue) do |instance| expect(instance).to receive(:check_for_spam).with(user: user, action: :create) end subject.execute end context 'when `perform_spam_check` is set to `false`' do let(:perform_spam_check) { false } it 'does not execute the SpamActionService' do expect_next_instance_of(Issue) do |instance| expect(instance).not_to receive(:check_for_spam) end subject.execute end end end end end
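One branch the spec above does not reach is `extra_params[:cc]` in `#try_to_associate_contacts`. A minimal sketch that could sit alongside the `context 'with external_author'` block, reusing its `group`, `user`, `result` and `issue` definitions; it assumes both e-mails resolve to existing CRM contacts of the group and that `set_crm_contacts` associates all of them:

```ruby
context 'when extra_params contains cc contact emails' do
  let_it_be(:contact) { create(:contact, group: group) }
  let_it_be(:cc_contact) { create(:contact, group: group) }

  let(:opts) do
    {
      title: 'Title',
      external_author: contact.email,
      extra_params: { cc: [cc_contact.email] }
    }
  end

  it 'associates the author contact and the cc contacts' do
    group.add_reporter(user)

    expect(result).to be_success
    expect(issue.issue_customer_relations_contacts.map(&:contact))
      .to match_array([contact, cc_contact])
  end
end
```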
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Pages class DestroyDeploymentsService def initialize(project, last_deployment_id = nil) @project = project @last_deployment_id = last_deployment_id end def execute deployments_to_destroy = @project.pages_deployments deployments_to_destroy = deployments_to_destroy.older_than(@last_deployment_id) if @last_deployment_id deployments_to_destroy.find_each(&:destroy) # rubocop: disable CodeReuse/ActiveRecord end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Pages::DestroyDeploymentsService, feature_category: :pages do let_it_be(:project) { create(:project) } let!(:old_deployments) { create_list(:pages_deployment, 2, project: project) } let!(:last_deployment) { create(:pages_deployment, project: project) } let!(:newer_deployment) { create(:pages_deployment, project: project) } let!(:deployment_from_another_project) { create(:pages_deployment) } it 'destroys all deployments of the project' do expect { described_class.new(project).execute } .to change { PagesDeployment.count }.by(-4) expect(deployment_from_another_project.reload).to be_persisted end it 'destroys only deployments older than the last deployment when its id is provided' do expect { described_class.new(project, last_deployment.id).execute } .to change { PagesDeployment.count }.by(-2) expect(last_deployment.reload).to be_persisted expect(newer_deployment.reload).to be_persisted expect(deployment_from_another_project.reload).to be_persisted end end
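A further edge case worth pinning down, sketched against the same factories as above: a project with no deployments is a no-op.

```ruby
it 'does nothing for a project without deployments' do
  empty_project = create(:project)

  expect { described_class.new(empty_project).execute }
    .not_to change { PagesDeployment.count }
end
```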
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Pages class DeleteService < BaseService def execute PagesDeployment.deactivate_all(project) # project.pages_domains.delete_all will just nullify project_id: # > If no :dependent option is given, then it will follow the default # > strategy for `has_many :through` associations. # > The default strategy is :nullify which sets the foreign keys to NULL. PagesDomain.for_project(project).delete_all publish_deleted_event DestroyPagesDeploymentsWorker.perform_async(project.id) end private def publish_deleted_event event = Pages::PageDeletedEvent.new(data: { project_id: project.id, namespace_id: project.namespace_id, root_namespace_id: project.root_namespace.id }) Gitlab::EventStore.publish(event) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Pages::DeleteService, feature_category: :pages do let_it_be(:admin) { create(:admin) } let(:project) { create(:project, path: "my.project") } let(:service) { described_class.new(project, admin) } it 'marks pages as not deployed' do create(:pages_deployment, project: project) expect { service.execute } .to change { project.reload.pages_deployed? } .from(true).to(false) end it 'deletes all domains' do domain = create(:pages_domain, project: project) unrelated_domain = create(:pages_domain) service.execute expect(PagesDomain.find_by_id(domain.id)).to eq(nil) expect(PagesDomain.find_by_id(unrelated_domain.id)).to be_present end it 'schedules destruction of pages deployments' do expect(DestroyPagesDeploymentsWorker) .to(receive(:perform_async).with(project.id)) service.execute end it 'removes pages deployments', :sidekiq_inline do create(:pages_deployment, project: project) expect { service.execute } .to change { PagesDeployment.count }.by(-1) end it 'publishes a PageDeleted event with project id and namespace id' do expected_data = { project_id: project.id, namespace_id: project.namespace_id, root_namespace_id: project.root_namespace.id } expect { service.execute }.to publish_event(Pages::PageDeletedEvent).with(expected_data) end end
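The service deactivates deployments synchronously before enqueuing the destroy worker. A sketch, reusing the `service` defined above, that pins that ordering down with message expectations:

```ruby
it 'deactivates deployments before scheduling their destruction' do
  expect(PagesDeployment).to receive(:deactivate_all).with(project).ordered
  expect(DestroyPagesDeploymentsWorker).to receive(:perform_async).with(project.id).ordered

  service.execute
end
```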
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Timelogs class CreateService < Timelogs::BaseService attr_accessor :issuable, :time_spent, :spent_at, :summary def initialize(issuable, time_spent, spent_at, summary, user) super(user) @issuable = issuable @time_spent = time_spent @spent_at = spent_at @summary = summary end def execute unless can?(current_user, :create_timelog, issuable) return error( _("%{issuable_class_name} doesn't exist or you don't have permission to add timelog to it.") % { issuable_class_name: issuable.nil? ? 'Issuable' : issuable.base_class_name }, 404) end return error(_("Spent at can't be a future date and time."), 404) if spent_at.future? return error(_("Time spent can't be zero."), 404) if time_spent == 0 issue = issuable if issuable.is_a?(Issue) merge_request = issuable if issuable.is_a?(MergeRequest) timelog = Timelog.new( time_spent: time_spent, spent_at: spent_at, summary: summary, user: current_user, issue: issue, merge_request: merge_request, note: nil ) if !timelog.save error_in_save(timelog) else SystemNoteService.created_timelog(issuable, issuable.project, current_user, timelog) success(timelog) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Timelogs::CreateService, feature_category: :team_planning do let_it_be(:author) { create(:user) } let_it_be(:project) { create(:project, :public) } let(:issuable) { nil } let(:users_container) { project } describe '#execute' do subject { service.execute } context 'when issuable is an Issue' do let_it_be(:issuable) { create(:issue, project: project) } let_it_be(:note_noteable) { create(:issue, project: project) } it_behaves_like 'issuable supports timelog creation service' end context 'when issuable is a MergeRequest' do let_it_be(:issuable) { create(:merge_request, source_project: project, source_branch: 'branch-1') } let_it_be(:note_noteable) { create(:merge_request, source_project: project, source_branch: 'branch-2') } it_behaves_like 'issuable supports timelog creation service' end context 'when issuable is a WorkItem' do let_it_be(:issuable) { create(:work_item, project: project, title: 'WorkItem-1') } let_it_be(:note_noteable) { create(:work_item, project: project, title: 'WorkItem-2') } it_behaves_like 'issuable supports timelog creation service' end context 'when issuable is an Incident' do let_it_be(:issuable) { create(:incident, project: project) } let_it_be(:note_noteable) { create(:incident, project: project) } it_behaves_like 'issuable supports timelog creation service' end end end
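The detailed behaviour lives in the `'issuable supports timelog creation service'` shared examples, which are not shown here. As a standalone sketch of the authorization guard, using only factories that already appear above plus a private project created inline; the expected message fragment is taken from the class's error string:

```ruby
it 'returns a not found error when the user cannot log time on the issuable' do
  private_project = create(:project, :private)
  issue = create(:issue, project: private_project)

  result = described_class.new(issue, 3600, Time.current, 'a summary', create(:user)).execute

  expect(result).to be_error
  expect(result.http_status).to eq(404)
  expect(result.message).to include("doesn't exist or you don't have permission")
end
```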
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Timelogs class DeleteService < Timelogs::BaseService attr_accessor :timelog def initialize(timelog, user) super(user) @timelog = timelog end def execute unless can?(current_user, :admin_timelog, timelog) return error(_("Timelog doesn't exist or you don't have permission to delete it"), 404) end if timelog.destroy issuable = timelog.issuable if issuable # Add a system note for the timelog removal SystemNoteService.remove_timelog(issuable, issuable.project, current_user, timelog) end success(timelog) else error(_('Failed to remove timelog'), 400) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Timelogs::DeleteService, feature_category: :team_planning do let_it_be(:author) { create(:user) } let_it_be(:project) { create(:project, :public) } let_it_be(:issue) { create(:issue, project: project) } let_it_be(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) } let(:service) { described_class.new(timelog, user) } describe '#execute' do subject { service.execute } context 'when the timelog exists' do let(:user) { author } it 'removes the timelog' do expect { subject }.to change { Timelog.count }.by(-1) end it 'returns the removed timelog' do is_expected.to be_success expect(subject.payload[:timelog]).to eq(timelog) end end context 'when the timelog does not exist' do let(:user) { create(:user) } let!(:timelog) { nil } it 'returns an error' do is_expected.to be_error expect(subject.message).to eq('Timelog doesn\'t exist or you don\'t have permission to delete it') expect(subject.http_status).to eq(404) end end context 'when the user does not have permission' do let(:user) { create(:user) } it 'returns an error' do is_expected.to be_error expect(subject.message).to eq('Timelog doesn\'t exist or you don\'t have permission to delete it') expect(subject.http_status).to eq(404) end end context 'when the timelog deletion fails' do let(:user) { author } let!(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) } before do allow(timelog).to receive(:destroy).and_return(false) end it 'returns an error' do is_expected.to be_error expect(subject.message).to eq('Failed to remove timelog') expect(subject.http_status).to eq(400) end end end end
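Removal also produces a system note via `SystemNoteService.remove_timelog`. A sketch that would sit next to the success context above:

```ruby
context 'when the timelog is successfully removed' do
  let(:user) { author }

  it 'adds a system note about the removed timelog' do
    expect(SystemNoteService)
      .to receive(:remove_timelog)
      .with(issue, issue.project, author, timelog)

    expect(subject).to be_success
  end
end
```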
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module ActivityPub class InboxResolverService attr_reader :subscription def initialize(subscription) @subscription = subscription end def execute profile = subscriber_profile unless profile.has_key?('inbox') && profile['inbox'].is_a?(String) raise ThirdPartyError, 'Inbox parameter absent or invalid' end subscription.subscriber_inbox_url = profile['inbox'] subscription.shared_inbox_url = profile.dig('entrypoints', 'sharedInbox') subscription.save! end private def subscriber_profile raw_data = download_subscriber_profile begin profile = Gitlab::Json.parse(raw_data) rescue JSON::ParserError => e raise ThirdPartyError, e.message end profile end def download_subscriber_profile begin response = Gitlab::HTTP.get(subscription.subscriber_url, headers: { 'Accept' => 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"' } ) rescue StandardError => e raise ThirdPartyError, e.message end response.body end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ActivityPub::InboxResolverService, feature_category: :integrations do let_it_be(:project) { create(:project, :public) } let_it_be_with_reload(:existing_subscription) { create(:activity_pub_releases_subscription, project: project) } let(:service) { described_class.new(existing_subscription) } shared_examples 'third party error' do it 'raises a ThirdPartyError' do expect { service.execute }.to raise_error(ActivityPub::ThirdPartyError) end it 'does not update the subscription record' do begin service.execute rescue StandardError end expect(ActivityPub::ReleasesSubscription.last.subscriber_inbox_url).not_to eq 'https://example.com/user/inbox' end end describe '#execute' do context 'with successful HTTP request' do before do allow(Gitlab::HTTP).to receive(:get) { response } end let(:response) { instance_double(HTTParty::Response, body: body) } context 'with a JSON response' do let(:body) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/user', type: 'Person', **inbox, **entrypoints, outbox: 'https://example.com/user/outbox' }.to_json end let(:entrypoints) { {} } context 'with valid response' do let(:inbox) { { inbox: 'https://example.com/user/inbox' } } context 'without a shared inbox' do it 'updates only the inbox in the subscription record' do service.execute expect(ActivityPub::ReleasesSubscription.last.subscriber_inbox_url).to eq 'https://example.com/user/inbox' expect(ActivityPub::ReleasesSubscription.last.shared_inbox_url).to be_nil end end context 'with a shared inbox' do let(:entrypoints) { { entrypoints: { sharedInbox: 'https://example.com/shared-inbox' } } } it 'updates both the inbox and shared inbox in the subscription record' do service.execute expect(ActivityPub::ReleasesSubscription.last.subscriber_inbox_url).to eq 'https://example.com/user/inbox' expect(ActivityPub::ReleasesSubscription.last.shared_inbox_url).to eq 'https://example.com/shared-inbox' end end end context 'without inbox attribute' do let(:inbox) { {} } it_behaves_like 'third party error' end context 'with a non string inbox attribute' do let(:inbox) { { inbox: 27.13 } } it_behaves_like 'third party error' end end context 'with non JSON response' do let(:body) { '<div>woops</div>' } it_behaves_like 'third party error' end end context 'with http error' do before do allow(Gitlab::HTTP).to receive(:get).and_raise(Errno::ECONNREFUSED) end it_behaves_like 'third party error' end end end
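A sketch for one more combination under the `'with valid response'` context above: `entrypoints` present but without a `sharedInbox` key (the `sharedOutbox` key is purely illustrative):

```ruby
context 'with entrypoints but no sharedInbox' do
  let(:entrypoints) { { entrypoints: { sharedOutbox: 'https://example.com/shared-outbox' } } }

  it 'stores the inbox URL and leaves the shared inbox blank' do
    service.execute

    expect(ActivityPub::ReleasesSubscription.last.subscriber_inbox_url).to eq 'https://example.com/user/inbox'
    expect(ActivityPub::ReleasesSubscription.last.shared_inbox_url).to be_nil
  end
end
```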
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module ActivityPub class AcceptFollowService MissingInboxURLError = Class.new(StandardError) attr_reader :subscription, :actor def initialize(subscription, actor) @subscription = subscription @actor = actor end def execute return if subscription.accepted? raise MissingInboxURLError unless subscription.subscriber_inbox_url.present? upload_accept_activity subscription.accepted! end private def upload_accept_activity body = Gitlab::Json::LimitedEncoder.encode(payload, limit: 1.megabyte) begin Gitlab::HTTP.post(subscription.subscriber_inbox_url, body: body, headers: headers) rescue StandardError => e raise ThirdPartyError, e.message end end def payload follow = subscription.payload.dup follow.delete('@context') { '@context': 'https://www.w3.org/ns/activitystreams', id: "#{actor}#follow/#{subscription.id}/accept", type: 'Accept', actor: actor, object: follow } end def headers { 'User-Agent' => "GitLab/#{Gitlab::VERSION}", 'Content-Type' => 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"', 'Accept' => 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"' } end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ActivityPub::AcceptFollowService, feature_category: :integrations do let_it_be(:project) { create(:project, :public) } let_it_be_with_reload(:existing_subscription) do create(:activity_pub_releases_subscription, :inbox, project: project) end let(:service) { described_class.new(existing_subscription, 'http://localhost/my-project/releases') } describe '#execute' do context 'when third party server complies' do before do allow(Gitlab::HTTP).to receive(:post).and_return(true) service.execute end it 'sends an Accept activity' do expect(Gitlab::HTTP).to have_received(:post) end it 'updates subscription state to accepted' do expect(existing_subscription.reload.status).to eq 'accepted' end end context 'when there is an error with third party server' do before do allow(Gitlab::HTTP).to receive(:post).and_raise(Errno::ECONNREFUSED) end it 'raises a ThirdPartyError' do expect { service.execute }.to raise_error(ActivityPub::ThirdPartyError) end it 'does not update subscription state to accepted' do begin service.execute rescue StandardError end expect(existing_subscription.reload.status).to eq 'requested' end end context 'when subscription is already accepted' do before do allow(Gitlab::HTTP).to receive(:post).and_return(true) allow(existing_subscription).to receive(:accepted!).and_return(true) existing_subscription.status = :accepted service.execute end it 'does not send an Accept activity' do expect(Gitlab::HTTP).not_to have_received(:post) end it 'does not update subscription state' do expect(existing_subscription).not_to have_received(:accepted!) end end context 'when inbox has not been resolved' do before do allow(Gitlab::HTTP).to receive(:post).and_return(true) allow(existing_subscription).to receive(:accepted!).and_return(true) end it 'raises an error' do existing_subscription.subscriber_inbox_url = nil expect { service.execute }.to raise_error(ActivityPub::AcceptFollowService::MissingInboxURLError) end end end end
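The `present?` guard rejects a blank inbox URL as well as a nil one; a small sketch for the `'when inbox has not been resolved'` context above:

```ruby
it 'raises an error when the inbox URL is blank' do
  existing_subscription.subscriber_inbox_url = ''

  expect { service.execute }.to raise_error(described_class::MissingInboxURLError)
end
```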
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module ActivityPub module Projects class ReleasesUnfollowService < ReleasesSubscriptionService def execute unless subscriber_url errors << "You need to provide an actor id for your unsubscribe activity" return false end return true unless previous_subscription.present? previous_subscription.destroy end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ActivityPub::Projects::ReleasesUnfollowService, feature_category: :release_orchestration do let_it_be(:project) { create(:project, :public) } let_it_be_with_reload(:existing_subscription) { create(:activity_pub_releases_subscription, project: project) } describe '#execute' do let(:service) { described_class.new(project, payload) } let(:payload) { nil } context 'with a valid payload' do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/new-actor#unfollow-1', type: 'Undo', actor: actor, object: { id: 'https://example.com/new-actor#follow-1', type: 'Follow', actor: actor, object: 'https://localhost/our/project/-/releases' } }.with_indifferent_access end let(:actor) { existing_subscription.subscriber_url } context 'when there is a subscription for this actor' do it 'deletes the subscription' do service.execute expect(ActivityPub::ReleasesSubscription.where(id: existing_subscription.id).first).to be_nil end it 'returns true' do expect(service.execute).to be_truthy end end context 'when there is no subscription for this actor' do before do allow(ActivityPub::ReleasesSubscription).to receive(:find_by_project_and_subscriber).and_return(nil) end it 'does not delete anything' do expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count } end it 'returns true' do expect(service.execute).to be_truthy end end end shared_examples 'invalid unfollow request' do it 'does not delete anything' do expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count } end it 'sets an error' do service.execute expect(service.errors).not_to be_empty end it 'returns false' do expect(service.execute).to be_falsey end end context 'when actor is missing' do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/new-actor#unfollow-1', type: 'Undo', object: { id: 'https://example.com/new-actor#follow-1', type: 'Follow', object: 'https://localhost/our/project/-/releases' } }.with_indifferent_access end it_behaves_like 'invalid unfollow request' end context 'when actor is an object with no id attribute' do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/new-actor#unfollow-1', actor: { type: 'Person' }, type: 'Undo', object: { id: 'https://example.com/new-actor#follow-1', type: 'Follow', actor: { type: 'Person' }, object: 'https://localhost/our/project/-/releases' } }.with_indifferent_access end it_behaves_like 'invalid unfollow request' end context 'when actor is neither a string nor an object' do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/new-actor#unfollow-1', actor: 27.13, type: 'Undo', object: { id: 'https://example.com/new-actor#follow-1', type: 'Follow', actor: 27.13, object: 'https://localhost/our/project/-/releases' } }.with_indifferent_access end it_behaves_like 'invalid unfollow request' end context "when actor tries to delete someone else's subscription" do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/actor#unfollow-1', type: 'Undo', actor: 'https://example.com/nasty-actor', object: { id: 'https://example.com/actor#follow-1', type: 'Follow', actor: existing_subscription.subscriber_url, object: 'https://localhost/our/project/-/releases' } }.with_indifferent_access end it 'does not delete anything' do expect { service.execute }.not_to change { 
ActivityPub::ReleasesSubscription.count } end it 'returns true' do expect(service.execute).to be_truthy end end end end
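The invalid cases above suggest the shared `ReleasesSubscriptionService` base class (not shown) also accepts an actor given as an object with an `id`. Under that assumption, a hedged sketch of the happy path with an object actor:

```ruby
context 'when actor is an object with an id attribute' do
  let(:payload) do
    {
      '@context': 'https://www.w3.org/ns/activitystreams',
      id: 'https://example.com/new-actor#unfollow-1',
      type: 'Undo',
      actor: { type: 'Person', id: existing_subscription.subscriber_url },
      object: {
        id: 'https://example.com/new-actor#follow-1',
        type: 'Follow',
        actor: { type: 'Person', id: existing_subscription.subscriber_url },
        object: 'https://localhost/our/project/-/releases'
      }
    }.with_indifferent_access
  end

  it 'deletes the subscription' do
    expect { service.execute }.to change { ActivityPub::ReleasesSubscription.count }.by(-1)
  end
end
```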
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module ActivityPub module Projects class ReleasesFollowService < ReleasesSubscriptionService def execute unless subscriber_url errors << "You need to provide an actor id for your subscriber" return false end return true if previous_subscription.present? subscription = ReleasesSubscription.new( subscriber_url: subscriber_url, subscriber_inbox_url: subscriber_inbox_url, payload: payload, project: project ) unless subscription.save errors.concat(subscription.errors.full_messages) return false end enqueue_subscription(subscription) true end private def subscriber_inbox_url return unless payload['actor'].is_a?(Hash) payload['actor']['inbox'] end def enqueue_subscription(subscription) ReleasesSubscriptionWorker.perform_async(subscription.id) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ActivityPub::Projects::ReleasesFollowService, feature_category: :release_orchestration do let_it_be(:project) { create(:project, :public) } let_it_be_with_reload(:existing_subscription) { create(:activity_pub_releases_subscription, project: project) } describe '#execute' do let(:service) { described_class.new(project, payload) } let(:payload) { nil } before do allow(ActivityPub::Projects::ReleasesSubscriptionWorker).to receive(:perform_async) end context 'with a valid payload' do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/new-actor#follow-1', type: 'Follow', actor: actor, object: 'https://localhost/our/project/-/releases' }.with_indifferent_access end let(:actor) { 'https://example.com/new-actor' } context 'when there is no subscription for that actor' do before do allow(ActivityPub::ReleasesSubscription).to receive(:find_by_project_and_subscriber).and_return(nil) end it 'sets the subscriber url' do service.execute expect(ActivityPub::ReleasesSubscription.last.subscriber_url).to eq 'https://example.com/new-actor' end it 'sets the payload' do service.execute expect(ActivityPub::ReleasesSubscription.last.payload).to eq payload end it 'sets the project' do service.execute expect(ActivityPub::ReleasesSubscription.last.project_id).to eq project.id end it 'saves the subscription' do expect { service.execute }.to change { ActivityPub::ReleasesSubscription.count }.by(1) end it 'queues the subscription job' do service.execute expect(ActivityPub::Projects::ReleasesSubscriptionWorker).to have_received(:perform_async) end it 'returns true' do expect(service.execute).to be_truthy end end context 'when there is already a subscription for that actor' do before do allow(ActivityPub::ReleasesSubscription).to receive(:find_by_project_and_subscriber) { existing_subscription } end it 'does not save the subscription' do expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count } end it 'does not queue the subscription job' do service.execute expect(ActivityPub::Projects::ReleasesSubscriptionWorker).not_to have_received(:perform_async) end it 'returns true' do expect(service.execute).to be_truthy end end end shared_examples 'invalid follow request' do it 'does not save the subscription' do expect { service.execute }.not_to change { ActivityPub::ReleasesSubscription.count } end it 'does not queue the subscription job' do service.execute expect(ActivityPub::Projects::ReleasesSubscriptionWorker).not_to have_received(:perform_async) end it 'sets an error' do service.execute expect(service.errors).not_to be_empty end it 'returns false' do expect(service.execute).to be_falsey end end context 'when actor is missing' do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/new-actor', type: 'Follow', object: 'https://localhost/our/project/-/releases' }.with_indifferent_access end it_behaves_like 'invalid follow request' end context 'when actor is an object with no id attribute' do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/new-actor', type: 'Follow', actor: { type: 'Person' }, object: 'https://localhost/our/project/-/releases' }.with_indifferent_access end it_behaves_like 'invalid follow request' end context 'when actor is neither a string nor an object' do let(:payload) do { '@context': 'https://www.w3.org/ns/activitystreams', id: 'https://example.com/new-actor', type: 'Follow', 
actor: 27.13, object: 'https://localhost/our/project/-/releases' }.with_indifferent_access end it_behaves_like 'invalid follow request' end end end
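The class's `#subscriber_inbox_url` reads an `inbox` from object actors. A sketch of that branch, assuming (as the invalid cases above imply) that the base service accepts object actors carrying an `id`:

```ruby
context 'when the actor object carries an inbox URL' do
  let(:payload) do
    {
      '@context': 'https://www.w3.org/ns/activitystreams',
      id: 'https://example.com/new-actor#follow-1',
      type: 'Follow',
      actor: {
        type: 'Person',
        id: 'https://example.com/new-actor',
        inbox: 'https://example.com/new-actor/inbox'
      },
      object: 'https://localhost/our/project/-/releases'
    }.with_indifferent_access
  end

  before do
    allow(ActivityPub::ReleasesSubscription).to receive(:find_by_project_and_subscriber).and_return(nil)
  end

  it 'stores the inbox URL on the new subscription' do
    service.execute

    expect(ActivityPub::ReleasesSubscription.last.subscriber_inbox_url)
      .to eq('https://example.com/new-actor/inbox')
  end
end
```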
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Git class TagPushService < ::BaseService include ChangeParams def execute return unless Gitlab::Git.tag_ref?(ref) project.repository.before_push_tag TagHooksService.new(project, current_user, params).execute unlock_artifacts true end private def unlock_artifacts return unless removing_tag? Ci::RefDeleteUnlockArtifactsWorker.perform_async(project.id, current_user.id, ref) end def removing_tag? Gitlab::Git.blank_ref?(newrev) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Git::TagPushService, feature_category: :source_code_management do include RepoHelpers let(:user) { create(:user) } let(:project) { create(:project, :repository) } let(:service) { described_class.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }) } let(:blankrev) { Gitlab::Git::BLANK_SHA } let(:oldrev) { blankrev } let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0 let(:tag) { 'v1.1.0' } let(:ref) { "refs/tags/#{tag}" } describe "Push tags" do subject do service.execute service end it 'flushes general cached data' do expect(project.repository).to receive(:before_push_tag) subject end it 'does not flush the tags cache' do expect(project.repository).not_to receive(:expire_tags_cache) subject end end describe 'Hooks' do context 'run on a tag' do it 'delegates to Git::TagHooksService' do expect_next_instance_of(::Git::TagHooksService) do |hooks_service| expect(hooks_service.project).to eq(service.project) expect(hooks_service.current_user).to eq(service.current_user) expect(hooks_service.params).to eq(service.params) expect(hooks_service).to receive(:execute) end service.execute end end context 'run on a branch' do let(:ref) { 'refs/heads/master' } it 'does nothing' do expect(::Git::BranchHooksService).not_to receive(:new) service.execute end end end describe 'artifacts' do context 'create tag' do let(:oldrev) { blankrev } it 'does nothing' do expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async) service.execute end end context 'update tag' do it 'does nothing' do expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async) service.execute end end context 'delete tag' do let(:newrev) { blankrev } it 'unlocks artifacts' do expect(::Ci::RefDeleteUnlockArtifactsWorker) .to receive(:perform_async).with(project.id, user.id, "refs/tags/#{tag}") service.execute end end end end
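For completeness, a sketch of the early return for non-tag refs, reusing the `service` construction above with a branch ref:

```ruby
describe 'non-tag refs' do
  let(:ref) { 'refs/heads/master' }

  it 'returns early without flushing caches' do
    expect(project.repository).not_to receive(:before_push_tag)
    expect(Git::TagHooksService).not_to receive(:new)

    expect(service.execute).to be_nil
  end
end
```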
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Git class ProcessRefChangesService < BaseService PIPELINE_PROCESS_LIMIT = 4 def execute changes = params[:changes] process_changes_by_action(:branch, changes.branch_changes) process_changes_by_action(:tag, changes.tag_changes) perform_housekeeping end private def process_changes_by_action(ref_type, changes) changes_by_action = group_changes_by_action(changes) changes_by_action.each do |action, changes| process_changes(ref_type, action, changes, execute_project_hooks: execute_project_hooks?(changes)) if changes.any? end end def group_changes_by_action(changes) changes.group_by do |change| change_action(change) end end def change_action(change) return :created if Gitlab::Git.blank_ref?(change[:oldrev]) return :removed if Gitlab::Git.blank_ref?(change[:newrev]) :pushed end def execute_project_hooks?(changes) changes.size <= Gitlab::CurrentSettings.push_event_hooks_limit end def process_changes(ref_type, action, changes, execute_project_hooks:) push_service_class = push_service_class_for(ref_type) create_bulk_push_event = changes.size > Gitlab::CurrentSettings.push_event_activities_limit merge_request_branches = merge_request_branches_for(ref_type, changes) changes.each do |change| push_service_class.new( project, current_user, change: change, push_options: params[:push_options], merge_request_branches: merge_request_branches, create_pipelines: under_process_limit?(change), execute_project_hooks: execute_project_hooks, create_push_event: !create_bulk_push_event ).execute end create_bulk_push_event(ref_type, action, changes) if create_bulk_push_event end def under_process_limit?(change) change[:index] < PIPELINE_PROCESS_LIMIT || Feature.enabled?(:git_push_create_all_pipelines, project) end def create_bulk_push_event(ref_type, action, changes) EventCreateService.new.bulk_push( project, current_user, Gitlab::DataBuilder::Push.build_bulk(action: action, ref_type: ref_type, changes: changes) ) end def push_service_class_for(ref_type) return Git::TagPushService if ref_type == :tag Git::BranchPushService end def merge_request_branches_for(ref_type, changes) return [] if ref_type == :tag MergeRequests::PushedBranchesService.new(project: project, current_user: current_user, params: { changes: changes }).execute end def perform_housekeeping housekeeping = Repositories::HousekeepingService.new(project) housekeeping.increment! housekeeping.execute if housekeeping.needed? rescue Repositories::HousekeepingService::LeaseTaken end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Git::ProcessRefChangesService, feature_category: :source_code_management do let(:project) { create(:project, :repository) } let(:user) { project.first_owner } let(:params) { { changes: git_changes } } subject { described_class.new(project, user, params) } shared_examples_for 'service for processing ref changes' do |push_service_class| let(:service) { double(execute: true) } let(:git_changes) { double(branch_changes: [], tag_changes: []) } def multiple_changes(change, count) Array.new(count).map.with_index do |n, index| { index: index, oldrev: change[:oldrev], newrev: change[:newrev], ref: "#{change[:ref]}#{n}" } end end let(:changes) do [ { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" }, { index: 1, oldrev: '123456', newrev: '789012', ref: "#{ref_prefix}/update" }, { index: 2, oldrev: '123456', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/delete" } ] end before do allow(git_changes).to receive(changes_method).and_return(changes) end it "calls #{push_service_class}" do expect(push_service_class) .to receive(:new) .with(project, project.first_owner, hash_including(execute_project_hooks: true, create_push_event: true)) .exactly(changes.count).times .and_return(service) subject.execute end context 'changes exceed push_event_hooks_limit' do let(:push_event_hooks_limit) { 3 } let(:changes) do multiple_changes( { oldrev: '123456', newrev: '789012', ref: "#{ref_prefix}/test" }, push_event_hooks_limit + 1 ) end before do stub_application_setting(push_event_hooks_limit: push_event_hooks_limit) end it "calls #{push_service_class} with execute_project_hooks set to false" do expect(push_service_class) .to receive(:new) .with(project, project.first_owner, hash_including(execute_project_hooks: false)) .exactly(changes.count).times .and_return(service) subject.execute end end context 'changes exceed push_event_activities_limit per action' do let(:push_event_activities_limit) { 3 } let(:changes) do [ { oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create" }, { oldrev: '123456', newrev: '789012', ref: "#{ref_prefix}/update" }, { oldrev: '123456', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/delete" } ].map do |change| multiple_changes(change, push_event_activities_limit + 1) end.flatten end before do stub_application_setting(push_event_activities_limit: push_event_activities_limit) end it "calls #{push_service_class} with create_push_event set to false" do expect(push_service_class) .to receive(:new) .with(project, project.first_owner, hash_including(create_push_event: false)) .exactly(changes.count).times .and_return(service) subject.execute end it 'creates events per action' do allow(push_service_class).to receive(:new).and_return(service) expect { subject.execute }.to change { Event.count }.by(3) end end context 'pipeline creation' do context 'with valid .gitlab-ci.yml' do before do stub_ci_pipeline_to_return_yaml_file allow_any_instance_of(Project) .to receive(:commit) .and_return(project.commit) if changes_method == :branch_changes allow_any_instance_of(Repository).to receive(:branch_exists?) { true } end if changes_method == :tag_changes allow_any_instance_of(Repository).to receive(:tag_exists?) 
{ true } end allow(Gitlab::Git::Commit).to receive(:between) { [] } end context 'when git_push_create_all_pipelines is disabled' do before do stub_feature_flags(git_push_create_all_pipelines: false) end it 'creates pipeline for branches and tags' do subject.execute expect(Ci::Pipeline.pluck(:ref)).to contain_exactly('create', 'update', 'delete') end it "creates exactly #{described_class::PIPELINE_PROCESS_LIMIT} pipelines" do stub_const("#{described_class}::PIPELINE_PROCESS_LIMIT", changes.count - 1) expect { subject.execute }.to change { Ci::Pipeline.count }.by(described_class::PIPELINE_PROCESS_LIMIT) end end context 'when git_push_create_all_pipelines is enabled' do before do stub_feature_flags(git_push_create_all_pipelines: true) end it 'creates all pipelines' do expect { subject.execute }.to change { Ci::Pipeline.count }.by(changes.count) end end end context 'with invalid .gitlab-ci.yml' do before do stub_ci_pipeline_yaml_file(nil) allow(Gitlab::Git::Commit).to receive(:between) { [] } end it 'does not create a pipeline' do expect { subject.execute }.not_to change { Ci::Pipeline.count } end end end describe "housekeeping", :clean_gitlab_redis_cache, :clean_gitlab_redis_queues, :clean_gitlab_redis_shared_state do let(:housekeeping) { Repositories::HousekeepingService.new(project) } before do allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping) allow(push_service_class) .to receive(:new) .with(project, project.first_owner, hash_including(execute_project_hooks: true, create_push_event: true)) .exactly(changes.count).times .and_return(service) end it 'does not perform housekeeping when not needed' do expect(housekeeping).not_to receive(:execute) subject.execute end context 'when housekeeping is needed' do before do allow(housekeeping).to receive(:needed?).and_return(true) end it 'performs housekeeping' do expect(housekeeping).to receive(:execute) subject.execute end it 'does not raise an exception' do allow(housekeeping).to receive(:try_obtain_lease).and_return(false) subject.execute end end it 'increments the push counter' do expect(housekeeping).to receive(:increment!) 
subject.execute end end end context 'branch changes' do let(:changes_method) { :branch_changes } let(:ref_prefix) { 'refs/heads' } it_behaves_like 'service for processing ref changes', Git::BranchPushService context 'when there are merge requests associated with branches' do let(:tag_changes) do [ { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "refs/tags/v10.0.0" } ] end let(:branch_changes) do [ { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" }, { index: 1, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789013', ref: "#{ref_prefix}/create2" }, { index: 2, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" }, { index: 3, oldrev: '789015', newrev: '789016', ref: "#{ref_prefix}/changed1" }, { index: 4, oldrev: '789017', newrev: '789018', ref: "#{ref_prefix}/changed2" }, { index: 5, oldrev: '789019', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/removed1" }, { index: 6, oldrev: '789020', newrev: Gitlab::Git::BLANK_SHA, ref: "#{ref_prefix}/removed2" } ] end let(:git_changes) { double(branch_changes: branch_changes, tag_changes: tag_changes) } before do allow(MergeRequests::PushedBranchesService).to receive(:new).and_return( double(execute: %w[create1 create2]), double(execute: %w[changed1]), double(execute: %w[removed2]) ) allow(Gitlab::Git::Commit).to receive(:between).and_return([]) end it 'schedules job for existing merge requests' do expect(UpdateMergeRequestsWorker).to receive(:perform_async).with( project.id, user.id, Gitlab::Git::BLANK_SHA, '789012', "#{ref_prefix}/create1", { 'push_options' => nil }).ordered expect(UpdateMergeRequestsWorker).to receive(:perform_async).with( project.id, user.id, Gitlab::Git::BLANK_SHA, '789013', "#{ref_prefix}/create2", { 'push_options' => nil }).ordered expect(UpdateMergeRequestsWorker).to receive(:perform_async).with( project.id, user.id, '789015', '789016', "#{ref_prefix}/changed1", { 'push_options' => nil }).ordered expect(UpdateMergeRequestsWorker).to receive(:perform_async).with( project.id, user.id, '789020', Gitlab::Git::BLANK_SHA, "#{ref_prefix}/removed2", { 'push_options' => nil }).ordered subject.execute end end end context 'tag changes' do let(:changes_method) { :tag_changes } let(:ref_prefix) { 'refs/tags' } it_behaves_like 'service for processing ref changes', Git::TagPushService end end
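For orientation, a minimal sketch (hypothetical values; a loaded GitLab environment with an existing `project` and `user` is assumed) of the input shape this spec builds through `git_changes`: each entry carries `index`, `oldrev`, `newrev`, and `ref`, and the service dispatches branch entries to `Git::BranchPushService` and tag entries to `Git::TagPushService`.

```ruby
# Hypothetical illustration of the input the service consumes.
# `project` and `user` are assumed to already exist.
branch_changes = [
  { index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: 'refs/heads/create' },
  { index: 1, oldrev: '123456', newrev: '789012', ref: 'refs/heads/update' },
  { index: 2, oldrev: '123456', newrev: Gitlab::Git::BLANK_SHA, ref: 'refs/heads/delete' }
]

# The service only needs an object responding to #branch_changes and
# #tag_changes, which is why the spec stubs it with a double.
git_changes = Struct.new(:branch_changes, :tag_changes).new(branch_changes, [])

Git::ProcessRefChangesService.new(project, user, changes: git_changes).execute
```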
Write RSpec test file for following ruby class

```ruby
# frozen_string_literal: true

module Git
  class TagHooksService < ::Git::BaseHooksService
    private

    def hook_name
      :tag_push_hooks
    end

    def limited_commits
      [tag_commit].compact
    end

    def commits_count
      limited_commits.count
    end

    def event_message
      tag&.message
    end

    def tag
      strong_memoize(:tag) do
        next if Gitlab::Git.blank_ref?(newrev)

        tag_name = Gitlab::Git.ref_name(ref)
        tag = project.repository.find_tag(tag_name)

        tag if tag && tag.target == newrev
      end
    end

    def tag_commit
      strong_memoize(:tag_commit) do
        project.commit(tag.dereferenced_target) if tag
      end
    end
  end
end

Git::TagHooksService.prepend_mod_with('Git::TagHooksService')
```
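A minimal invocation sketch for the class above, mirroring the setup used in the spec below (a GitLab environment with an existing `project`, `user`, and annotated tag is assumed):

```ruby
# Hypothetical usage sketch: run tag push hooks for an annotated tag.
# For an annotated tag, #tag only resolves while the pushed newrev still
# points at the tag object, and #limited_commits holds at most one commit.
newrev = project.repository.find_tag('v1.1.0').target
change = { oldrev: Gitlab::Git::BLANK_SHA, newrev: newrev, ref: 'refs/tags/v1.1.0' }

Git::TagHooksService.new(project, user, change: change).execute
```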
# frozen_string_literal: true require 'spec_helper' RSpec.describe Git::TagHooksService, :service, feature_category: :source_code_management do let(:user) { create(:user) } let(:project) { create(:project, :repository) } let(:oldrev) { Gitlab::Git::BLANK_SHA } let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0 let(:ref) { "refs/tags/#{tag_name}" } let(:tag_name) { 'v1.1.0' } let(:tag) { project.repository.find_tag(tag_name) } let(:commit) { tag.dereferenced_target } let(:service) do described_class.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }) end describe 'System hooks' do it 'executes system hooks' do push_data = service.send(:push_data) expect(project).to receive(:has_active_hooks?).and_return(true) expect_next_instance_of(SystemHooksService) do |system_hooks_service| expect(system_hooks_service) .to receive(:execute_hooks) .with(push_data, :tag_push_hooks) end service.execute end end describe "Webhooks" do it "executes hooks on the project" do expect(project).to receive(:has_active_hooks?).and_return(true) expect(project).to receive(:execute_hooks) service.execute end end describe "Pipelines" do before do stub_ci_pipeline_to_return_yaml_file project.add_developer(user) end it "creates a new pipeline" do expect { service.execute }.to change { Ci::Pipeline.count } expect(Ci::Pipeline.last).to be_push end end describe 'Push data' do shared_examples_for 'tag push data expectations' do subject(:push_data) { service.send(:push_data) } it 'has expected push data attributes' do is_expected.to match a_hash_including( object_kind: 'tag_push', ref: ref, ref_protected: project.protected_for?(ref), before: oldrev, after: newrev, message: tag.message, user_id: user.id, user_name: user.name, project_id: project.id ) end context "with repository data" do subject { push_data[:repository] } it 'has expected repository attributes' do is_expected.to match a_hash_including( name: project.name, url: project.url_to_repo, description: project.description, homepage: project.web_url ) end end context "with commits" do subject { push_data[:commits] } it { is_expected.to be_an(Array) } it 'has 1 element' do expect(subject.size).to eq(1) end context "the commit" do subject { push_data[:commits].first } it { is_expected.to include(timestamp: commit.date.xmlschema) } it 'has expected commit attributes' do is_expected.to match a_hash_including( id: commit.id, message: commit.safe_message, url: [ Gitlab.config.gitlab.url, project.namespace.to_param, project.to_param, '-', 'commit', commit.id ].join('/') ) end context "with an author" do subject { push_data[:commits].first[:author] } it 'has expected author attributes' do is_expected.to match a_hash_including( name: commit.author_name, email: commit.author_email ) end end end end end context 'annotated tag' do include_examples 'tag push data expectations' end context 'lightweight tag' do let(:tag_name) { 'light-tag' } let(:newrev) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' } before do # Create the lightweight tag project.repository.write_ref("refs/tags/#{tag_name}", newrev) # Clear tag list cache project.repository.expire_tags_cache end include_examples 'tag push data expectations' end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Git class BaseHooksService < ::BaseService include Gitlab::Utils::StrongMemoize include ChangeParams # The N most recent commits to process in a single push payload. PROCESS_COMMIT_LIMIT = 100 def execute create_events create_pipelines execute_project_hooks # Not a hook, but it needs access to the list of changed commits enqueue_invalidate_cache enqueue_notify_kas success end private def hook_name raise NotImplementedError, "Please implement #{self.class}##{__method__}" end # This should return PROCESS_COMMIT_LIMIT commits, ordered with newest last def limited_commits raise NotImplementedError, "Please implement #{self.class}##{__method__}" end def commits_count raise NotImplementedError, "Please implement #{self.class}##{__method__}" end def event_message nil end def invalidated_file_types [] end # Push events in the activity feed only show information for the # last commit. def create_events return unless params.fetch(:create_push_event, true) EventCreateService.new.push(project, current_user, event_push_data) end def create_pipelines return unless params.fetch(:create_pipelines, true) response = Ci::CreatePipelineService .new(project, current_user, pipeline_params) .execute(:push, **pipeline_options) log_pipeline_errors(response.message) unless response.payload.persisted? end def execute_project_hooks return unless params.fetch(:execute_project_hooks, true) # Creating push_data invokes one CommitDelta RPC per commit. Only # build this data if we actually need it. project.execute_hooks(push_data, hook_name) if project.has_active_hooks?(hook_name) return unless project.has_active_integrations?(hook_name) project.execute_integrations(push_data, hook_name, skip_ci: integration_push_options&.fetch(:skip_ci).present?) end def enqueue_invalidate_cache file_types = invalidated_file_types return unless file_types.present? ProjectCacheWorker.perform_async(project.id, file_types, [], false) end def enqueue_notify_kas return unless Gitlab::Kas.enabled? Clusters::Agents::NotifyGitPushWorker.perform_async(project.id) end def pipeline_params strong_memoize(:pipeline_params) do { before: oldrev, after: newrev, ref: ref, variables_attributes: generate_vars_from_push_options || [], push_options: params[:push_options] || {}, checkout_sha: Gitlab::DataBuilder::Push.checkout_sha( project.repository, newrev, ref) } end end def ci_variables_from_push_options strong_memoize(:ci_variables_from_push_options) do push_options&.dig(:ci, :variable) end end def integration_push_options strong_memoize(:integration_push_options) do push_options&.dig(:integrations) end end def push_options strong_memoize(:push_options) do params[:push_options]&.deep_symbolize_keys end end def generate_vars_from_push_options return [] unless ci_variables_from_push_options ci_variables_from_push_options.map do |var_definition, _count| key, value = var_definition.to_s.split("=", 2) # Accept only valid format. We ignore the following formats # 1. "=123". In this case, `key` will be an empty string # 2. "FOO". In this case, `value` will be nil. # However, the format "FOO=" will result in key beign `FOO` and value # being an empty string. This is acceptable. next if key.blank? || value.nil? 
{ "key" => key, "variable_type" => "env_var", "secret_value" => value } end.compact end def push_data_params(commits:, with_changed_files: true) { oldrev: oldrev, newrev: newrev, ref: ref, project: project, user: current_user, commits: commits, message: event_message, commits_count: commits_count, with_changed_files: with_changed_files } end def event_push_data # We only need the newest commit for the event push, and we don't # need the full deltas either. @event_push_data ||= Gitlab::DataBuilder::Push.build( **push_data_params(commits: limited_commits.last, with_changed_files: false) ) end def push_data @push_data ||= Gitlab::DataBuilder::Push.build(**push_data_params(commits: limited_commits)) # Dependent code may modify the push data, so return a duplicate each time @push_data.dup end # to be overridden in EE def pipeline_options {} end def log_pipeline_errors(error_message) data = { class: self.class.name, correlation_id: Labkit::Correlation::CorrelationId.current_id.to_s, project_id: project.id, project_path: project.full_path, message: "Error creating pipeline", errors: error_message, pipeline_params: sanitized_pipeline_params } logger.warn(data) end def sanitized_pipeline_params pipeline_params.except(:push_options) end def logger if Gitlab::Runtime.sidekiq? Sidekiq.logger else # This service runs in Sidekiq, so this shouldn't ever be # called, but this is included just in case. Gitlab::IntegrationsLogger end end end end ```
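The comment inside `generate_vars_from_push_options` is easier to follow with concrete inputs. A standalone sketch of the same `KEY=VALUE` rule (illustrative helper name, not the service's private method):

```ruby
# Split each push-option variable definition on the first "=", then drop
# entries with a missing value ("FOO") or a blank key ("=123").
def parse_push_option_variables(variable_options)
  variable_options.filter_map do |definition, _count|
    key, value = definition.to_s.split('=', 2)
    next if value.nil? || key.empty?

    { 'key' => key, 'variable_type' => 'env_var', 'secret_value' => value }
  end
end

parse_push_option_variables({ 'FOO=123': 1, 'BAR=': 1, '=123': 1, 'BAZ': 1 })
# => keeps FOO ("123") and BAR (""); ignores "=123" and "BAZ"
```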
# frozen_string_literal: true require 'spec_helper' RSpec.describe Git::BaseHooksService, feature_category: :source_code_management do include RepoHelpers let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :repository) } let(:oldrev) { Gitlab::Git::BLANK_SHA } let(:newrev) { "8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b" } # gitlab-test: git rev-parse refs/tags/v1.1.0 let(:ref) { 'refs/tags/v1.1.0' } let(:checkout_sha) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' } let(:test_service) do Class.new(described_class) do def hook_name :push_hooks end def limited_commits [] end def commits_count 0 end end end subject { test_service.new(project, user, params) } let(:params) do { change: { oldrev: oldrev, newrev: newrev, ref: ref } } end describe 'push event' do it 'creates push event' do expect_next_instance_of(EventCreateService) do |service| expect(service).to receive(:push) end subject.execute end context 'create_push_event is set to false' do before do params[:create_push_event] = false end it 'does not create push event' do expect(EventCreateService).not_to receive(:new) subject.execute end end end describe 'project hooks and integrations' do context 'hooks' do before do expect(project).to receive(:has_active_hooks?).and_return(active) end context 'active hooks' do let(:active) { true } it 'executes the hooks' do expect(subject).to receive(:push_data).at_least(:once).and_call_original expect(project).to receive(:execute_hooks) subject.execute end end context 'inactive hooks' do let(:active) { false } it 'does not execute the hooks' do expect(subject).not_to receive(:push_data) expect(project).not_to receive(:execute_hooks) subject.execute end end end context 'with integrations' do before do expect(project).to receive(:has_active_integrations?).and_return(active) end context 'with active integrations' do let(:active) { true } it 'executes the services' do expect(subject).to receive(:push_data).at_least(:once).and_call_original expect(project).to receive(:execute_integrations).with(kind_of(Hash), subject.hook_name, skip_ci: false) subject.execute end context 'with integrations.skip_ci push option' do before do params[:push_options] = { integrations: { skip_ci: true } } end it 'executes the services' do expect(subject).to receive(:push_data).at_least(:once).and_call_original expect(project).to receive(:execute_integrations).with(kind_of(Hash), subject.hook_name, skip_ci: true) subject.execute end end end context 'with inactive integrations' do let(:active) { false } it 'does not execute the services' do expect(subject).not_to receive(:push_data) expect(project).not_to receive(:execute_integrations) subject.execute end end end context 'when execute_project_hooks param is set to false' do before do params[:execute_project_hooks] = false allow(project).to receive(:has_active_hooks?).and_return(true) allow(project).to receive(:has_active_integrations?).and_return(true) end it 'does not execute hooks and integrations' do expect(project).not_to receive(:execute_hooks) expect(project).not_to receive(:execute_integrations) subject.execute end end end describe 'Generating CI variables from push options' do let(:pipeline_params) do { after: newrev, before: oldrev, checkout_sha: checkout_sha, push_options: push_options, # defined in each context ref: ref, variables_attributes: variables_attributes # defined in each context } end shared_examples 'creates pipeline with params and expected variables' do let(:pipeline_service) { double(execute: service_response) } let(:service_response) { 
double(error?: false, payload: pipeline, message: "Error") } let(:pipeline) { double(persisted?: true) } it 'calls the create pipeline service' do expect(Ci::CreatePipelineService) .to receive(:new) .with(project, user, pipeline_params) .and_return(pipeline_service) expect(subject).not_to receive(:log_pipeline_errors) subject.execute end end context 'with empty push options' do let(:push_options) { {} } let(:variables_attributes) { [] } it_behaves_like 'creates pipeline with params and expected variables' end context 'with push options not specifying variables' do let(:push_options) do { mr: { create: true } } end let(:variables_attributes) { [] } before do params[:push_options] = push_options end it_behaves_like 'creates pipeline with params and expected variables' end context 'with push options specifying variables' do let(:push_options) do { ci: { variable: { "FOO=123": 1, "BAR=456": 1, "MNO=890=ABC": 1 } } } end let(:variables_attributes) do [ { "key" => "FOO", "variable_type" => "env_var", "secret_value" => "123" }, { "key" => "BAR", "variable_type" => "env_var", "secret_value" => "456" }, { "key" => "MNO", "variable_type" => "env_var", "secret_value" => "890=ABC" } ] end before do params[:push_options] = push_options end it_behaves_like 'creates pipeline with params and expected variables' end context 'with push options not specifying variables in correct format' do let(:push_options) do { ci: { variable: { "FOO=123": 1, "BAR": 1, "=MNO": 1 } } } end let(:variables_attributes) do [ { "key" => "FOO", "variable_type" => "env_var", "secret_value" => "123" } ] end before do params[:push_options] = push_options end it_behaves_like 'creates pipeline with params and expected variables' end end describe "Pipeline creation" do let(:pipeline_params) do { after: newrev, before: oldrev, checkout_sha: checkout_sha, push_options: push_options, ref: ref, variables_attributes: variables_attributes } end let(:pipeline_service) { double(execute: service_response) } let(:push_options) { {} } let(:variables_attributes) { [] } context "when the pipeline is persisted" do let(:pipeline) { double(persisted?: true) } context "and there are no errors" do let(:service_response) { double(error?: false, payload: pipeline, message: "Error") } it "returns success" do expect(Ci::CreatePipelineService) .to receive(:new) .with(project, user, pipeline_params) .and_return(pipeline_service) expect(subject.execute[:status]).to eq(:success) end end context "and there are errors" do let(:service_response) { double(error?: true, payload: pipeline, message: "Error") } it "does not log errors and returns success" do # This behaviour is due to the save_on_errors: true setting that is the default in the execute method. 
expect(Ci::CreatePipelineService) .to receive(:new) .with(project, user, pipeline_params) .and_return(pipeline_service) expect(subject).not_to receive(:log_pipeline_errors).with(service_response.message) expect(subject.execute[:status]).to eq(:success) end end end context "when the pipeline wasn't persisted" do let(:pipeline) { double(persisted?: false) } context "and there are no errors" do let(:service_response) { double(error?: false, payload: pipeline, message: nil) } it "returns success" do expect(Ci::CreatePipelineService) .to receive(:new) .with(project, user, pipeline_params) .and_return(pipeline_service) expect(subject).to receive(:log_pipeline_errors).with(service_response.message) expect(subject.execute[:status]).to eq(:success) end end context "and there are errors" do let(:service_response) { double(error?: true, payload: pipeline, message: "Error") } it "logs errors and returns success" do expect(Ci::CreatePipelineService) .to receive(:new) .with(project, user, pipeline_params) .and_return(pipeline_service) expect(subject).to receive(:log_pipeline_errors).with(service_response.message) expect(subject.execute[:status]).to eq(:success) end end end end describe 'notifying KAS' do let(:kas_enabled) { true } before do allow(Gitlab::Kas).to receive(:enabled?).and_return(kas_enabled) end it 'enqueues the notification worker' do expect(Clusters::Agents::NotifyGitPushWorker).to receive(:perform_async).with(project.id).once subject.execute end context 'when KAS is disabled' do let(:kas_enabled) { false } it do expect(Clusters::Agents::NotifyGitPushWorker).not_to receive(:perform_async) subject.execute end end end end
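The `test_service` defined in this spec doubles as documentation of the subclass contract; a standalone sketch of the minimum a concrete hooks service must supply (the class name here is hypothetical):

```ruby
# The base class raises NotImplementedError for these three methods,
# so every concrete hooks service overrides them.
class ExampleHooksService < Git::BaseHooksService
  private

  def hook_name
    :push_hooks
  end

  # By convention: newest last, capped at PROCESS_COMMIT_LIMIT.
  def limited_commits
    []
  end

  def commits_count
    0
  end
end
```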
Write RSpec test file for following ruby class

```ruby
# frozen_string_literal: true

module Git
  class WikiPushService < ::BaseService
    # Maximum number of change events we will process on any single push
    MAX_CHANGES = 100

    attr_reader :wiki

    def initialize(wiki, current_user, params)
      @wiki = wiki
      @current_user = current_user
      @params = params.dup
    end

    def execute
      # Execute model-specific callbacks
      wiki.after_post_receive

      process_changes
      perform_housekeeping
    end

    private

    def process_changes
      return unless can_process_wiki_events?

      push_changes.take(MAX_CHANGES).each do |change| # rubocop:disable CodeReuse/ActiveRecord
        next unless change.page.present?

        response = create_event_for(change)
        log_error(response.message) if response.error?
      end
    end

    def can_process_wiki_events?
      # TODO: Support activity events for group wikis
      # https://gitlab.com/gitlab-org/gitlab/-/issues/209306
      wiki.is_a?(ProjectWiki)
    end

    def push_changes
      default_branch_changes.flat_map do |change|
        raw_changes(change).map { |raw| Git::WikiPushService::Change.new(wiki, change, raw) }
      end
    end

    def raw_changes(change)
      wiki.repository.raw.raw_changes_between(change[:oldrev], change[:newrev])
    end

    def create_event_for(change)
      event_service.execute(
        change.last_known_slug,
        change.page,
        change.event_action,
        change.sha
      )
    end

    def event_service
      @event_service ||= WikiPages::EventCreateService.new(current_user)
    end

    def on_default_branch?(change)
      wiki.default_branch == ::Gitlab::Git.branch_name(change[:ref])
    end

    # See: [Gitlab::GitPostReceive#changes]
    def changes
      params[:changes] || []
    end

    def default_branch_changes
      @default_branch_changes ||= changes.select { |change| on_default_branch?(change) }
    end

    def perform_housekeeping
      housekeeping = Repositories::HousekeepingService.new(wiki)
      housekeeping.increment!
      housekeeping.execute if housekeeping.needed?
    rescue Repositories::HousekeepingService::LeaseTaken
      # no-op
    end
  end
end

Git::WikiPushService.prepend_mod_with('Git::WikiPushService')
```
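A sketch of how this service is driven (an existing `ProjectWiki` in `wiki` and a `current_user` are assumed); the changes array mirrors what `Gitlab::GitPostReceive#changes` yields, as the spec's `post_received` helper shows further down:

```ruby
# Hypothetical sketch: a single push updating the wiki's default branch.
newrev = wiki.repository.commit(wiki.default_branch).id
changes = [
  { oldrev: Gitlab::Git::BLANK_SHA, newrev: newrev, ref: "refs/heads/#{wiki.default_branch}" }
]

# Only changes on the default branch produce wiki page events, and at most
# MAX_CHANGES of them are processed per push.
Git::WikiPushService.new(wiki, current_user, changes: changes).execute
```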
# frozen_string_literal: true require 'spec_helper' RSpec.describe Git::WikiPushService, services: true, feature_category: :wiki do include RepoHelpers let_it_be(:current_user) { create(:user) } let_it_be(:key_id) { create(:key, user: current_user).shell_id } let(:wiki) { create(:project_wiki, user: current_user) } let(:default_branch) { wiki.default_branch } let(:repository) { wiki.repository } before do repository.create_if_not_exists(default_branch) end describe '#execute' do it 'executes model-specific callbacks' do expect(wiki).to receive(:after_post_receive) create_service(current_sha).execute end end describe '#process_changes' do context 'the push contains more than the permitted number of changes' do def run_service process_changes { described_class::MAX_CHANGES.succ.times { write_new_page } } end it 'creates only MAX_CHANGES events' do expect { run_service }.to change(Event, :count).by(described_class::MAX_CHANGES) end end context 'default_branch collides with a tag' do it 'creates only one event' do base_sha = current_sha write_new_page service = create_service(base_sha, ['refs/heads/master', 'refs/tags/master']) expect { service.execute }.to change(Event, :count).by(1) end end describe 'successfully creating events' do let(:count) { Event::WIKI_ACTIONS.size } def run_service wiki_page_a = create(:wiki_page, wiki: wiki) wiki_page_b = create(:wiki_page, wiki: wiki) process_changes do write_new_page update_page(wiki_page_a.title) delete_page(wiki_page_b.page) end end it 'creates one event for every wiki action' do expect { run_service }.to change(Event, :count).by(count) end it 'handles all known actions' do run_service expect(Event.last(count).pluck(:action)).to match_array(Event::WIKI_ACTIONS.map(&:to_s)) end context 'when wiki_page slug is not UTF-8 ' do let(:binary_title) { Gitlab::EncodingHelper.encode_binary('编码') } def run_service wiki_page = create(:wiki_page, wiki: wiki, title: "#{binary_title} 'foo'") process_changes do # Test that new_path is converted to UTF-8 create(:wiki_page, wiki: wiki, title: binary_title) # Test that old_path is also is converted to UTF-8 update_page(wiki_page.title, 'foo') end end it 'does not raise an error' do expect { run_service }.not_to raise_error end end end context 'two pages have been created' do def run_service process_changes do write_new_page write_new_page end end it 'creates two events' do expect { run_service }.to change(Event, :count).by(2) end it 'creates two metadata records' do expect { run_service }.to change(WikiPage::Meta, :count).by(2) end it 'creates appropriate events' do run_service expect(Event.last(2)).to all(have_attributes(wiki_page?: true, action: 'created')) end end context 'a non-page file as been added' do it 'does not create events, or WikiPage metadata' do expect do process_changes { write_non_page } end.not_to change { [Event.count, WikiPage::Meta.count] } end end context 'one page, and one non-page have been created' do def run_service process_changes do write_new_page write_non_page end end it 'creates a wiki page creation event' do expect { run_service }.to change(Event, :count).by(1) expect(Event.last).to have_attributes(wiki_page?: true, action: 'created') end it 'creates one metadata record' do expect { run_service }.to change(WikiPage::Meta, :count).by(1) end end context 'one page has been added, and then updated' do def run_service process_changes do title = write_new_page update_page(title) end end it 'creates just a single event' do expect { run_service }.to change(Event, :count).by(1) end it 
'creates just one metadata record' do expect { run_service }.to change(WikiPage::Meta, :count).by(1) end it 'creates a new wiki page creation event' do run_service expect(Event.last).to have_attributes( wiki_page?: true, action: 'created' ) end end context 'when a page we already know about has been updated' do let(:wiki_page) { create(:wiki_page, wiki: wiki) } before do create(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page) end def run_service process_changes { update_page(wiki_page.title) } end it 'does not create a new meta-data record' do expect { run_service }.not_to change(WikiPage::Meta, :count) end it 'creates a new event' do expect { run_service }.to change(Event, :count).by(1) end it 'adds an update event' do run_service expect(Event.last).to have_attributes( wiki_page?: true, action: 'updated' ) end end context 'when a page we do not know about has been updated' do def run_service wiki_page = create(:wiki_page, wiki: wiki) process_changes { update_page(wiki_page.title) } end it 'creates a new meta-data record' do expect { run_service }.to change(WikiPage::Meta, :count).by(1) end it 'creates a new event' do expect { run_service }.to change(Event, :count).by(1) end it 'adds an update event' do run_service expect(Event.last).to have_attributes( wiki_page?: true, action: 'updated' ) end end context 'when a page we do not know about has been deleted' do def run_service wiki_page = create(:wiki_page, wiki: wiki) process_changes { delete_page(wiki_page.page) } end it 'create a new meta-data record' do expect { run_service }.to change(WikiPage::Meta, :count).by(1) end it 'creates a new event' do expect { run_service }.to change(Event, :count).by(1) end it 'adds an update event' do run_service expect(Event.last).to have_attributes( wiki_page?: true, action: 'destroyed' ) end end it 'calls log_error for every event we cannot create' do base_sha = current_sha count = 3 count.times { write_new_page } message = 'something went very very wrong' allow_next_instance_of(WikiPages::EventCreateService, current_user) do |service| allow(service).to receive(:execute) .with(String, WikiPage, Symbol, String) .and_return(ServiceResponse.error(message: message)) end service = create_service(base_sha) expect(service).to receive(:log_error).exactly(count).times.with(message) service.execute end describe 'feature flags' do shared_examples 'a no-op push' do it 'does not create any events' do expect { process_changes { write_new_page } }.not_to change(Event, :count) end it 'does not even look for events to process' do base_sha = current_sha write_new_page service = create_service(base_sha) expect(service).not_to receive(:changed_files) service.execute end end end end describe '#perform_housekeeping', :clean_gitlab_redis_shared_state do let(:housekeeping) { Repositories::HousekeepingService.new(wiki) } subject { create_service(current_sha).execute } before do allow(Repositories::HousekeepingService).to receive(:new).and_return(housekeeping) end it 'does not perform housekeeping when not needed' do expect(housekeeping).not_to receive(:execute) subject end context 'when housekeeping is needed' do before do allow(housekeeping).to receive(:needed?).and_return(true) end it 'performs housekeeping' do expect(housekeeping).to receive(:execute) subject end it 'does not raise an exception' do allow(housekeeping).to receive(:try_obtain_lease).and_return(false) expect { subject }.not_to raise_error end end it 'increments the push counter' do expect(housekeeping).to receive(:increment!) 
subject end end # In order to construct the correct GitPostReceive object that represents the # changes we are applying, we need to describe the changes between old-ref and # new-ref. Old ref (the base sha) we have to capture before we perform any # changes. Once the changes have been applied, we can execute the service to # process them. def process_changes(&block) base_sha = current_sha yield create_service(base_sha).execute end def create_service(base, refs = ['refs/heads/master']) changes = post_received(base, refs).changes described_class.new(wiki, current_user, changes: changes) end def post_received(base, refs) change_str = refs.map { |ref| +"#{base} #{current_sha} #{ref}" }.join("\n") post_received = ::Gitlab::GitPostReceive.new(wiki.container, key_id, change_str, {}) allow(post_received).to receive(:identify).with(key_id).and_return(current_user) post_received end def current_sha repository.commit('master')&.id || Gitlab::Git::BLANK_SHA end # It is important not to re-use the WikiPage services here, since they create # events - these helper methods below are intended to simulate actions on the repo # that have not gone through our services. def write_new_page generate(:wiki_page_title).tap do |t| repository.create_file( current_user, ::Wiki.sluggified_full_path(t, 'md'), 'Hello', **commit_details ) end end # We write something to the wiki-repo that is not a page - as, for example, an # attachment. This will appear as a raw-diff change, but wiki.find_page will # return nil. def write_non_page params = { file_name: 'attachment.log', file_content: 'some stuff', branch_name: 'master' } ::Wikis::CreateAttachmentService.new(container: wiki.container, current_user: current_user, params: params).execute end def update_page(title, new_title = nil) new_title = title unless new_title.present? old_path = ::Wiki.sluggified_full_path(title, 'md') new_path = ::Wiki.sluggified_full_path(new_title, 'md') repository.update_file( current_user, new_path, 'Hey', **commit_details.merge(previous_path: old_path) ) end def delete_page(page) repository.delete_file(current_user, page.path, **commit_details) end def commit_details { branch_name: default_branch, message: "commit message", author_email: current_user.email, author_name: current_user.name } end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Git class BranchHooksService < ::Git::BaseHooksService extend ::Gitlab::Utils::Override JIRA_SYNC_BATCH_SIZE = 20 JIRA_SYNC_BATCH_DELAY = 10.seconds def execute execute_branch_hooks super.tap do enqueue_update_signatures end end private def hook_name :push_hooks end def limited_commits strong_memoize(:limited_commits) { threshold_commits.last(PROCESS_COMMIT_LIMIT) } end # Taking limit+1 commits allows us to detect when the limit is in effect def threshold_commits strong_memoize(:threshold_commits) do if creating_default_branch? # The most recent PROCESS_COMMIT_LIMIT commits in the default branch. # They are returned newest-to-oldest, but we need to present them oldest-to-newest project.repository.commits(newrev, limit: PROCESS_COMMIT_LIMIT + 1).reverse! elsif creating_branch? # Use the pushed commits that aren't reachable by the default branch # as a heuristic. This may include more commits than are actually # pushed, but that shouldn't matter because we check for existing # cross-references later. project.repository.commits_between(project.default_branch, newrev, limit: PROCESS_COMMIT_LIMIT + 1) elsif updating_branch? project.repository.commits_between(oldrev, newrev, limit: PROCESS_COMMIT_LIMIT + 1) else # removing branch [] end end end def commits_count strong_memoize(:commits_count) do next threshold_commits.count if strong_memoized?(:threshold_commits) && threshold_commits.count <= PROCESS_COMMIT_LIMIT if creating_default_branch? project.repository.commit_count_for_ref(ref) elsif creating_branch? project.repository.count_commits_between(project.default_branch, newrev) elsif updating_branch? project.repository.count_commits_between(oldrev, newrev) else # removing branch 0 end end end override :invalidated_file_types def invalidated_file_types return super unless default_branch? && !creating_branch? modified_file_types end def modified_file_types paths = commit_paths.values.reduce(&:merge) || Set.new Gitlab::FileDetector.types_in_paths(paths) end def execute_branch_hooks project.repository.after_push_commit(branch_name) branch_create_hooks if creating_branch? branch_update_hooks if updating_branch? branch_change_hooks if creating_branch? || updating_branch? branch_remove_hooks if removing_branch? track_process_commit_limit_overflow end def branch_create_hooks project.repository.after_create_branch(expire_cache: false) project.after_create_default_branch if default_branch? end def branch_update_hooks # Update the bare repositories info/attributes file using the contents of # the default branch's .gitattributes file project.repository.copy_gitattributes(ref) if default_branch? end def branch_change_hooks enqueue_process_commit_messages enqueue_jira_connect_sync_messages track_ci_config_change_event end def branch_remove_hooks project.repository.after_remove_branch(expire_cache: false) end def track_ci_config_change_event return unless default_branch? commits_changing_ci_config.each do |commit| Gitlab::UsageDataCounters::HLLRedisCounter.track_event( 'o_pipeline_authoring_unique_users_committing_ciconfigfile', values: commit.author&.id ) end end def track_process_commit_limit_overflow return if threshold_commits.count <= PROCESS_COMMIT_LIMIT Gitlab::Metrics.add_event(:process_commit_limit_overflow) end # Schedules processing of commit messages def enqueue_process_commit_messages referencing_commits = limited_commits.select(&:matches_cross_reference_regex?) 
upstream_commit_ids = upstream_commit_ids(referencing_commits) referencing_commits.each do |commit| # Avoid reprocessing commits that already exist upstream if the project # is a fork. This will prevent duplicated/superfluous system notes on # mentionables referenced by a commit that is pushed to the upstream, # that is then also pushed to forks when these get synced by users. next if upstream_commit_ids.include?(commit.id) ProcessCommitWorker.perform_async( project.id, current_user.id, commit.to_hash, default_branch? ) end end def enqueue_jira_connect_sync_messages return unless project.jira_subscription_exists? branch_to_sync = branch_name if Atlassian::JiraIssueKeyExtractors::Branch.has_keys?(project, branch_name) commits_to_sync = filtered_commit_shas return if branch_to_sync.nil? && commits_to_sync.empty? if commits_to_sync.any? commits_to_sync.each_slice(JIRA_SYNC_BATCH_SIZE).with_index do |commits, i| JiraConnect::SyncBranchWorker.perform_in( JIRA_SYNC_BATCH_DELAY * i, project.id, branch_to_sync, commits, Atlassian::JiraConnect::Client.generate_update_sequence_id ) end else JiraConnect::SyncBranchWorker.perform_async( project.id, branch_to_sync, commits_to_sync, Atlassian::JiraConnect::Client.generate_update_sequence_id ) end end def filtered_commit_shas limited_commits.select { |commit| Atlassian::JiraIssueKeyExtractor.has_keys?(commit.safe_message) }.map(&:sha) end def signature_types [ ::CommitSignatures::GpgSignature, ::CommitSignatures::X509CommitSignature, ::CommitSignatures::SshSignature ] end def unsigned_commit_shas(commits) commit_shas = commits.map(&:sha) signature_types .map { |signature| signature.unsigned_commit_shas(commit_shas) } .reduce(&:&) end def enqueue_update_signatures unsigned = unsigned_commit_shas(limited_commits) return if unsigned.empty? signable = Gitlab::Git::Commit.shas_with_signatures(project.repository, unsigned) return if signable.empty? CreateCommitSignatureWorker.perform_async(signable, project.id) end # It's not sufficient to just check for a blank SHA as it's possible for the # branch to be pushed, but for the `post-receive` hook to never run: # https://gitlab.com/gitlab-org/gitlab-foss/issues/59257 def creating_branch? strong_memoize(:creating_branch) do Gitlab::Git.blank_ref?(oldrev) || !project.repository.branch_exists?(branch_name) end end def updating_branch? !creating_branch? && !removing_branch? end def removing_branch? Gitlab::Git.blank_ref?(newrev) end def creating_default_branch? creating_branch? && default_branch? end def default_branch? strong_memoize(:default_branch) do [nil, branch_name].include?(project.default_branch) end end def branch_name strong_memoize(:branch_name) { Gitlab::Git.ref_name(ref) } end def upstream_commit_ids(commits) set = Set.new upstream_project = project.fork_source if upstream_project upstream_project .commits_by(oids: commits.map(&:id)) .each { |commit| set << commit.id } end set end def commits_changing_ci_config commit_paths.select do |commit, paths| next if commit.merge_commit? paths.include?(project.ci_config_path_or_default) end.keys end def commit_paths strong_memoize(:commit_paths) do limited_commits.to_h do |commit| paths = Set.new(commit.raw_deltas.map(&:new_path)) [commit, paths] end end end end end Git::BranchHooksService.prepend_mod_with('Git::BranchHooksService') ```
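A sketch of how the `oldrev`/`newrev` pair maps onto the branch-state predicates above (an existing `project` and `user` are assumed; SHAs are taken from the repository purely for illustration):

```ruby
# Hypothetical sketch: the same service covers create, update, and removal,
# distinguished by blank refs:
#   oldrev == BLANK_SHA           -> creating_branch?  (branch_create_hooks)
#   newrev == BLANK_SHA           -> removing_branch?  (branch_remove_hooks)
#   both SHAs set, branch exists  -> updating_branch?  (branch_update_hooks)
change = {
  oldrev: project.commit('master').parent_id,
  newrev: project.commit('master').id,
  ref: 'refs/heads/master'
}

Git::BranchHooksService.new(project, user, change: change).execute
```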
# frozen_string_literal: true require 'spec_helper' RSpec.describe Git::BranchHooksService, :clean_gitlab_redis_shared_state, feature_category: :source_code_management do include RepoHelpers include ProjectForksHelper let(:project) { create(:project, :repository) } let(:user) { project.creator } let(:branch) { project.default_branch } let(:ref) { "refs/heads/#{branch}" } let(:commit_id) { sample_commit.id } let(:commit) { project.commit(commit_id) } let(:oldrev) { commit.parent_id } let(:newrev) { commit.id } let(:service) do described_class.new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }) end describe "Git Push Data" do subject(:push_data) { service.send(:push_data) } it 'has expected push data attributes' do is_expected.to match a_hash_including( object_kind: 'push', before: oldrev, after: newrev, ref: ref, ref_protected: project.protected_for?(ref), user_id: user.id, user_name: user.name, project_id: project.id ) end context "with repository data" do subject { push_data[:repository] } it 'has expected attributes' do is_expected.to match a_hash_including( name: project.name, url: project.url_to_repo, description: project.description, homepage: project.web_url ) end end context "with commits" do subject { push_data[:commits] } it { is_expected.to be_an(Array) } it 'has 1 element' do expect(subject.size).to eq(1) end context "the commit" do subject { push_data[:commits].first } it { expect(subject[:timestamp].in_time_zone).to eq(commit.date.in_time_zone) } it 'includes expected commit data' do is_expected.to match a_hash_including( id: commit.id, message: commit.safe_message, url: [ Gitlab.config.gitlab.url, project.namespace.to_param, project.to_param, '-', 'commit', commit.id ].join('/') ) end context "with a author" do subject { push_data[:commits].first[:author] } it 'includes expected author data' do is_expected.to match a_hash_including( name: commit.author_name, email: commit.author_email ) end end end end end describe 'Push Event' do let(:event) { Event.pushed_action.take } subject(:execute_service) { service.execute } context "with an existing branch" do it 'generates a push event with one commit' do execute_service expect(event).to be_an_instance_of(PushEvent) expect(event.project).to eq(project) expect(event).to be_pushed_action expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) expect(event.push_event_payload.commit_from).to eq(oldrev) expect(event.push_event_payload.commit_to).to eq(newrev) expect(event.push_event_payload.commit_title).to eq('Change some files') expect(event.push_event_payload.ref).to eq('master') expect(event.push_event_payload.commit_count).to eq(1) end context 'with changing CI config' do before do allow_next_instance_of(Gitlab::Git::Diff) do |diff| allow(diff).to receive(:new_path).and_return('.gitlab-ci.yml') end end let!(:commit_author) { create(:user, email: sample_commit.author_email) } let(:tracking_params) do ['o_pipeline_authoring_unique_users_committing_ciconfigfile', values: commit_author.id] end it 'tracks the event' do time = Time.zone.now execute_service expect(Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: 'o_pipeline_authoring_unique_users_committing_ciconfigfile', start_date: time, end_date: time + 7.days)).to eq(1) end context 'when the branch is not the main branch' do let(:branch) { 'feature' } it 'does not track the event' do expect(Gitlab::UsageDataCounters::HLLRedisCounter) .not_to receive(:track_event).with(*tracking_params) execute_service end end context 'when the 
CI config is a different path' do before do project.ci_config_path = 'config/ci.yml' end it 'does not track the event' do expect(Gitlab::UsageDataCounters::HLLRedisCounter) .not_to receive(:track_event).with(*tracking_params) execute_service end end end end context "with a new default branch" do let(:oldrev) { Gitlab::Git::BLANK_SHA } it 'generates a push event with more than one commit' do execute_service expect(event).to be_an_instance_of(PushEvent) expect(event.project).to eq(project) expect(event).to be_pushed_action expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) expect(event.push_event_payload.commit_from).to be_nil expect(event.push_event_payload.commit_to).to eq(newrev) expect(event.push_event_payload.commit_title).to eq('Change some files') expect(event.push_event_payload.ref).to eq('master') expect(event.push_event_payload.commit_count).to be > 1 end end context "with a new non-default branch" do let(:oldrev) { Gitlab::Git::BLANK_SHA } let(:branch) { 'fix' } let(:commit_id) { project.commit(branch).id } it 'generates a push event with more than one commit' do execute_service expect(event).to be_an_instance_of(PushEvent) expect(event.project).to eq(project) expect(event).to be_pushed_action expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) expect(event.push_event_payload.commit_from).to be_nil expect(event.push_event_payload.commit_to).to eq(newrev) expect(event.push_event_payload.commit_title).to eq('Test file for directories with a leading dot') expect(event.push_event_payload.ref).to eq('fix') expect(event.push_event_payload.commit_count).to be > 1 end end context 'removing a branch' do let(:newrev) { Gitlab::Git::BLANK_SHA } it 'generates a push event with no commits' do execute_service expect(event).to be_an_instance_of(PushEvent) expect(event.project).to eq(project) expect(event).to be_pushed_action expect(event.push_event_payload).to be_an_instance_of(PushEventPayload) expect(event.push_event_payload.commit_from).to eq(oldrev) expect(event.push_event_payload.commit_to).to be_nil expect(event.push_event_payload.ref).to eq('master') expect(event.push_event_payload.commit_count).to eq(0) end end end describe 'Invalidating project cache' do let(:commit_id) do project.repository.update_file( user, 'README.md', '', message: 'Update', branch_name: branch ) end let(:blank_sha) { Gitlab::Git::BLANK_SHA } def clears_cache(extended: []) expect(service).to receive(:invalidated_file_types).and_return(extended) if extended.present? 
expect(ProjectCacheWorker) .to receive(:perform_async) .with(project.id, extended, [], false) end service.execute end def clears_extended_cache clears_cache(extended: %i[readme]) end context 'on default branch' do context 'create' do # FIXME: When creating the default branch,the cache worker runs twice before do allow(ProjectCacheWorker).to receive(:perform_async) end let(:oldrev) { blank_sha } it { clears_cache } end context 'update' do it { clears_extended_cache } end context 'remove' do let(:newrev) { blank_sha } # TODO: this case should pass, but we only take account of added files it { clears_cache } end end context 'on ordinary branch' do let(:branch) { 'fix' } context 'create' do let(:oldrev) { blank_sha } it { clears_cache } end context 'update' do it { clears_cache } end context 'remove' do let(:newrev) { blank_sha } it { clears_cache } end end end describe 'signatures' do context 'when the commit has a signature' do context 'when the signature is already cached' do before do create(:gpg_signature, commit_sha: commit.id) end it 'does not queue a CreateCommitSignatureWorker' do expect(CreateCommitSignatureWorker).not_to receive(:perform_async) service.execute end end context 'when the signature is not yet cached' do it 'queues a CreateCommitSignatureWorker' do expect(CreateCommitSignatureWorker).to receive(:perform_async).with([commit.id], project.id) service.execute end it 'can queue several commits to create the gpg signature' do allow(Gitlab::Git::Commit) .to receive(:shas_with_signatures) .and_return([sample_commit.id, another_sample_commit.id]) expect(CreateCommitSignatureWorker) .to receive(:perform_async) .with([sample_commit.id, another_sample_commit.id], project.id) service.execute end end end context 'when the commit does not have a signature' do before do allow(Gitlab::Git::Commit) .to receive(:shas_with_signatures) .with(project.repository, [sample_commit.id]) .and_return([]) end it 'does not queue a CreateCommitSignatureWorker' do expect(CreateCommitSignatureWorker) .not_to receive(:perform_async) .with(sample_commit.id, project.id) service.execute end end end describe 'Processing commit messages' do # Create 6 commits, 3 of which have references. Limiting to 4 commits, we # expect to see two commit message processors enqueued. 
let!(:commit_ids) do Array.new(6) do |i| message = "Issue #{'#' if i.even?}#{i}" project.repository.update_file( user, 'README.md', '', message: message, branch_name: branch ) end end let(:commits_count) { service.send(:commits_count) } let(:threshold_limit) { described_class::PROCESS_COMMIT_LIMIT + 1 } let(:oldrev) { project.commit(commit_ids.first).parent_id } let(:newrev) { commit_ids.last } before do stub_const("::Git::BaseHooksService::PROCESS_COMMIT_LIMIT", 4) end context 'creating the default branch' do let(:oldrev) { Gitlab::Git::BLANK_SHA } it 'processes a limited number of commit messages' do expect(project.repository) .to receive(:commits) .with(newrev, limit: threshold_limit) .and_call_original expect(ProcessCommitWorker).to receive(:perform_async).twice service.execute expect(commits_count).to eq(project.repository.commit_count_for_ref(newrev)) end it 'collects the related metrics' do expect(Gitlab::Metrics).to receive(:add_event).with(:push_commit, { branch: 'master' }) expect(Gitlab::Metrics).to receive(:add_event).with(:push_branch, {}) expect(Gitlab::Metrics).to receive(:add_event).with(:change_default_branch, {}) expect(Gitlab::Metrics).to receive(:add_event).with(:process_commit_limit_overflow) service.execute end context 'when limit is not hit' do before do stub_const("::Git::BaseHooksService::PROCESS_COMMIT_LIMIT", 100) end it 'does not collect the corresponding metric' do expect(Gitlab::Metrics).not_to receive(:add_event).with(:process_commit_limit_overflow) service.execute end end end context 'updating the default branch' do it 'processes a limited number of commit messages' do expect(project.repository) .to receive(:commits_between) .with(oldrev, newrev, limit: threshold_limit) .and_call_original expect(ProcessCommitWorker).to receive(:perform_async).twice service.execute expect(commits_count).to eq(project.repository.count_commits_between(oldrev, newrev)) end end context 'removing the default branch' do let(:newrev) { Gitlab::Git::BLANK_SHA } it 'does not process commit messages' do expect(project.repository).not_to receive(:commits) expect(project.repository).not_to receive(:commits_between) expect(ProcessCommitWorker).not_to receive(:perform_async) service.execute expect(commits_count).to eq(0) end end context 'creating a normal branch' do let(:branch) { 'fix' } let(:oldrev) { Gitlab::Git::BLANK_SHA } it 'processes a limited number of commit messages' do expect(project.repository) .to receive(:commits_between) .with(project.default_branch, newrev, limit: threshold_limit) .and_call_original expect(ProcessCommitWorker).to receive(:perform_async).twice service.execute expect(commits_count).to eq(project.repository.count_commits_between(project.default_branch, branch)) end end context 'updating a normal branch' do let(:branch) { 'fix' } it 'processes a limited number of commit messages' do expect(project.repository) .to receive(:commits_between) .with(oldrev, newrev, limit: threshold_limit) .and_call_original expect(ProcessCommitWorker).to receive(:perform_async).twice service.execute expect(commits_count).to eq(project.repository.count_commits_between(oldrev, newrev)) end end context 'removing a normal branch' do let(:branch) { 'fix' } let(:newrev) { Gitlab::Git::BLANK_SHA } it 'does not process commit messages' do expect(project.repository).not_to receive(:commits) expect(project.repository).not_to receive(:commits_between) expect(ProcessCommitWorker).not_to receive(:perform_async) service.execute expect(commits_count).to eq(0) end end context 'when the project is 
forked', :sidekiq_might_not_need_inline do let(:upstream_project) { project } let(:forked_project) { fork_project(upstream_project, user, repository: true, using_service: true) } let!(:forked_service) do described_class.new(forked_project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }) end context 'when commits already exists in the upstream project' do it 'does not process commit messages' do expect(ProcessCommitWorker).not_to receive(:perform_async) forked_service.execute end end context 'when a commit does not exist in the upstream repo' do # On top of the existing 6 commits, 3 of which have references, # create 2 more, 1 of which has a reference. Limiting to 4 commits, we # expect to see one commit message processor enqueued. let!(:forked_commit_ids) do Array.new(2) do |i| message = "Issue #{'#' if i.even?}#{i}" forked_project.repository.update_file( user, 'README.md', '', message: message, branch_name: branch ) end end let(:newrev) { forked_commit_ids.last } it 'processes the commit message' do expect(ProcessCommitWorker).to receive(:perform_async).once forked_service.execute end end context 'when the upstream project no longer exists' do it 'processes the commit messages' do upstream_project.destroy! expect(ProcessCommitWorker).to receive(:perform_async).twice forked_service.execute end end end end describe 'New branch detection' do let(:branch) { 'fix' } context 'oldrev is the blank SHA' do let(:oldrev) { Gitlab::Git::BLANK_SHA } it 'is treated as a new branch' do expect(service).to receive(:branch_create_hooks) service.execute end end context 'oldrev is set' do context 'Gitaly does not know about the branch' do it 'is treated as a new branch' do allow(project.repository).to receive(:branch_names) { [] } expect(service).to receive(:branch_create_hooks) service.execute end end context 'Gitaly knows about the branch' do it 'is not treated as a new branch' do expect(service).not_to receive(:branch_create_hooks) service.execute end end end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Git class BranchPushService < ::BaseService include Gitlab::Access include Gitlab::Utils::StrongMemoize include ChangeParams # This method will be called after each git update # and only if the provided user and project are present in GitLab. # # All callbacks for post receive action should be placed here. # # Next, this method: # 1. Creates the push event # 2. Updates merge requests # 3. Recognizes cross-references from commit messages # 4. Executes the project's webhooks # 5. Executes the project's services # 6. Checks if the project's main language has changed # def execute return unless Gitlab::Git.branch_ref?(ref) enqueue_update_mrs enqueue_detect_repository_languages enqueue_record_project_target_platforms execute_related_hooks stop_environments unlock_artifacts true end # Update merge requests that may be affected by this push. A new branch # could cause the last commit of a merge request to change. def enqueue_update_mrs return if params[:merge_request_branches]&.exclude?(branch_name) UpdateMergeRequestsWorker.perform_async( project.id, current_user.id, oldrev, newrev, ref, params.slice(:push_options).deep_stringify_keys ) end def enqueue_detect_repository_languages return unless default_branch? DetectRepositoryLanguagesWorker.perform_async(project.id) end def enqueue_record_project_target_platforms return unless default_branch? project.enqueue_record_project_target_platforms end # Only stop environments if the ref is a branch that is being deleted def stop_environments return unless removing_branch? Environments::StopService.new(project, current_user).execute_for_branch(branch_name) end def unlock_artifacts return unless removing_branch? Ci::RefDeleteUnlockArtifactsWorker.perform_async(project.id, current_user.id, ref) end def execute_related_hooks BranchHooksService.new(project, current_user, params).execute end def removing_branch? Gitlab::Git.blank_ref?(newrev) end def branch_name strong_memoize(:branch_name) { Gitlab::Git.ref_name(ref) } end def default_branch? strong_memoize(:default_branch) do [nil, branch_name].include?(project.default_branch) end end end end Git::BranchPushService.prepend_mod_with('Git::BranchPushService') ```
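A sketch of the entry point for the class above (same assumptions: an existing `project` and `user`); the spec below drives it with a `change:` hash and optional `push_options:`:

```ruby
# Hypothetical invocation sketch: pushing new commits to an existing branch.
# Non-branch refs make #execute return early; branch removals additionally
# stop environments and unlock CI artifacts.
change = {
  oldrev: project.commit('master').parent_id,
  newrev: project.commit('master').id,
  ref: 'refs/heads/master'
}

Git::BranchPushService
  .new(project, user, change: change, push_options: { 'mr' => { 'create' => true } })
  .execute
```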
# frozen_string_literal: true require 'spec_helper' RSpec.describe Git::BranchPushService, :use_clean_rails_redis_caching, services: true, feature_category: :source_code_management do include RepoHelpers let_it_be(:user) { create(:user) } let_it_be_with_refind(:project) { create(:project, :repository) } let(:blankrev) { Gitlab::Git::BLANK_SHA } let(:oldrev) { sample_commit.parent_id } let(:newrev) { sample_commit.id } let(:branch) { 'master' } let(:ref) { "refs/heads/#{branch}" } let(:push_options) { nil } let(:service) do described_class .new(project, user, change: { oldrev: oldrev, newrev: newrev, ref: ref }, push_options: push_options) end before do project.add_maintainer(user) end subject(:execute_service) do service.execute end describe 'Push branches' do context 'new branch' do let(:oldrev) { blankrev } it { is_expected.to be_truthy } it 'calls the after_push_commit hook' do expect(project.repository).to receive(:after_push_commit).with('master') subject end it 'calls the after_create_branch hook' do expect(project.repository).to receive(:after_create_branch) subject end end context 'existing branch' do it { is_expected.to be_truthy } it 'calls the after_push_commit hook' do expect(project.repository).to receive(:after_push_commit).with('master') subject end end context 'rm branch' do let(:newrev) { blankrev } it { is_expected.to be_truthy } it 'calls the after_push_commit hook' do expect(project.repository).to receive(:after_push_commit).with('master') subject end it 'calls the after_remove_branch hook' do expect(project.repository).to receive(:after_remove_branch) subject end end end describe "Pipelines" do before do stub_ci_pipeline_to_return_yaml_file end it 'creates a pipeline with the right parameters' do expect(Ci::CreatePipelineService).to receive(:new).with( project, user, { before: oldrev, after: newrev, ref: ref, checkout_sha: SeedRepo::Commit::ID, variables_attributes: [], push_options: {} } ).and_call_original subject end it "creates a new pipeline" do expect { subject }.to change { Ci::Pipeline.count } pipeline = Ci::Pipeline.last expect(pipeline).to be_push expect(Gitlab::Git::BRANCH_REF_PREFIX + pipeline.ref).to eq(ref) end context 'when pipeline has errors' do before do config = YAML.dump({ test: { script: 'ls', only: ['feature'] } }) stub_ci_pipeline_yaml_file(config) end it 'reports an error' do allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) expect(Sidekiq.logger).to receive(:warn) expect { subject }.not_to change { Ci::Pipeline.count } end context 'with push options' do let(:push_options) { { 'mr' => { 'create' => true } } } it 'sanitizes push options' do allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true) expect(Sidekiq.logger).to receive(:warn) do |args| pipeline_params = args[:pipeline_params] expect(pipeline_params.keys).to match_array(%i[before after ref variables_attributes checkout_sha]) end expect { subject }.not_to change { Ci::Pipeline.count } end end end context 'when .gitlab-ci.yml file is invalid' do before do stub_ci_pipeline_yaml_file('invalid yaml file') end it 'persists an error pipeline' do expect { subject }.to change { Ci::Pipeline.count } pipeline = Ci::Pipeline.last expect(pipeline).to be_push expect(pipeline).to be_failed expect(pipeline).to be_config_error end end end describe "Updates merge requests" do let(:oldrev) { blankrev } it "when pushing a new branch for the first time" do expect(UpdateMergeRequestsWorker) .to receive(:perform_async) .with(project.id, user.id, blankrev, newrev, ref, { 'push_options' => nil 
}) .ordered subject end end describe "Updates git attributes" do context "for default branch" do context "when first push" do let(:oldrev) { blankrev } it "calls the copy attributes method for the first push to the default branch" do expect(project.repository).to receive(:copy_gitattributes).with('master') subject end end it "calls the copy attributes method for changes to the default branch" do expect(project.repository).to receive(:copy_gitattributes).with(ref) subject end end context "for non-default branch" do before do # Make sure the "default" branch is different allow(project).to receive(:default_branch).and_return('not-master') end it "does not call copy attributes method" do expect(project.repository).not_to receive(:copy_gitattributes) subject end end end describe "Webhooks" do before do create(:project_hook, push_events: true, project: project) end context "when pushing a branch for the first time" do let(:oldrev) { blankrev } it "executes webhooks" do expect(project).to receive(:execute_hooks) expect(project.default_branch).to eq("master") subject expect(project.protected_branches).not_to be_empty expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER]) expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER]) end it "with default branch protection disabled" do expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_NONE) expect(project).to receive(:execute_hooks) expect(project.default_branch).to eq("master") subject expect(project.protected_branches).to be_empty end it "with default branch protection set to 'developers can push'" do expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH) expect(project).to receive(:execute_hooks) expect(project.default_branch).to eq("master") subject expect(project.protected_branches).not_to be_empty expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER]) expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER]) end it "with an existing branch permission configured" do expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_PUSH) create(:protected_branch, :no_one_can_push, :developers_can_merge, project: project, name: 'master') expect(project).to receive(:execute_hooks) expect(project.default_branch).to eq("master") expect(ProtectedBranches::CreateService).not_to receive(:new) subject expect(project.protected_branches).not_to be_empty expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::NO_ACCESS]) expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER]) end it "with default branch protection set to 'developers can merge'" do expect(project.namespace).to receive(:default_branch_protection).and_return(Gitlab::Access::PROTECTION_DEV_CAN_MERGE) expect(project).to receive(:execute_hooks) expect(project.default_branch).to eq("master") subject expect(project.protected_branches).not_to be_empty expect(project.protected_branches.first.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER]) expect(project.protected_branches.first.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::DEVELOPER]) end end context "when pushing 
new commits to existing branch" do it "executes webhooks" do expect(project).to receive(:execute_hooks) subject end end end describe "cross-reference notes" do let(:issue) { create :issue, project: project } let(:commit_author) { create :user } let(:commit) { project.commit } before do project.add_developer(commit_author) project.add_developer(user) allow(commit).to receive_messages( safe_message: "this commit \n mentions #{issue.to_reference}", references: [issue], author_name: commit_author.name, author_email: commit_author.email ) allow(Commit).to receive(:build_from_sidekiq_hash) .and_return(commit) allow(project.repository).to receive(:commits_between).and_return([commit]) end it "creates a note if a pushed commit mentions an issue", :sidekiq_might_not_need_inline do expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author) subject end it "only creates a cross-reference note if one doesn't already exist" do SystemNoteService.cross_reference(issue, commit, user) expect(SystemNoteService).not_to receive(:cross_reference).with(issue, commit, commit_author) subject end it "defaults to the pushing user if the commit's author is not known", :sidekiq_inline, :use_clean_rails_redis_caching do allow(commit).to receive_messages( author_name: 'unknown name', author_email: '[email protected]' ) expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, user) subject end context "when first push on a non-default branch" do let(:oldrev) { blankrev } let(:ref) { 'refs/heads/other' } it "finds references", :sidekiq_might_not_need_inline do allow(project.repository).to receive(:commits_between).with(blankrev, newrev).and_return([]) allow(project.repository).to receive(:commits_between).with("master", newrev).and_return([commit]) expect(SystemNoteService).to receive(:cross_reference).with(issue, commit, commit_author) subject end end end describe "issue metrics" do let(:issue) { create :issue, project: project } let(:commit_author) { create :user } let(:commit) { project.commit } let(:commit_time) { Time.current } before do project.add_developer(commit_author) project.add_developer(user) allow(commit).to receive_messages( safe_message: "this commit \n mentions #{issue.to_reference}", references: [issue], author_name: commit_author.name, author_email: commit_author.email, committed_date: commit_time ) allow(Commit).to receive(:build_from_sidekiq_hash) .and_return(commit) allow(project.repository).to receive(:commits_between).and_return([commit]) end context "while saving the 'first_mentioned_in_commit_at' metric for an issue" do it 'sets the metric for referenced issues', :sidekiq_inline, :use_clean_rails_redis_caching do subject expect(issue.reload.metrics.first_mentioned_in_commit_at).to be_like_time(commit_time) end it 'does not set the metric for non-referenced issues' do non_referenced_issue = create(:issue, project: project) subject expect(non_referenced_issue.reload.metrics.first_mentioned_in_commit_at).to be_nil end end end describe "closing issues from pushed commits containing a closing reference" do let(:issue) { create :issue, project: project } let(:other_issue) { create :issue, project: project } let(:commit_author) { create :user } let(:closing_commit) { project.commit } before do allow(closing_commit).to receive_messages( issue_closing_regex: /^([Cc]loses|[Ff]ixes) #\d+/, safe_message: "this is some work.\n\ncloses ##{issue.iid}", author_name: commit_author.name, author_email: commit_author.email ) allow(project.repository).to 
receive(:commits_between) .and_return([closing_commit]) allow(Commit).to receive(:build_from_sidekiq_hash) .and_return(closing_commit) project.add_maintainer(commit_author) end context "to default branches" do let(:user) { commit_author } it "closes issues", :sidekiq_might_not_need_inline do subject expect(Issue.find(issue.id)).to be_closed end it "adds a note indicating that the issue is now closed", :sidekiq_might_not_need_inline do expect(SystemNoteService).to receive(:change_status).with(issue, project, commit_author, "closed", closing_commit) subject end it "doesn't create additional cross-reference notes" do expect(SystemNoteService).not_to receive(:cross_reference) subject end end context "to non-default branches" do before do # Make sure the "default" branch is different allow(project).to receive(:default_branch).and_return('not-master') end it "creates cross-reference notes", :sidekiq_inline, :use_clean_rails_redis_caching do expect(SystemNoteService).to receive(:cross_reference).with(issue, closing_commit, commit_author) subject end it "doesn't close issues" do subject expect(Issue.find(issue.id)).to be_opened end end context "for jira issue tracker" do include JiraIntegrationHelpers let(:jira_tracker) { project.create_jira_integration if project.jira_integration.nil? } before do # project.create_jira_integration doesn't seem to invalidate the cache here project.has_external_issue_tracker = true stub_jira_integration_test jira_integration_settings stub_jira_urls("JIRA-1") allow(closing_commit).to receive_messages({ issue_closing_regex: Regexp.new(Gitlab.config.gitlab.issue_closing_pattern), safe_message: message, author_name: commit_author.name, author_email: commit_author.email }) allow(JIRA::Resource::Remotelink).to receive(:all).and_return([]) allow(project.repository).to receive_messages(commits_between: [closing_commit]) end after do jira_tracker.destroy! end context "mentioning an issue" do let(:message) { "this is some work.\n\nrelated to JIRA-1" } it "initiates one api call to jira server to mention the issue", :sidekiq_inline, :use_clean_rails_redis_caching do subject expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with( body: /mentioned this issue in/ ).once end end context "closing an issue" do let(:message) { "this is some work.\n\ncloses JIRA-1" } let(:comment_body) do { body: "Issue solved with [#{closing_commit.id}|http://#{Gitlab.config.gitlab.host}/#{project.full_path}/-/commit/#{closing_commit.id}]." }.to_json end before do open_issue = JIRA::Resource::Issue.new(jira_tracker.client, attrs: { "id" => "JIRA-1" }) closed_issue = open_issue.dup allow(open_issue).to receive(:resolution).and_return(false) allow(closed_issue).to receive(:resolution).and_return(true) allow(JIRA::Resource::Issue).to receive(:find).and_return(open_issue, closed_issue) allow_any_instance_of(JIRA::Resource::Issue).to receive(:key).and_return("JIRA-1") end context "using right markdown", :sidekiq_might_not_need_inline do let(:user) { commit_author } it "initiates one api call to jira server to close the issue" do subject expect(WebMock).to have_requested(:post, jira_api_transition_url('JIRA-1')).once end it "initiates one api call to jira server to comment on the issue" do subject expect(WebMock) .to have_requested(:post, jira_api_comment_url('JIRA-1')) .with(body: comment_body) .once end end context "using internal issue reference" do let(:user) { commit_author } context 'when internal issues are disabled' do before do project.issues_enabled = false project.save! 
end let(:message) { "this is some work.\n\ncloses #1" } it "does not initiates one api call to jira server to close the issue" do subject expect(WebMock).not_to have_requested(:post, jira_api_transition_url('JIRA-1')) end it "does not initiates one api call to jira server to comment on the issue" do subject expect(WebMock).not_to have_requested(:post, jira_api_comment_url('JIRA-1')).with( body: comment_body ).once end end context 'when internal issues are enabled', :sidekiq_might_not_need_inline do let(:issue) { create(:issue, project: project) } let(:message) { "this is some work.\n\ncloses JIRA-1 \n\n closes #{issue.to_reference}" } it "initiates one api call to jira server to close the jira issue" do subject expect(WebMock).to have_requested(:post, jira_api_transition_url('JIRA-1')).once end it "initiates one api call to jira server to comment on the jira issue" do subject expect(WebMock).to have_requested(:post, jira_api_comment_url('JIRA-1')).with( body: comment_body ).once end it "closes the internal issue" do subject expect(issue.reload).to be_closed end it "adds a note indicating that the issue is now closed" do expect(SystemNoteService).to receive(:change_status) .with(issue, project, commit_author, "closed", closing_commit) subject end end end end end end describe "empty project" do let(:project) { create(:project_empty_repo) } let(:ref) { 'refs/heads/feature' } let(:oldrev) { blankrev } before do allow(project).to receive(:default_branch).and_return('feature') expect(project).to receive(:change_head) { 'feature' } end it 'push to first branch updates HEAD' do subject end end describe "CI environments" do context 'create branch' do let(:oldrev) { blankrev } it 'does nothing' do expect(::Environments::StopService).not_to receive(:new) subject end end context 'update branch' do it 'does nothing' do expect(::Environments::StopService).not_to receive(:new) subject end end context 'delete branch' do let(:newrev) { blankrev } it 'stops environments' do expect_next_instance_of(::Environments::StopService) do |stop_service| expect(stop_service.project).to eq(project) expect(stop_service.current_user).to eq(user) expect(stop_service).to receive(:execute_for_branch).with(branch) end subject end end end describe 'artifacts' do context 'create branch' do let(:oldrev) { blankrev } it 'does nothing' do expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async) subject end end context 'update branch' do it 'does nothing' do expect(::Ci::RefDeleteUnlockArtifactsWorker).not_to receive(:perform_async) subject end end context 'delete branch' do let(:newrev) { blankrev } it 'unlocks artifacts' do expect(::Ci::RefDeleteUnlockArtifactsWorker) .to receive(:perform_async).with(project.id, user.id, "refs/heads/#{branch}") subject end end end describe 'Hooks' do context 'run on a branch' do it 'delegates to Git::BranchHooksService' do expect_next_instance_of(::Git::BranchHooksService) do |hooks_service| expect(hooks_service.project).to eq(project) expect(hooks_service.current_user).to eq(user) expect(hooks_service.params).to include( change: { oldrev: oldrev, newrev: newrev, ref: ref } ) expect(hooks_service).to receive(:execute) end subject end end context 'run on a tag' do let(:ref) { 'refs/tags/v1.1.0' } it 'does nothing' do expect(::Git::BranchHooksService).not_to receive(:new) subject end end end context 'Jira Connect hooks' do let(:branch_to_sync) { nil } let(:commits_to_sync) { [] } shared_examples 'enqueues Jira sync worker' do specify :aggregate_failures do Sidekiq::Testing.fake! 
do if commits_to_sync.any? expect(JiraConnect::SyncBranchWorker) .to receive(:perform_in) .with(kind_of(Numeric), project.id, branch_to_sync, commits_to_sync, kind_of(Numeric)) .and_call_original else expect(JiraConnect::SyncBranchWorker) .to receive(:perform_async) .with(project.id, branch_to_sync, commits_to_sync, kind_of(Numeric)) .and_call_original end expect { subject }.to change(JiraConnect::SyncBranchWorker.jobs, :size).by(1) end end end shared_examples 'does not enqueue Jira sync worker' do specify do Sidekiq::Testing.fake! do expect { subject }.not_to change(JiraConnect::SyncBranchWorker.jobs, :size) end end end context 'with a Jira subscription' do before do create(:jira_connect_subscription, namespace: project.namespace) end context 'branch name contains Jira issue key' do let(:branch_to_sync) { 'branch-JIRA-123' } let(:ref) { "refs/heads/#{branch_to_sync}" } it_behaves_like 'enqueues Jira sync worker' end context 'commit message contains Jira issue key' do let(:commits_to_sync) { [newrev] } before do allow_any_instance_of(Commit).to receive(:safe_message).and_return('Commit with key JIRA-123') end it_behaves_like 'enqueues Jira sync worker' describe 'batch requests' do let(:commits_to_sync) { [sample_commit.id, another_sample_commit.id] } it 'enqueues multiple jobs' do # We have to stub this as we only have two valid commits to use stub_const('Git::BranchHooksService::JIRA_SYNC_BATCH_SIZE', 1) expect_any_instance_of(Git::BranchHooksService).to receive(:filtered_commit_shas).and_return(commits_to_sync) expect(JiraConnect::SyncBranchWorker) .to receive(:perform_in) .with(0.seconds, project.id, branch_to_sync, [commits_to_sync.first], kind_of(Numeric)) .and_call_original expect(JiraConnect::SyncBranchWorker) .to receive(:perform_in) .with(10.seconds, project.id, branch_to_sync, [commits_to_sync.last], kind_of(Numeric)) .and_call_original subject end end end context 'branch name and commit message does not contain Jira issue key' do it_behaves_like 'does not enqueue Jira sync worker' end end context 'without a Jira subscription' do it_behaves_like 'does not enqueue Jira sync worker' end end describe 'project target platforms detection' do let(:oldrev) { blankrev } it 'calls enqueue_record_project_target_platforms on the project' do expect(project).to receive(:enqueue_record_project_target_platforms) subject end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Git class WikiPushService class Change include Gitlab::Utils::StrongMemoize # @param [Wiki] wiki # @param [Hash] change - must have keys `:oldrev` and `:newrev` # @param [Gitlab::Git::RawDiffChange] raw_change def initialize(wiki, change, raw_change) @wiki = wiki @raw_change = raw_change @change = change end def page strong_memoize(:page) { wiki.find_page(slug, revision) } end # See [Gitlab::Git::RawDiffChange#extract_operation] for the # definition of the full range of operation values. def event_action case raw_change.operation when :added :created when :deleted :destroyed else :updated end end def last_known_slug strip_extension(raw_change.old_path || raw_change.new_path) end def sha change[:newrev] end private attr_reader :raw_change, :change, :wiki def filename return raw_change.old_path if deleted? raw_change.new_path end def slug strip_extension(filename) end def revision return change[:oldrev] if deleted? change[:newrev] end def deleted? raw_change.operation == :deleted end def strip_extension(filename) return unless filename encoded_filename = Gitlab::EncodingHelper.encode_utf8(filename.dup) File.basename(encoded_filename, File.extname(encoded_filename)) end end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Git::WikiPushService::Change, feature_category: :source_code_management do subject { described_class.new(project_wiki, change, raw_change) } let(:project_wiki) { double('ProjectWiki') } let(:raw_change) { double('RawChange', new_path: new_path, old_path: old_path, operation: operation) } let(:change) { { oldrev: generate(:sha), newrev: generate(:sha) } } let(:new_path) do case operation when :deleted nil else generate(:wiki_filename) end end let(:old_path) do case operation when :added nil when :deleted, :renamed generate(:wiki_filename) else new_path end end describe '#page' do context 'the page does not exist' do before do expect(project_wiki).to receive(:find_page).with(String, String).and_return(nil) end %i[added deleted renamed modified].each do |op| context "the operation is #{op}" do let(:operation) { op } it { is_expected.to have_attributes(page: be_nil) } end end end context 'the page can be found' do let(:wiki_page) { double('WikiPage') } before do expect(project_wiki).to receive(:find_page).with(slug, revision).and_return(wiki_page) end context 'the page has been deleted' do let(:operation) { :deleted } let(:slug) { old_path.chomp('.md') } let(:revision) { change[:oldrev] } it { is_expected.to have_attributes(page: wiki_page) } end %i[added renamed modified].each do |op| context "the operation is #{op}" do let(:operation) { op } let(:slug) { new_path.chomp('.md') } let(:revision) { change[:newrev] } it { is_expected.to have_attributes(page: wiki_page) } end end end end describe '#last_known_slug' do context 'the page has been created' do let(:operation) { :added } it { is_expected.to have_attributes(last_known_slug: new_path.chomp('.md')) } end %i[renamed modified deleted].each do |op| context "the operation is #{op}" do let(:operation) { op } it { is_expected.to have_attributes(last_known_slug: old_path.chomp('.md')) } end end end describe '#event_action' do context 'the page is deleted' do let(:operation) { :deleted } it { is_expected.to have_attributes(event_action: :destroyed) } end context 'the page is added' do let(:operation) { :added } it { is_expected.to have_attributes(event_action: :created) } end %i[renamed modified].each do |op| context "the page is #{op}" do let(:operation) { op } it { is_expected.to have_attributes(event_action: :updated) } end end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module Onboarding class ProgressService class Async attr_reader :namespace_id def initialize(namespace_id) @namespace_id = namespace_id end def execute(action:) return unless Onboarding::Progress.not_completed?(namespace_id, action) Onboarding::ProgressWorker.perform_async(namespace_id, action) end end def self.async(namespace_id) Async.new(namespace_id) end def initialize(namespace) @namespace = namespace&.root_ancestor end def execute(action:) return unless @namespace Onboarding::Progress.register(@namespace, action) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe Onboarding::ProgressService, feature_category: :onboarding do describe '.async' do let_it_be(:namespace) { create(:namespace) } let_it_be(:action) { :git_pull } subject(:execute_service) { described_class.async(namespace.id).execute(action: action) } context 'when not onboarded' do it 'does not schedule a worker' do expect(Onboarding::ProgressWorker).not_to receive(:perform_async) execute_service end end context 'when onboarded' do before do Onboarding::Progress.onboard(namespace) end context 'when action is already completed' do before do Onboarding::Progress.register(namespace, action) end it 'does not schedule a worker' do expect(Onboarding::ProgressWorker).not_to receive(:perform_async) execute_service end end context 'when action is not yet completed' do it 'schedules a worker' do expect(Onboarding::ProgressWorker).to receive(:perform_async) execute_service end end end end describe '#execute' do let(:namespace) { create(:namespace) } let(:action) { :namespace_action } subject(:execute_service) { described_class.new(namespace).execute(action: :subscription_created) } context 'when the namespace is a root' do before do Onboarding::Progress.onboard(namespace) end it 'registers a namespace onboarding progress action for the given namespace' do execute_service expect(Onboarding::Progress.completed?(namespace, :subscription_created)).to eq(true) end end context 'when the namespace is not the root' do let(:group) { create(:group, :nested) } before do Onboarding::Progress.onboard(group) end it 'does not register a namespace onboarding progress action' do execute_service expect(Onboarding::Progress.completed?(group, :subscription_created)).to be(nil) end end context 'when no namespace is passed' do let(:namespace) { nil } it 'does not register a namespace onboarding progress action' do execute_service expect(Onboarding::Progress.completed?(namespace, :subscription_created)).to be(nil) end end end end
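For quick orientation, a hedged usage sketch of the two entry points this spec exercises; `namespace` and the action names are illustrative and assume a loaded GitLab environment:

```ruby
# Synchronous path: registers the action against the namespace's root ancestor.
Onboarding::ProgressService.new(namespace).execute(action: :subscription_created)

# Asynchronous path: enqueues Onboarding::ProgressWorker, but only when the
# action is not yet completed for the given namespace id.
Onboarding::ProgressService.async(namespace.id).execute(action: :git_pull)
```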
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module PagesDomains class CreateAcmeOrderService attr_reader :pages_domain def initialize(pages_domain) @pages_domain = pages_domain end def execute lets_encrypt_client = Gitlab::LetsEncrypt::Client.new order = lets_encrypt_client.new_order(pages_domain.domain) challenge = order.new_challenge private_key = OpenSSL::PKey::RSA.new(4096) saved_order = pages_domain.acme_orders.create!( url: order.url, expires_at: order.expires, private_key: private_key.to_pem, challenge_token: challenge.token, challenge_file_content: challenge.file_content ) challenge.request_validation saved_order end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe PagesDomains::CreateAcmeOrderService, feature_category: :pages do include LetsEncryptHelpers let(:pages_domain) { create(:pages_domain) } let(:challenge) { ::Gitlab::LetsEncrypt::Challenge.new(acme_challenge_double) } let(:order_double) do Gitlab::LetsEncrypt::Order.new(acme_order_double).tap do |order| allow(order).to receive(:new_challenge).and_return(challenge) end end let(:lets_encrypt_client) do instance_double('Gitlab::LetsEncrypt::Client').tap do |client| allow(client).to receive(:new_order).with(pages_domain.domain) .and_return(order_double) end end let(:service) { described_class.new(pages_domain) } before do allow(::Gitlab::LetsEncrypt::Client).to receive(:new).and_return(lets_encrypt_client) end it 'saves order to database before requesting validation' do allow(pages_domain.acme_orders).to receive(:create!).and_call_original allow(challenge).to receive(:request_validation).and_call_original service.execute expect(pages_domain.acme_orders).to have_received(:create!).ordered expect(challenge).to have_received(:request_validation).ordered end it 'generates and saves private key' do service.execute saved_order = PagesDomainAcmeOrder.last expect { OpenSSL::PKey::RSA.new(saved_order.private_key) }.not_to raise_error end it 'properly saves order attributes' do service.execute saved_order = PagesDomainAcmeOrder.last expect(saved_order.url).to eq(order_double.url) expect(saved_order.expires_at).to be_like_time(order_double.expires) end it 'properly saves challenge attributes' do service.execute saved_order = PagesDomainAcmeOrder.last expect(saved_order.challenge_token).to eq(challenge.token) expect(saved_order.challenge_file_content).to eq(challenge.file_content) end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module PagesDomains class RetryAcmeOrderService attr_reader :pages_domain def initialize(pages_domain) @pages_domain = pages_domain end def execute updated = pages_domain.with_lock do next unless pages_domain.auto_ssl_enabled && pages_domain.auto_ssl_failed pages_domain.update!(auto_ssl_failed: false) end return unless updated PagesDomainSslRenewalWorker.perform_async(pages_domain.id) publish_event(pages_domain) end private def publish_event(domain) event = PagesDomainUpdatedEvent.new( data: { project_id: domain.project.id, namespace_id: domain.project.namespace_id, root_namespace_id: domain.project.root_namespace.id, domain_id: domain.id, domain: domain.domain } ) Gitlab::EventStore.publish(event) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe PagesDomains::RetryAcmeOrderService, feature_category: :pages do let_it_be(:project) { create(:project) } let(:domain) { create(:pages_domain, project: project, auto_ssl_enabled: true, auto_ssl_failed: true) } let(:service) { described_class.new(domain) } it 'clears auto_ssl_failed' do expect { service.execute } .to change { domain.auto_ssl_failed } .from(true).to(false) .and publish_event(PagesDomains::PagesDomainUpdatedEvent) .with( project_id: project.id, namespace_id: project.namespace.id, root_namespace_id: project.root_namespace.id, domain_id: domain.id, domain: domain.domain ) end it 'schedules renewal worker and publish PagesDomainUpdatedEvent event' do expect(PagesDomainSslRenewalWorker).to receive(:perform_async).with(domain.id).and_return(nil).once expect { service.execute } .to publish_event(PagesDomains::PagesDomainUpdatedEvent) .with( project_id: project.id, namespace_id: project.namespace.id, root_namespace_id: project.root_namespace.id, domain_id: domain.id, domain: domain.domain ) end it "doesn't schedule renewal worker if Let's Encrypt integration is not enabled" do domain.update!(auto_ssl_enabled: false) expect(PagesDomainSslRenewalWorker).not_to receive(:new) expect { service.execute } .to not_publish_event(PagesDomains::PagesDomainUpdatedEvent) end it "doesn't schedule renewal worker if auto ssl has not failed yet" do domain.update!(auto_ssl_failed: false) expect(PagesDomainSslRenewalWorker).not_to receive(:new) expect { service.execute } .to not_publish_event(PagesDomains::PagesDomainUpdatedEvent) end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module PagesDomains class UpdateService < BaseService def execute(domain) return unless authorized? return false unless domain.update(params) publish_event(domain) true end private def authorized? current_user.can?(:update_pages, project) end def publish_event(domain) event = PagesDomainUpdatedEvent.new( data: { project_id: project.id, namespace_id: project.namespace_id, root_namespace_id: project.root_namespace.id, domain_id: domain.id, domain: domain.domain } ) Gitlab::EventStore.publish(event) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe PagesDomains::UpdateService, feature_category: :pages do let_it_be(:user) { create(:user) } let_it_be(:pages_domain) { create(:pages_domain, :with_project) } let(:params) do attributes_for(:pages_domain, :with_trusted_chain).slice(:key, :certificate).tap do |params| params[:user_provided_key] = params.delete(:key) params[:user_provided_certificate] = params.delete(:certificate) end end subject(:service) { described_class.new(pages_domain.project, user, params) } context 'when the user does not have the required permissions' do it 'does not update the pages domain and does not publish a PagesDomainUpdatedEvent' do expect do expect(service.execute(pages_domain)).to be_nil end.to not_publish_event(PagesDomains::PagesDomainUpdatedEvent) end end context 'when the user has the required permissions' do before do pages_domain.project.add_maintainer(user) end context 'when it updates the domain successfully' do it 'updates the domain' do expect(service.execute(pages_domain)).to eq(true) end it 'publishes a PagesDomainUpdatedEvent' do expect { service.execute(pages_domain) } .to publish_event(PagesDomains::PagesDomainUpdatedEvent) .with( project_id: pages_domain.project.id, namespace_id: pages_domain.project.namespace.id, root_namespace_id: pages_domain.project.root_namespace.id, domain_id: pages_domain.id, domain: pages_domain.domain ) end end context 'when it fails to update the domain' do let(:params) { { user_provided_certificate: 'blabla' } } it 'does not update a pages domain' do expect(service.execute(pages_domain)).to be(false) end it 'does not publish a PagesDomainUpdatedEvent' do expect { service.execute(pages_domain) } .not_to publish_event(PagesDomains::PagesDomainUpdatedEvent) end end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module PagesDomains class ObtainLetsEncryptCertificateService # time for processing validation requests for acme challenges # 5-15 seconds is usually enough CHALLENGE_PROCESSING_DELAY = 1.minute.freeze # time LetsEncrypt ACME server needs to generate the certificate # no particular SLA, usually takes 10-15 seconds CERTIFICATE_PROCESSING_DELAY = 1.minute.freeze attr_reader :pages_domain def initialize(pages_domain) @pages_domain = pages_domain end def execute pages_domain.acme_orders.expired.delete_all acme_order = pages_domain.acme_orders.first unless acme_order ::PagesDomains::CreateAcmeOrderService.new(pages_domain).execute PagesDomainSslRenewalWorker.perform_in(CHALLENGE_PROCESSING_DELAY, pages_domain.id) return end api_order = ::Gitlab::LetsEncrypt::Client.new.load_order(acme_order.url) # https://www.rfc-editor.org/rfc/rfc8555#section-7.1.6 - statuses diagram case api_order.status when 'ready' api_order.request_certificate(private_key: acme_order.private_key, domain: pages_domain.domain) PagesDomainSslRenewalWorker.perform_in(CERTIFICATE_PROCESSING_DELAY, pages_domain.id) when 'valid' save_certificate(acme_order.private_key, api_order) acme_order.destroy! when 'invalid' save_order_error(acme_order, api_order) end end private def save_certificate(private_key, api_order) certificate = api_order.certificate pages_domain.update!(gitlab_provided_key: private_key, gitlab_provided_certificate: certificate) end def save_order_error(acme_order, api_order) log_error(api_order) pages_domain.assign_attributes(auto_ssl_failed: true) pages_domain.save!(validate: false) acme_order.destroy! NotificationService.new.pages_domain_auto_ssl_failed(pages_domain) end def log_error(api_order) Gitlab::AppLogger.error( message: "Failed to obtain Let's Encrypt certificate", acme_error: api_order.challenge_error, project_id: pages_domain.project_id, pages_domain: pages_domain.domain ) rescue StandardError => e # getting authorizations is an additional network request which can raise errors Gitlab::ErrorTracking.track_exception(e) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe PagesDomains::ObtainLetsEncryptCertificateService, feature_category: :pages do include LetsEncryptHelpers let(:pages_domain) { create(:pages_domain, :without_certificate, :without_key) } let(:service) { described_class.new(pages_domain) } before do stub_lets_encrypt_settings end around do |example| Sidekiq::Testing.fake! do example.run end end def expect_to_create_acme_challenge expect(::PagesDomains::CreateAcmeOrderService).to receive(:new).with(pages_domain) .and_wrap_original do |m, *args| create_service = m.call(*args) expect(create_service).to receive(:execute) create_service end end def stub_lets_encrypt_order(url, status) order = ::Gitlab::LetsEncrypt::Order.new(acme_order_double(status: status)) allow_next_instance_of(::Gitlab::LetsEncrypt::Client) do |instance| allow(instance).to receive(:load_order).with(url).and_return(order) end order end context 'when there is no acme order' do it 'creates acme order and schedules next step' do expect_to_create_acme_challenge expect(PagesDomainSslRenewalWorker).to( receive(:perform_in).with(described_class::CHALLENGE_PROCESSING_DELAY, pages_domain.id) .and_return(nil).once ) service.execute end end context 'when there is expired acme order' do let!(:existing_order) do create(:pages_domain_acme_order, :expired, pages_domain: pages_domain) end it 'removes acme order and creates new one' do expect_to_create_acme_challenge service.execute expect(PagesDomainAcmeOrder.find_by_id(existing_order.id)).to be_nil end end %w[pending processing].each do |status| context "there is an order in '#{status}' status" do let(:existing_order) do create(:pages_domain_acme_order, pages_domain: pages_domain) end before do stub_lets_encrypt_order(existing_order.url, status) end it 'does not raise errors' do expect do service.execute end.not_to raise_error end end end context 'when order is ready' do let(:existing_order) do create(:pages_domain_acme_order, pages_domain: pages_domain) end let!(:api_order) do stub_lets_encrypt_order(existing_order.url, 'ready') end it 'request certificate and schedules next step' do expect(api_order).to receive(:request_certificate).and_call_original expect(PagesDomainSslRenewalWorker).to( receive(:perform_in).with(described_class::CERTIFICATE_PROCESSING_DELAY, pages_domain.id) .and_return(nil).once ) service.execute end end context 'when order is valid' do let(:existing_order) do create(:pages_domain_acme_order, pages_domain: pages_domain) end let!(:api_order) do stub_lets_encrypt_order(existing_order.url, 'valid') end let(:certificate) do key = OpenSSL::PKey.read(existing_order.private_key) subject = "/C=BE/O=Test/OU=Test/CN=#{pages_domain.domain}" cert = OpenSSL::X509::Certificate.new cert.subject = cert.issuer = OpenSSL::X509::Name.parse(subject) cert.not_before = Time.current cert.not_after = 1.year.from_now cert.public_key = key.public_key cert.serial = 0x0 cert.version = 2 ef = OpenSSL::X509::ExtensionFactory.new ef.subject_certificate = cert ef.issuer_certificate = cert cert.extensions = [ ef.create_extension("basicConstraints", "CA:TRUE", true), ef.create_extension("subjectKeyIdentifier", "hash") ] cert.add_extension ef.create_extension("authorityKeyIdentifier", "keyid:always,issuer:always") cert.sign key, OpenSSL::Digest.new('SHA256') cert.to_pem end before do expect(api_order).to receive(:certificate) { certificate } end it 'saves private_key and certificate for domain' do service.execute expect(pages_domain.key).to be_present 
expect(pages_domain.certificate).to eq(certificate) end it 'marks certificate as gitlab_provided' do service.execute expect(pages_domain.certificate_source).to eq("gitlab_provided") end it 'removes order from database' do service.execute expect(PagesDomainAcmeOrder.find_by_id(existing_order.id)).to be_nil end end context 'when order is invalid' do let(:existing_order) do create(:pages_domain_acme_order, pages_domain: pages_domain) end let!(:api_order) do stub_lets_encrypt_order(existing_order.url, 'invalid') end it 'saves error to domain and deletes acme order' do expect do service.execute end.to change { pages_domain.reload.auto_ssl_failed }.from(false).to(true) expect(PagesDomainAcmeOrder.find_by_id(existing_order.id)).to be_nil end it 'sends notification' do expect_next_instance_of(NotificationService) do |notification_service| expect(notification_service).to receive(:pages_domain_auto_ssl_failed).with(pages_domain) end service.execute end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module PagesDomains class CreateService < BaseService def execute return unless authorized? domain = project.pages_domains.create(params) publish_event(domain) if domain.persisted? domain end private def authorized? current_user.can?(:update_pages, project) end def publish_event(domain) event = PagesDomainCreatedEvent.new( data: { project_id: project.id, namespace_id: project.namespace_id, root_namespace_id: project.root_namespace.id, domain_id: domain.id, domain: domain.domain } ) Gitlab::EventStore.publish(event) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ::PagesDomains::CreateService, feature_category: :pages do let_it_be(:user) { create(:user) } let_it_be(:project) { create(:project, :in_subgroup) } let(:domain) { 'new.domain.com' } let(:attributes) { { domain: domain } } subject(:service) { described_class.new(project, user, attributes) } context 'when the user does not have the required permissions' do it 'does not create a pages domain and does not publish a PagesDomainCreatedEvent' do expect(service.execute).to be_nil expect { service.execute } .to not_publish_event(PagesDomains::PagesDomainCreatedEvent) .and not_change(project.pages_domains, :count) end end context 'when the user has the required permissions' do before do project.add_maintainer(user) end context 'when it saves the domain successfully' do it 'creates the domain and publishes a PagesDomainCreatedEvent' do pages_domain = nil expect { pages_domain = service.execute } .to change(project.pages_domains, :count) .and publish_event(PagesDomains::PagesDomainCreatedEvent) .with( project_id: project.id, namespace_id: project.namespace.id, root_namespace_id: project.root_namespace.id, domain_id: kind_of(Numeric), domain: domain ) expect(pages_domain).to be_persisted end end context 'when it fails to save the domain' do let(:domain) { nil } it 'does not create a pages domain and does not publish a PagesDomainCreatedEvent' do pages_domain = nil expect { pages_domain = service.execute } .to not_publish_event(PagesDomains::PagesDomainCreatedEvent) .and not_change(project.pages_domains, :count) expect(pages_domain).not_to be_persisted end end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module PagesDomains class DeleteService < BaseService def execute(domain) return unless authorized? domain.destroy publish_event(domain) end private def authorized? current_user.can?(:update_pages, project) end def publish_event(domain) event = PagesDomainDeletedEvent.new( data: { project_id: project.id, namespace_id: project.namespace_id, root_namespace_id: project.root_namespace.id, domain_id: domain.id, domain: domain.domain } ) Gitlab::EventStore.publish(event) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ::PagesDomains::DeleteService, feature_category: :pages do let_it_be(:user) { create(:user) } let_it_be(:pages_domain) { create(:pages_domain, :with_project) } let(:params) do attributes_for(:pages_domain, :with_trusted_chain).slice(:key, :certificate).tap do |params| params[:user_provided_key] = params.delete(:key) params[:user_provided_certificate] = params.delete(:certificate) end end subject(:service) { described_class.new(pages_domain.project, user, params) } context 'when the user does not have the required permissions' do it 'does not delete the pages domain and does not publish a PagesDomainDeletedEvent' do result_match = -> { expect(service.execute(pages_domain)).to be_nil } expect(&result_match) .to not_publish_event(PagesDomains::PagesDomainDeletedEvent) end end context 'when the user has the required permissions' do before do pages_domain.project.add_maintainer(user) end context 'when it updates the domain successfully' do it 'deletes the domain and publishes a PagesDomainDeletedEvent' do result_match = -> { expect(service.execute(pages_domain)).not_to be_nil } expect(&result_match) .to publish_event(PagesDomains::PagesDomainDeletedEvent) .with( project_id: pages_domain.project.id, namespace_id: pages_domain.project.namespace.id, root_namespace_id: pages_domain.project.root_namespace.id, domain_id: pages_domain.id, domain: pages_domain.domain ) end end end end
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true # # Concern that helps with getting an exclusive lease for running a block # of code. # # `#try_obtain_lease` takes a block which will be run if it was able to # obtain the lease. Implement `#lease_timeout` to configure the timeout # for the exclusive lease. # # Optionally override `#lease_key` to set the # lease key, it defaults to the class name with underscores. # # Optionally override `#lease_release?` to prevent the job to # be re-executed more often than LEASE_TIMEOUT. # module ExclusiveLeaseGuard extend ActiveSupport::Concern def try_obtain_lease lease = exclusive_lease.try_obtain unless lease log_lease_taken return end begin yield lease ensure release_lease(lease) if lease_release? end end def exclusive_lease @lease ||= Gitlab::ExclusiveLease.new(lease_key, timeout: lease_timeout) end def lease_key @lease_key ||= self.class.name.underscore end def lease_timeout raise NotImplementedError, "#{self.class.name} does not implement #{__method__}" end def lease_release? true end def release_lease(uuid) Gitlab::ExclusiveLease.cancel(lease_key, uuid) end def renew_lease! exclusive_lease.renew end def log_lease_taken logger = Gitlab::AppJsonLogger args = { message: lease_taken_message, lease_key: lease_key, class_name: self.class.name, lease_timeout: lease_timeout } case lease_taken_log_level when :debug then logger.debug(args) when :info then logger.info(args) when :warn then logger.warn(args) else logger.error(args) end end def lease_taken_message "Cannot obtain an exclusive lease. There must be another instance already in execution." end def lease_taken_log_level :error end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe ExclusiveLeaseGuard, :clean_gitlab_redis_shared_state, feature_category: :shared do subject :subject_class do Class.new do include ExclusiveLeaseGuard def self.name 'ExclusiveLeaseGuardTestClass' end def call(&block) try_obtain_lease do internal_method(&block) end end def internal_method yield end def lease_timeout 1.second end end end describe '#try_obtain_lease' do let(:subject) { subject_class.new } it 'obtains the lease, calls internal_method and releases the lease', :aggregate_failures do expect(subject).to receive(:internal_method).and_call_original subject.call do expect(subject.exclusive_lease.exists?).to be_truthy end expect(subject.exclusive_lease.exists?).to be_falsey end context 'when the lease is already obtained' do before do subject.exclusive_lease.try_obtain end after do subject.exclusive_lease.cancel end context 'when the class does not override lease_taken_log_level' do it 'does not call internal_method but logs error', :aggregate_failures do expect(subject).not_to receive(:internal_method) expect(Gitlab::AppJsonLogger).to receive(:error).with({ message: "Cannot obtain an exclusive lease. There must be another instance already in execution.", lease_key: 'exclusive_lease_guard_test_class', class_name: 'ExclusiveLeaseGuardTestClass', lease_timeout: 1.second }) subject.call end end context 'when the class overrides lease_taken_log_level to return :info' do subject :overwritten_subject_class do Class.new(subject_class) do def lease_taken_log_level :info end end end let(:subject) { overwritten_subject_class.new } it 'logs info', :aggregate_failures do expect(Gitlab::AppJsonLogger).to receive(:info).with({ message: "Cannot obtain an exclusive lease. There must be another instance already in execution.", lease_key: 'exclusive_lease_guard_test_class', class_name: 'ExclusiveLeaseGuardTestClass', lease_timeout: 1.second }) subject.call end end context 'when the class overrides lease_taken_log_level to return :debug' do subject :overwritten_subject_class do Class.new(subject_class) do def lease_taken_log_level :debug end end end let(:subject) { overwritten_subject_class.new } it 'logs debug', :aggregate_failures do expect(Gitlab::AppJsonLogger).to receive(:debug).with({ message: "Cannot obtain an exclusive lease. There must be another instance already in execution.", lease_key: 'exclusive_lease_guard_test_class', class_name: 'ExclusiveLeaseGuardTestClass', lease_timeout: 1.second }) subject.call end end end context 'with overwritten lease_release?' do subject :overwritten_subject_class do Class.new(subject_class) do def lease_release? 
false end end end let(:subject) { overwritten_subject_class.new } it 'does not release the lease after execution', :aggregate_failures do subject.call do expect(subject.exclusive_lease.exists?).to be_truthy end expect(subject.exclusive_lease.exists?).to be_truthy end end end describe '#exclusive_lease' do it 'uses the class name as lease key' do expect(Gitlab::ExclusiveLease).to receive(:new).with('exclusive_lease_guard_test_class', timeout: 1.second) subject_class.new.exclusive_lease end context 'with overwritten lease_key' do subject :overwritten_class do Class.new(subject_class) do def lease_key 'other_lease_key' end end end it 'uses the custom lease key' do expect(Gitlab::ExclusiveLease).to receive(:new).with('other_lease_key', timeout: 1.second) overwritten_class.new.exclusive_lease end end end describe '#release_lease' do it 'sends a cancel message to ExclusiveLease' do expect(Gitlab::ExclusiveLease).to receive(:cancel).with('exclusive_lease_guard_test_class', 'some_uuid') subject_class.new.release_lease('some_uuid') end end describe '#renew_lease!' do let(:subject) { subject_class.new } it 'sends a renew message to the exclusive_lease instance' do expect(subject.exclusive_lease).to receive(:renew) subject.renew_lease! end end end
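For reference, a minimal sketch of a class that could use the concern exercised above; the service name, timeout, and work method are assumptions for illustration, not taken from the source:

```ruby
# Hypothetical service guarded by an exclusive lease. Only lease_timeout is
# required by the concern; lease_key defaults to the underscored class name.
class NightlyCleanupService
  include ExclusiveLeaseGuard

  def execute
    try_obtain_lease do
      cleanup! # runs only if the lease was obtained; the lease is released afterwards
    end
  end

  private

  def lease_timeout
    30.minutes # assumption: an ActiveSupport duration, as in the spec's 1.second
  end

  def cleanup!
    # long-running, non-reentrant work goes here
  end
end
```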
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module RateLimitedService extend ActiveSupport::Concern RateLimitedNotSetupError = Class.new(StandardError) class RateLimitedError < StandardError def initialize(key:, rate_limiter:) @key = key @rate_limiter = rate_limiter end def headers # TODO: This will be fleshed out in https://gitlab.com/gitlab-org/gitlab/-/issues/342370 {} end def log_request(request, current_user) rate_limiter.log_request(request, "#{key}_request_limit".to_sym, current_user) end private attr_reader :key, :rate_limiter end class RateLimiterScopedAndKeyed attr_reader :key, :opts, :rate_limiter def initialize(key:, opts:, rate_limiter:) @key = key @opts = opts @rate_limiter = rate_limiter end def rate_limit!(service) evaluated_scope = evaluated_scope_for(service) if rate_limiter.throttled?(key, **opts.merge(scope: evaluated_scope.values, users_allowlist: users_allowlist)) raise RateLimitedError.new(key: key, rate_limiter: rate_limiter), _('This endpoint has been requested too many times. Try again later.') end end private def users_allowlist @users_allowlist ||= opts[:users_allowlist] ? opts[:users_allowlist].call : [] end def evaluated_scope_for(service) opts[:scope].index_with do |var| service.public_send(var) # rubocop: disable GitlabSecurity/PublicSend end end end prepended do attr_accessor :rate_limiter_bypassed cattr_accessor :rate_limiter_scoped_and_keyed def self.rate_limit(key:, opts:, rate_limiter: ::Gitlab::ApplicationRateLimiter) self.rate_limiter_scoped_and_keyed = RateLimiterScopedAndKeyed.new( key: key, opts: opts, rate_limiter: rate_limiter ) end end def execute_without_rate_limiting(*args, **kwargs) self.rate_limiter_bypassed = true execute(*args, **kwargs) ensure self.rate_limiter_bypassed = false end def execute(*args, **kwargs) raise RateLimitedNotSetupError if rate_limiter_scoped_and_keyed.nil? rate_limiter_scoped_and_keyed.rate_limit!(self) unless rate_limiter_bypassed super end end ```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe RateLimitedService, feature_category: :rate_limiting do
  let(:key) { :issues_create }
  let(:scope) { [:container, :current_user] }
  let(:opts) { { scope: scope, users_allowlist: -> { [Users::Internal.support_bot.username] } } }
  let(:rate_limiter) { ::Gitlab::ApplicationRateLimiter }

  describe 'RateLimitedError' do
    subject { described_class::RateLimitedError.new(key: key, rate_limiter: rate_limiter) }

    describe '#headers' do
      it 'returns a Hash of HTTP headers' do
        # TODO: This will be fleshed out in https://gitlab.com/gitlab-org/gitlab/-/issues/342370
        expected_headers = {}

        expect(subject.headers).to eq(expected_headers)
      end
    end

    describe '#log_request' do
      it 'logs the request' do
        request = instance_double(Grape::Request)
        user = instance_double(User)

        expect(rate_limiter).to receive(:log_request).with(request, "#{key}_request_limit".to_sym, user)

        subject.log_request(request, user)
      end
    end
  end

  describe 'RateLimiterScopedAndKeyed' do
    subject { described_class::RateLimiterScopedAndKeyed.new(key: key, opts: opts, rate_limiter: rate_limiter) }

    describe '#rate_limit!' do
      let_it_be(:project) { create(:project) }
      let_it_be(:current_user) { create(:user) }

      let(:service) { instance_double(Issues::CreateService, container: project, current_user: current_user) }
      let(:evaluated_scope) { [project, current_user] }
      let(:evaluated_opts) { { scope: evaluated_scope, users_allowlist: %w[support-bot] } }

      context 'when rate limiting is not in effect' do
        let(:throttled) { false }

        it 'does not raise an exception' do
          expect(subject.rate_limit!(service)).to be_nil
        end
      end

      context 'when rate limiting is in effect' do
        before do
          allow(rate_limiter).to receive(:throttled?).and_return(true)
        end

        it 'raises a RateLimitedError exception' do
          expect { subject.rate_limit!(service) }.to raise_error(described_class::RateLimitedError, 'This endpoint has been requested too many times. Try again later.')
        end
      end
    end
  end

  describe '#execute_without_rate_limiting' do
    let(:rate_limiter_scoped_and_keyed) { instance_double(RateLimitedService::RateLimiterScopedAndKeyed) }
    let(:subject) do
      local_key = key
      local_opts = opts

      Class.new do
        prepend RateLimitedService

        rate_limit key: local_key, opts: local_opts

        def execute(*args, **kwargs)
          'main logic here'
        end
      end.new
    end

    before do
      allow(RateLimitedService::RateLimiterScopedAndKeyed).to receive(:new).with(key: key, opts: opts, rate_limiter: rate_limiter).and_return(rate_limiter_scoped_and_keyed)
    end

    context 'bypasses rate limiting' do
      it 'calls super' do
        expect(rate_limiter_scoped_and_keyed).not_to receive(:rate_limit!).with(subject)

        expect(subject.execute_without_rate_limiting).to eq('main logic here')
      end
    end
  end

  describe '#execute' do
    context 'when rate_limit has not been called' do
      let(:subject) { Class.new { prepend RateLimitedService }.new }

      it 'raises an RateLimitedNotSetupError exception' do
        expect { subject.execute }.to raise_error(described_class::RateLimitedNotSetupError)
      end
    end

    context 'when rate_limit has been called' do
      let(:rate_limiter_scoped_and_keyed) { instance_double(RateLimitedService::RateLimiterScopedAndKeyed) }
      let(:subject) do
        local_key = key
        local_opts = opts

        Class.new do
          prepend RateLimitedService

          rate_limit key: local_key, opts: local_opts

          def execute(*args, **kwargs)
            'main logic here'
          end
        end.new
      end

      before do
        allow(RateLimitedService::RateLimiterScopedAndKeyed).to receive(:new).with(key: key, opts: opts, rate_limiter: rate_limiter).and_return(rate_limiter_scoped_and_keyed)
      end

      context 'and applies rate limiting' do
        it 'raises an RateLimitedService::RateLimitedError exception' do
          expect(rate_limiter_scoped_and_keyed).to receive(:rate_limit!).with(subject).and_raise(RateLimitedService::RateLimitedError.new(key: key, rate_limiter: rate_limiter))

          expect { subject.execute }.to raise_error(RateLimitedService::RateLimitedError)
        end
      end

      context 'but does not apply rate limiting' do
        it 'calls super' do
          expect(rate_limiter_scoped_and_keyed).to receive(:rate_limit!).with(subject).and_return(nil)

          expect(subject.execute).to eq('main logic here')
        end
      end
    end
  end
end
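As a rough illustration of the pattern this spec drives, a hypothetical service that prepends the concern; the class name, rate-limit key, and scope are illustrative, and the key would still need to be registered with `Gitlab::ApplicationRateLimiter` to throttle anything:

```ruby
# Hypothetical service; #execute raises RateLimitedService::RateLimitedError
# when the application rate limiter reports the key as throttled for the scope.
class CommentCreateService
  prepend RateLimitedService

  rate_limit key: :comments_create, opts: { scope: [:project, :current_user] }

  attr_reader :project, :current_user

  def initialize(project, current_user)
    @project = project
    @current_user = current_user
  end

  def execute
    # main service logic here
  end
end
```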
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module AuditEventSaveType
  SAVE_TYPES = {
    database: 0b01,
    stream: 0b10,
    database_and_stream: 0b11
  }.freeze

  # def should_save_stream?(type)
  # def should_save_database?(type)
  [:database, :stream].each do |type|
    define_method("should_save_#{type}?") do |param_type|
      return false unless save_type_valid?(param_type)

      # If the current type does not support query, the result of the `&` operation is 0.
      SAVE_TYPES[param_type] & SAVE_TYPES[type] != 0
    end
  end

  private

  def save_type_valid?(type)
    SAVE_TYPES.key?(type)
  end
end
```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe AuditEventSaveType, feature_category: :audit_events do
  subject(:target) { Object.new.extend(described_class) }

  describe '#should_save_database? and #should_save_stream?' do
    using RSpec::Parameterized::TableSyntax

    where(:query_method, :query_param, :result) do
      :should_save_stream?   | :stream              | true
      :should_save_stream?   | :database_and_stream | true
      :should_save_database? | :database            | true
      :should_save_database? | :database_and_stream | true
      :should_save_stream?   | :database            | false
      :should_save_stream?   | nil                  | false
      :should_save_database? | :stream              | false
      :should_save_database? | nil                  | false
    end

    with_them do
      it 'returns corresponding results according to the query_method and query_param' do
        expect(target.send(query_method, query_param)).to eq result
      end
    end
  end
end
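A small, hedged sketch of how the bitmask queries behave when the module is mixed into a plain object; the class name is illustrative:

```ruby
# The module is plain Ruby, so it can be exercised directly.
class AuditSink
  include AuditEventSaveType
end

sink = AuditSink.new
sink.should_save_database?(:database_and_stream) # => true  (0b11 & 0b01 != 0)
sink.should_save_stream?(:database)              # => false (0b01 & 0b10 == 0)
sink.should_save_stream?(:unknown)               # => false (invalid save type)
```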
Write RSpec test file for following ruby class ```ruby # frozen_string_literal: true module MergeRequests module AssignsMergeParams def self.included(klass) raise "#{self} can not be included in #{klass} without implementing #current_user" unless klass.method_defined?(:current_user) end def assign_allowed_merge_params(merge_request, merge_params) known_merge_params = merge_params.to_h.with_indifferent_access.slice(*MergeRequest::KNOWN_MERGE_PARAMS) # Not checking `MergeRequest#can_remove_source_branch` as that includes # other checks that aren't needed here. known_merge_params.delete(:force_remove_source_branch) unless current_user.can?(:push_code, merge_request.source_project) merge_request.merge_params.merge!(known_merge_params) # Delete the known params now that they're assigned, so we don't try to # assign them through an `#assign_attributes` later. # They could be coming in as strings or symbols merge_params.to_h.with_indifferent_access.except!(*MergeRequest::KNOWN_MERGE_PARAMS) end end end ```
# frozen_string_literal: true require 'spec_helper' RSpec.describe MergeRequests::AssignsMergeParams, feature_category: :code_review_workflow do it 'raises an error when used from an instance that does not respond to #current_user' do define_class = -> { Class.new { include MergeRequests::AssignsMergeParams }.new } expect { define_class.call }.to raise_error %r{can not be included in (.*) without implementing #current_user} end describe '#assign_allowed_merge_params' do let(:merge_request) { build(:merge_request) } let(:params) do { commit_message: 'Commit Message', 'should_remove_source_branch' => true, unknown_symbol: 'Unknown symbol', 'unknown_string' => 'Unknown String' } end subject(:merge_request_service) do Class.new do attr_accessor :current_user include MergeRequests::AssignsMergeParams def initialize(user) @current_user = user end end end it 'only assigns known parameters to the merge request' do service = merge_request_service.new(merge_request.author) service.assign_allowed_merge_params(merge_request, params) expect(merge_request.merge_params).to eq('commit_message' => 'Commit Message', 'should_remove_source_branch' => true) end it 'returns a hash without the known merge params' do service = merge_request_service.new(merge_request.author) result = service.assign_allowed_merge_params(merge_request, params) expect(result).to eq({ 'unknown_symbol' => 'Unknown symbol', 'unknown_string' => 'Unknown String' }) end context 'the force_remove_source_branch param' do let(:params) { { force_remove_source_branch: true } } it 'assigns the param if the user is allowed to do that' do service = merge_request_service.new(merge_request.author) result = service.assign_allowed_merge_params(merge_request, params) expect(merge_request.force_remove_source_branch?).to be true expect(result).to be_empty end it 'only removes the param if the user is not allowed to do that' do service = merge_request_service.new(build(:user)) result = service.assign_allowed_merge_params(merge_request, params) expect(merge_request.force_remove_source_branch?).to be_falsy expect(result).to be_empty end end end end
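For orientation, a hypothetical service object mirroring the pattern in the spec; the class and method names are illustrative:

```ruby
# The including class must respond to #current_user before the include,
# otherwise the concern's included hook raises.
class ExampleMergeRequestUpdater
  attr_reader :current_user

  include MergeRequests::AssignsMergeParams

  def initialize(current_user)
    @current_user = current_user
  end

  def apply(merge_request, params)
    # Copies known merge params onto merge_request.merge_params and returns
    # the remaining, unknown params for later mass assignment.
    leftover = assign_allowed_merge_params(merge_request, params)
    merge_request.assign_attributes(leftover)
  end
end
```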
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module Services
  # adapter for existing services over BaseServiceUtility - add error and
  # success methods returning ServiceResponse objects
  module ReturnServiceResponses
    def error(message, http_status, pass_back: {})
      ServiceResponse.error(message: message, http_status: http_status, payload: pass_back)
    end

    def success(payload)
      ServiceResponse.success(payload: payload)
    end
  end
end
```
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Services::ReturnServiceResponses, feature_category: :rate_limiting do
  subject(:object) { Class.new { include Services::ReturnServiceResponses }.new }

  let(:message) { 'a delivering message' }
  let(:payload) { 'string payload' }

  describe '#success' do
    it 'returns a ServiceResponse instance' do
      response = object.success(payload)

      expect(response).to be_an(ServiceResponse)
      expect(response).to be_success
      expect(response.message).to be_nil
      expect(response.payload).to eq(payload)
      expect(response.http_status).to eq(:ok)
    end
  end

  describe '#error' do
    it 'returns a ServiceResponse instance' do
      response = object.error(message, :not_found, pass_back: payload)

      expect(response).to be_an(ServiceResponse)
      expect(response).to be_error
      expect(response.message).to eq(message)
      expect(response.payload).to eq(payload)
      expect(response.http_status).to eq(:not_found)
    end
  end
end
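A short, hypothetical example of the adapter in use; the service name and payload shape are assumptions for illustration:

```ruby
# Hypothetical service returning ServiceResponse objects via the adapter.
class LookupService
  include Services::ReturnServiceResponses

  def execute(record)
    return error('record not found', :not_found) unless record

    success(record: record)
  end
end

# LookupService.new.execute(nil).error?      # => true, with http_status :not_found
# LookupService.new.execute(thing).payload   # => { record: thing }
```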